Vendor dependencies for 0.3.0 release

2025-09-27 10:29:08 -05:00
parent 0c8d39d483
commit 82ab7f317b
26803 changed files with 16134934 additions and 0 deletions

vendor/bevy/examples/2d/2d_shapes.rs vendored Normal file

@@ -0,0 +1,86 @@
//! Shows how to render simple primitive shapes with a single color.
//!
//! You can toggle wireframes with the space bar, except on Wasm. Wasm does not support
//! `POLYGON_MODE_LINE` on the GPU.
use bevy::prelude::*;
#[cfg(not(target_arch = "wasm32"))]
use bevy::sprite::{Wireframe2dConfig, Wireframe2dPlugin};
fn main() {
let mut app = App::new();
app.add_plugins((
DefaultPlugins,
#[cfg(not(target_arch = "wasm32"))]
Wireframe2dPlugin::default(),
))
.add_systems(Startup, setup);
#[cfg(not(target_arch = "wasm32"))]
app.add_systems(Update, toggle_wireframe);
app.run();
}
const X_EXTENT: f32 = 900.;
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorMaterial>>,
) {
commands.spawn(Camera2d);
let shapes = [
meshes.add(Circle::new(50.0)),
meshes.add(CircularSector::new(50.0, 1.0)),
meshes.add(CircularSegment::new(50.0, 1.25)),
meshes.add(Ellipse::new(25.0, 50.0)),
meshes.add(Annulus::new(25.0, 50.0)),
meshes.add(Capsule2d::new(25.0, 50.0)),
meshes.add(Rhombus::new(75.0, 100.0)),
meshes.add(Rectangle::new(50.0, 100.0)),
meshes.add(RegularPolygon::new(50.0, 6)),
meshes.add(Triangle2d::new(
Vec2::Y * 50.0,
Vec2::new(-50.0, -50.0),
Vec2::new(50.0, -50.0),
)),
];
let num_shapes = shapes.len();
for (i, shape) in shapes.into_iter().enumerate() {
// Distribute colors evenly across the rainbow.
let color = Color::hsl(360. * i as f32 / num_shapes as f32, 0.95, 0.7);
commands.spawn((
Mesh2d(shape),
MeshMaterial2d(materials.add(color)),
Transform::from_xyz(
// Distribute shapes from -X_EXTENT/2 to +X_EXTENT/2.
-X_EXTENT / 2. + i as f32 / (num_shapes - 1) as f32 * X_EXTENT,
0.0,
0.0,
),
));
}
#[cfg(not(target_arch = "wasm32"))]
commands.spawn((
Text::new("Press space to toggle wireframes"),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
#[cfg(not(target_arch = "wasm32"))]
fn toggle_wireframe(
mut wireframe_config: ResMut<Wireframe2dConfig>,
keyboard: Res<ButtonInput<KeyCode>>,
) {
if keyboard.just_pressed(KeyCode::Space) {
wireframe_config.global = !wireframe_config.global;
}
}


@@ -0,0 +1,191 @@
//! This example demonstrates how to use the `Camera::viewport_to_world_2d` method with a dynamic viewport and camera.
use bevy::{
color::palettes::{
basic::WHITE,
css::{GREEN, RED},
},
math::ops::powf,
prelude::*,
render::camera::Viewport,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(FixedUpdate, controls)
.add_systems(
PostUpdate,
draw_cursor.after(TransformSystem::TransformPropagate),
)
.run();
}
fn draw_cursor(
camera_query: Single<(&Camera, &GlobalTransform)>,
window: Query<&Window>,
mut gizmos: Gizmos,
) {
let (camera, camera_transform) = *camera_query;
let Ok(window) = window.single() else {
return;
};
let Some(cursor_position) = window.cursor_position() else {
return;
};
// Calculate a world position based on the cursor's position.
let Ok(world_pos) = camera.viewport_to_world_2d(camera_transform, cursor_position) else {
return;
};
// To test Camera::world_to_viewport, convert result back to viewport space and then back to world space.
let Ok(viewport_check) = camera.world_to_viewport(camera_transform, world_pos.extend(0.0))
else {
return;
};
let Ok(world_check) = camera.viewport_to_world_2d(camera_transform, viewport_check.xy()) else {
return;
};
gizmos.circle_2d(world_pos, 10., WHITE);
// Should be the same as world_pos
gizmos.circle_2d(world_check, 8., RED);
}
fn controls(
mut camera_query: Query<(&mut Camera, &mut Transform, &mut Projection)>,
window: Query<&Window>,
input: Res<ButtonInput<KeyCode>>,
time: Res<Time<Fixed>>,
) {
let Ok(window) = window.single() else {
return;
};
let Ok((mut camera, mut transform, mut projection)) = camera_query.single_mut() else {
return;
};
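// Per-frame speed: `fspeed` moves the camera transform (f32 world units), while
// `uspeed` moves/resizes the viewport, which is measured in whole physical pixels.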
let fspeed = 600.0 * time.delta_secs();
let uspeed = fspeed as u32;
let window_size = window.resolution.physical_size();
// Camera movement controls
if input.pressed(KeyCode::ArrowUp) {
transform.translation.y += fspeed;
}
if input.pressed(KeyCode::ArrowDown) {
transform.translation.y -= fspeed;
}
if input.pressed(KeyCode::ArrowLeft) {
transform.translation.x -= fspeed;
}
if input.pressed(KeyCode::ArrowRight) {
transform.translation.x += fspeed;
}
// Camera zoom controls
if let Projection::Orthographic(projection2d) = &mut *projection {
if input.pressed(KeyCode::Comma) {
projection2d.scale *= powf(4.0f32, time.delta_secs());
}
if input.pressed(KeyCode::Period) {
projection2d.scale *= powf(0.25f32, time.delta_secs());
}
}
if let Some(viewport) = camera.viewport.as_mut() {
// Viewport movement controls
if input.pressed(KeyCode::KeyW) {
viewport.physical_position.y = viewport.physical_position.y.saturating_sub(uspeed);
}
if input.pressed(KeyCode::KeyS) {
viewport.physical_position.y += uspeed;
}
if input.pressed(KeyCode::KeyA) {
viewport.physical_position.x = viewport.physical_position.x.saturating_sub(uspeed);
}
if input.pressed(KeyCode::KeyD) {
viewport.physical_position.x += uspeed;
}
// Bound viewport position so it doesn't go off-screen
viewport.physical_position = viewport
.physical_position
.min(window_size - viewport.physical_size);
// Viewport size controls
if input.pressed(KeyCode::KeyI) {
viewport.physical_size.y = viewport.physical_size.y.saturating_sub(uspeed);
}
if input.pressed(KeyCode::KeyK) {
viewport.physical_size.y += uspeed;
}
if input.pressed(KeyCode::KeyJ) {
viewport.physical_size.x = viewport.physical_size.x.saturating_sub(uspeed);
}
if input.pressed(KeyCode::KeyL) {
viewport.physical_size.x += uspeed;
}
// Bound viewport size so it doesn't go off-screen
viewport.physical_size = viewport
.physical_size
.min(window_size - viewport.physical_position)
.max(UVec2::new(20, 20));
}
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorMaterial>>,
window: Single<&Window>,
) {
let window_size = window.resolution.physical_size().as_vec2();
// Initialize centered, non-window-filling viewport
commands.spawn((
Camera2d,
Camera {
viewport: Some(Viewport {
physical_position: (window_size * 0.125).as_uvec2(),
physical_size: (window_size * 0.75).as_uvec2(),
..default()
}),
..default()
},
));
// Create a minimal UI explaining how to interact with the example
commands.spawn((
Text::new(
"Move the mouse to see the circle follow your cursor.\n\
Use the arrow keys to move the camera.\n\
Use the comma and period keys to zoom in and out.\n\
Use the WASD keys to move the viewport.\n\
Use the IJKL keys to resize the viewport.",
),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
// Add mesh to make camera movement visible
commands.spawn((
Mesh2d(meshes.add(Rectangle::new(40.0, 20.0))),
MeshMaterial2d(materials.add(Color::from(GREEN))),
));
// Add background to visualize viewport bounds
commands.spawn((
Mesh2d(meshes.add(Rectangle::new(50000.0, 50000.0))),
MeshMaterial2d(materials.add(Color::linear_rgb(0.01, 0.01, 0.01))),
Transform::from_translation(Vec3::new(0.0, 0.0, -200.0)),
));
}

vendor/bevy/examples/2d/bloom_2d.rs vendored Normal file

@@ -0,0 +1,214 @@
//! Illustrates bloom post-processing in 2d.
use bevy::{
core_pipeline::{
bloom::{Bloom, BloomCompositeMode},
tonemapping::{DebandDither, Tonemapping},
},
prelude::*,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, update_bloom_settings)
.run();
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorMaterial>>,
asset_server: Res<AssetServer>,
) {
commands.spawn((
Camera2d,
Camera {
hdr: true, // 1. HDR is required for bloom
clear_color: ClearColorConfig::Custom(Color::BLACK),
..default()
},
Tonemapping::TonyMcMapface, // 2. Using a tonemapper that desaturates to white is recommended
Bloom::default(), // 3. Enable bloom for the camera
DebandDither::Enabled, // Optional: bloom causes gradients which cause banding
));
// Sprite
commands.spawn(Sprite {
image: asset_server.load("branding/bevy_bird_dark.png"),
color: Color::srgb(5.0, 5.0, 5.0), // 4. Put something bright in a dark environment to see the effect
custom_size: Some(Vec2::splat(160.0)),
..default()
});
// Circle mesh
commands.spawn((
Mesh2d(meshes.add(Circle::new(100.))),
// 4. Put something bright in a dark environment to see the effect
MeshMaterial2d(materials.add(Color::srgb(7.5, 0.0, 7.5))),
Transform::from_translation(Vec3::new(-200., 0., 0.)),
));
// Hexagon mesh
commands.spawn((
Mesh2d(meshes.add(RegularPolygon::new(100., 6))),
// 4. Put something bright in a dark environment to see the effect
MeshMaterial2d(materials.add(Color::srgb(6.25, 9.4, 9.1))),
Transform::from_translation(Vec3::new(200., 0., 0.)),
));
// UI
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
// ------------------------------------------------------------------------------------------------
fn update_bloom_settings(
camera: Single<(Entity, &Tonemapping, Option<&mut Bloom>), With<Camera>>,
mut text: Single<&mut Text>,
mut commands: Commands,
keycode: Res<ButtonInput<KeyCode>>,
time: Res<Time>,
) {
let (camera_entity, tonemapping, bloom) = camera.into_inner();
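// `Bloom` is optional on the camera: when present we show and edit its settings,
// when absent we only offer to re-insert it with the space bar.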
match bloom {
Some(mut bloom) => {
text.0 = "Bloom (Toggle: Space)\n".to_string();
text.push_str(&format!("(Q/A) Intensity: {}\n", bloom.intensity));
text.push_str(&format!(
"(W/S) Low-frequency boost: {}\n",
bloom.low_frequency_boost
));
text.push_str(&format!(
"(E/D) Low-frequency boost curvature: {}\n",
bloom.low_frequency_boost_curvature
));
text.push_str(&format!(
"(R/F) High-pass frequency: {}\n",
bloom.high_pass_frequency
));
text.push_str(&format!(
"(T/G) Mode: {}\n",
match bloom.composite_mode {
BloomCompositeMode::EnergyConserving => "Energy-conserving",
BloomCompositeMode::Additive => "Additive",
}
));
text.push_str(&format!("(Y/H) Threshold: {}\n", bloom.prefilter.threshold));
text.push_str(&format!(
"(U/J) Threshold softness: {}\n",
bloom.prefilter.threshold_softness
));
text.push_str(&format!("(I/K) Horizontal Scale: {}\n", bloom.scale.x));
if keycode.just_pressed(KeyCode::Space) {
commands.entity(camera_entity).remove::<Bloom>();
}
let dt = time.delta_secs();
if keycode.pressed(KeyCode::KeyA) {
bloom.intensity -= dt / 10.0;
}
if keycode.pressed(KeyCode::KeyQ) {
bloom.intensity += dt / 10.0;
}
bloom.intensity = bloom.intensity.clamp(0.0, 1.0);
if keycode.pressed(KeyCode::KeyS) {
bloom.low_frequency_boost -= dt / 10.0;
}
if keycode.pressed(KeyCode::KeyW) {
bloom.low_frequency_boost += dt / 10.0;
}
bloom.low_frequency_boost = bloom.low_frequency_boost.clamp(0.0, 1.0);
if keycode.pressed(KeyCode::KeyD) {
bloom.low_frequency_boost_curvature -= dt / 10.0;
}
if keycode.pressed(KeyCode::KeyE) {
bloom.low_frequency_boost_curvature += dt / 10.0;
}
bloom.low_frequency_boost_curvature =
bloom.low_frequency_boost_curvature.clamp(0.0, 1.0);
if keycode.pressed(KeyCode::KeyF) {
bloom.high_pass_frequency -= dt / 10.0;
}
if keycode.pressed(KeyCode::KeyR) {
bloom.high_pass_frequency += dt / 10.0;
}
bloom.high_pass_frequency = bloom.high_pass_frequency.clamp(0.0, 1.0);
if keycode.pressed(KeyCode::KeyG) {
bloom.composite_mode = BloomCompositeMode::Additive;
}
if keycode.pressed(KeyCode::KeyT) {
bloom.composite_mode = BloomCompositeMode::EnergyConserving;
}
if keycode.pressed(KeyCode::KeyH) {
bloom.prefilter.threshold -= dt;
}
if keycode.pressed(KeyCode::KeyY) {
bloom.prefilter.threshold += dt;
}
bloom.prefilter.threshold = bloom.prefilter.threshold.max(0.0);
if keycode.pressed(KeyCode::KeyJ) {
bloom.prefilter.threshold_softness -= dt / 10.0;
}
if keycode.pressed(KeyCode::KeyU) {
bloom.prefilter.threshold_softness += dt / 10.0;
}
bloom.prefilter.threshold_softness = bloom.prefilter.threshold_softness.clamp(0.0, 1.0);
if keycode.pressed(KeyCode::KeyK) {
bloom.scale.x -= dt * 2.0;
}
if keycode.pressed(KeyCode::KeyI) {
bloom.scale.x += dt * 2.0;
}
bloom.scale.x = bloom.scale.x.clamp(0.0, 16.0);
}
None => {
text.0 = "Bloom: Off (Toggle: Space)\n".to_string();
if keycode.just_pressed(KeyCode::Space) {
commands.entity(camera_entity).insert(Bloom::default());
}
}
}
text.push_str(&format!("(O) Tonemapping: {:?}\n", tonemapping));
if keycode.just_pressed(KeyCode::KeyO) {
commands
.entity(camera_entity)
.insert(next_tonemap(tonemapping));
}
}
/// Get the next Tonemapping algorithm
fn next_tonemap(tonemapping: &Tonemapping) -> Tonemapping {
match tonemapping {
Tonemapping::None => Tonemapping::AcesFitted,
Tonemapping::AcesFitted => Tonemapping::AgX,
Tonemapping::AgX => Tonemapping::BlenderFilmic,
Tonemapping::BlenderFilmic => Tonemapping::Reinhard,
Tonemapping::Reinhard => Tonemapping::ReinhardLuminance,
Tonemapping::ReinhardLuminance => Tonemapping::SomewhatBoringDisplayTransform,
Tonemapping::SomewhatBoringDisplayTransform => Tonemapping::TonyMcMapface,
Tonemapping::TonyMcMapface => Tonemapping::None,
}
}

vendor/bevy/examples/2d/cpu_draw.rs vendored Normal file

@@ -0,0 +1,144 @@
//! Example of how to draw to a texture from the CPU.
//!
//! You can set the values of individual pixels to whatever you want.
//! Bevy provides user-friendly APIs that work with [`Color`](bevy::color::Color)
//! values and automatically perform any necessary conversions and encoding
//! into the texture's native pixel format.
use bevy::color::{color_difference::EuclideanDistance, palettes::css};
use bevy::prelude::*;
use bevy::render::{
render_asset::RenderAssetUsages,
render_resource::{Extent3d, TextureDimension, TextureFormat},
};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
const IMAGE_WIDTH: u32 = 256;
const IMAGE_HEIGHT: u32 = 256;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
// In this example, we will use a fixed timestep to draw a pattern on the screen
// one pixel at a time, so the pattern will gradually emerge over time, and
// the speed at which it appears is not tied to the framerate.
// Let's make the fixed update very fast, so it doesn't take too long. :)
.insert_resource(Time::<Fixed>::from_hz(1024.0))
.add_systems(Startup, setup)
.add_systems(FixedUpdate, draw)
.run();
}
/// Store the image handle that we will draw to, here.
#[derive(Resource)]
struct MyProcGenImage(Handle<Image>);
#[derive(Resource)]
struct SeededRng(ChaCha8Rng);
fn setup(mut commands: Commands, mut images: ResMut<Assets<Image>>) {
commands.spawn(Camera2d);
// Create an image that we are going to draw into
let mut image = Image::new_fill(
// 2D image of size 256x256
Extent3d {
width: IMAGE_WIDTH,
height: IMAGE_HEIGHT,
depth_or_array_layers: 1,
},
TextureDimension::D2,
// Initialize it with a beige color
&(css::BEIGE.to_u8_array()),
// Use the same encoding as the color we set
TextureFormat::Rgba8UnormSrgb,
RenderAssetUsages::MAIN_WORLD | RenderAssetUsages::RENDER_WORLD,
);
// To make it extra fancy, we can set the Alpha of each pixel,
// so that it fades out in a circular fashion.
for y in 0..IMAGE_HEIGHT {
for x in 0..IMAGE_WIDTH {
let center = Vec2::new(IMAGE_WIDTH as f32 / 2.0, IMAGE_HEIGHT as f32 / 2.0);
let max_radius = IMAGE_HEIGHT.min(IMAGE_WIDTH) as f32 / 2.0;
let r = Vec2::new(x as f32, y as f32).distance(center);
let a = 1.0 - (r / max_radius).clamp(0.0, 1.0);
// Here we will set the A value by accessing the raw data bytes.
// (it is the 4th byte of each pixel, as per our `TextureFormat`)
// Find our pixel by its coordinates
let pixel_bytes = image.pixel_bytes_mut(UVec3::new(x, y, 0)).unwrap();
// Convert our f32 to u8
pixel_bytes[3] = (a * u8::MAX as f32) as u8;
}
}
// Add it to Bevy's assets, so it can be used for rendering
// this will give us a handle we can use
// (to display it in a sprite, or as part of UI, etc.)
let handle = images.add(image);
// Create a sprite entity using our image
commands.spawn(Sprite::from_image(handle.clone()));
commands.insert_resource(MyProcGenImage(handle));
// We're seeding the PRNG here to make this example deterministic for testing purposes.
// This isn't strictly required in practical use unless you need your app to be deterministic.
let seeded_rng = ChaCha8Rng::seed_from_u64(19878367467712);
commands.insert_resource(SeededRng(seeded_rng));
}
/// Every fixed update tick, draw one more pixel to make a spiral pattern
fn draw(
my_handle: Res<MyProcGenImage>,
mut images: ResMut<Assets<Image>>,
// Used to keep track of where we are
mut i: Local<u32>,
mut draw_color: Local<Color>,
mut seeded_rng: ResMut<SeededRng>,
) {
if *i == 0 {
// Generate a random color on first run.
*draw_color = Color::linear_rgb(
seeded_rng.0.r#gen(),
seeded_rng.0.r#gen(),
seeded_rng.0.r#gen(),
);
}
// Get the image from Bevy's asset storage.
let image = images.get_mut(&my_handle.0).expect("Image not found");
// Compute the position of the pixel to draw.
let center = Vec2::new(IMAGE_WIDTH as f32 / 2.0, IMAGE_HEIGHT as f32 / 2.0);
let max_radius = IMAGE_HEIGHT.min(IMAGE_WIDTH) as f32 / 2.0;
let rot_speed = 0.0123;
let period = 0.12345;
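// The radius oscillates with a sine wave while the angle advances steadily,
// so successive pixels trace a petal-like spiral around the image center.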
let r = ops::sin(*i as f32 * period) * max_radius;
let xy = Vec2::from_angle(*i as f32 * rot_speed) * r + center;
let (x, y) = (xy.x as u32, xy.y as u32);
// Get the old color of that pixel.
let old_color = image.get_color_at(x, y).unwrap();
// If the old color is our current color, change our drawing color.
let tolerance = 1.0 / 255.0;
if old_color.distance(&draw_color) <= tolerance {
*draw_color = Color::linear_rgb(
seeded_rng.0.r#gen(),
seeded_rng.0.r#gen(),
seeded_rng.0.r#gen(),
);
}
// Set the new color, but keep old alpha value from image.
image
.set_color_at(x, y, draw_color.with_alpha(old_color.alpha()))
.unwrap();
*i += 1;
}


@@ -0,0 +1,96 @@
//! Renders a glTF mesh in 2D with a custom vertex attribute.
use bevy::{
gltf::GltfPlugin,
prelude::*,
reflect::TypePath,
render::{
mesh::{MeshVertexAttribute, MeshVertexBufferLayoutRef},
render_resource::*,
},
sprite::{Material2d, Material2dKey, Material2dPlugin},
};
/// This example uses a shader source file from the assets subdirectory
const SHADER_ASSET_PATH: &str = "shaders/custom_gltf_2d.wgsl";
/// This vertex attribute supplies barycentric coordinates for each triangle.
///
/// Each component of the vector corresponds to one corner of a triangle. It's
/// equal to 1.0 in that corner and 0.0 in the other two. Hence, its value in
/// the fragment shader indicates proximity to a corner or the opposite edge.
const ATTRIBUTE_BARYCENTRIC: MeshVertexAttribute =
MeshVertexAttribute::new("Barycentric", 2137464976, VertexFormat::Float32x3);
fn main() {
App::new()
.insert_resource(AmbientLight {
color: Color::WHITE,
brightness: 1.0 / 5.0f32,
..default()
})
.add_plugins((
DefaultPlugins.set(
GltfPlugin::default()
// Map a custom glTF attribute name to a `MeshVertexAttribute`.
// The glTF file used here has an attribute name with *two*
// underscores: __BARYCENTRIC
// One is stripped to do the comparison here.
.add_custom_vertex_attribute("_BARYCENTRIC", ATTRIBUTE_BARYCENTRIC),
),
Material2dPlugin::<CustomMaterial>::default(),
))
.add_systems(Startup, setup)
.run();
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut materials: ResMut<Assets<CustomMaterial>>,
) {
// Add a mesh loaded from a glTF file. This mesh has data for `ATTRIBUTE_BARYCENTRIC`.
let mesh = asset_server.load(
GltfAssetLabel::Primitive {
mesh: 0,
primitive: 0,
}
.from_asset("models/barycentric/barycentric.gltf"),
);
commands.spawn((
Mesh2d(mesh),
MeshMaterial2d(materials.add(CustomMaterial {})),
Transform::from_scale(150.0 * Vec3::ONE),
));
commands.spawn(Camera2d);
}
/// This custom material uses barycentric coordinates from
/// `ATTRIBUTE_BARYCENTRIC` to shade a white border around each triangle. The
/// thickness of the border is animated using the global time shader uniform.
#[derive(Asset, TypePath, AsBindGroup, Debug, Clone)]
struct CustomMaterial {}
impl Material2d for CustomMaterial {
fn vertex_shader() -> ShaderRef {
SHADER_ASSET_PATH.into()
}
fn fragment_shader() -> ShaderRef {
SHADER_ASSET_PATH.into()
}
fn specialize(
descriptor: &mut RenderPipelineDescriptor,
layout: &MeshVertexBufferLayoutRef,
_key: Material2dKey<Self>,
) -> Result<(), SpecializedMeshPipelineError> {
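// Build a vertex buffer layout with position, color, and our custom barycentric
// attribute at the shader locations expected by the shader in `SHADER_ASSET_PATH`.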
let vertex_layout = layout.0.get_layout(&[
Mesh::ATTRIBUTE_POSITION.at_shader_location(0),
Mesh::ATTRIBUTE_COLOR.at_shader_location(1),
ATTRIBUTE_BARYCENTRIC.at_shader_location(2),
])?;
descriptor.vertex.buffers = vec![vertex_layout];
Ok(())
}
}

vendor/bevy/examples/2d/mesh2d.rs vendored Normal file

@@ -0,0 +1,24 @@
//! Shows how to render a polygonal [`Mesh`], generated from a [`Rectangle`] primitive, in a 2D scene.
use bevy::{color::palettes::basic::PURPLE, prelude::*};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorMaterial>>,
) {
commands.spawn(Camera2d);
commands.spawn((
Mesh2d(meshes.add(Rectangle::default())),
MeshMaterial2d(materials.add(Color::from(PURPLE))),
Transform::default().with_scale(Vec3::splat(128.)),
));
}


@@ -0,0 +1,97 @@
//! This example is used to test how transforms interact with alpha modes for [`Mesh2d`] entities with a [`MeshMaterial2d`].
//! It makes sure the depth buffer is used correctly for opaque and transparent 2d meshes.
use bevy::{
color::palettes::css::{BLUE, GREEN, WHITE},
prelude::*,
sprite::AlphaMode2d,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorMaterial>>,
) {
commands.spawn(Camera2d);
let texture_handle = asset_server.load("branding/icon.png");
let mesh_handle = meshes.add(Rectangle::from_size(Vec2::splat(256.0)));
// opaque
// Each sprite should be square with the transparent parts being completely black
// The blue sprite should be on top, with the white and green ones behind it
commands.spawn((
Mesh2d(mesh_handle.clone()),
MeshMaterial2d(materials.add(ColorMaterial {
color: WHITE.into(),
alpha_mode: AlphaMode2d::Opaque,
texture: Some(texture_handle.clone()),
..default()
})),
Transform::from_xyz(-400.0, 0.0, 0.0),
));
commands.spawn((
Mesh2d(mesh_handle.clone()),
MeshMaterial2d(materials.add(ColorMaterial {
color: BLUE.into(),
alpha_mode: AlphaMode2d::Opaque,
texture: Some(texture_handle.clone()),
..default()
})),
Transform::from_xyz(-300.0, 0.0, 1.0),
));
commands.spawn((
Mesh2d(mesh_handle.clone()),
MeshMaterial2d(materials.add(ColorMaterial {
color: GREEN.into(),
alpha_mode: AlphaMode2d::Opaque,
texture: Some(texture_handle.clone()),
..default()
})),
Transform::from_xyz(-200.0, 0.0, -1.0),
));
// Test the interaction between opaque/mask and transparent meshes
// The white sprite should be:
// - masked so that only the icon is opaque and the background is transparent
// - on top of the green sprite
// - behind the blue sprite
commands.spawn((
Mesh2d(mesh_handle.clone()),
MeshMaterial2d(materials.add(ColorMaterial {
color: WHITE.into(),
alpha_mode: AlphaMode2d::Mask(0.5),
texture: Some(texture_handle.clone()),
..default()
})),
Transform::from_xyz(200.0, 0.0, 0.0),
));
commands.spawn((
Mesh2d(mesh_handle.clone()),
MeshMaterial2d(materials.add(ColorMaterial {
color: BLUE.with_alpha(0.7).into(),
alpha_mode: AlphaMode2d::Blend,
texture: Some(texture_handle.clone()),
..default()
})),
Transform::from_xyz(300.0, 0.0, 1.0),
));
commands.spawn((
Mesh2d(mesh_handle.clone()),
MeshMaterial2d(materials.add(ColorMaterial {
color: GREEN.with_alpha(0.7).into(),
alpha_mode: AlphaMode2d::Blend,
texture: Some(texture_handle),
..default()
})),
Transform::from_xyz(400.0, 0.0, -1.0),
));
}

vendor/bevy/examples/2d/mesh2d_arcs.rs vendored Normal file

@@ -0,0 +1,120 @@
//! Demonstrates UV mappings of the [`CircularSector`] and [`CircularSegment`] primitives.
//!
//! Also draws the bounding boxes and circles of the primitives.
use std::f32::consts::FRAC_PI_2;
use bevy::{
color::palettes::css::{BLUE, GRAY, RED},
math::{
bounding::{Bounded2d, BoundingVolume},
Isometry2d,
},
prelude::*,
render::mesh::{CircularMeshUvMode, CircularSectorMeshBuilder, CircularSegmentMeshBuilder},
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(
Update,
(
draw_bounds::<CircularSector>,
draw_bounds::<CircularSegment>,
),
)
.run();
}
#[derive(Component, Debug)]
struct DrawBounds<Shape: Bounded2d + Send + Sync + 'static>(Shape);
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorMaterial>>,
) {
let material = materials.add(asset_server.load("branding/icon.png"));
commands.spawn((
Camera2d,
Camera {
clear_color: ClearColorConfig::Custom(GRAY.into()),
..default()
},
));
const NUM_SLICES: i32 = 8;
const SPACING_X: f32 = 100.0;
const OFFSET_X: f32 = SPACING_X * (NUM_SLICES - 1) as f32 / 2.0;
// This draws NUM_SLICES copies of the Bevy logo as circular sectors and segments,
// with successively larger angles up to a complete circle.
for i in 0..NUM_SLICES {
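// `from_turns` builds the shape from a radius and a fraction of a full turn (1.0 = complete circle).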
let fraction = (i + 1) as f32 / NUM_SLICES as f32;
let sector = CircularSector::from_turns(40.0, fraction);
// We want to rotate the circular sector so that the sectors appear clockwise from north.
// We must rotate it both in the Transform and in the mesh's UV mappings.
let sector_angle = -sector.half_angle();
let sector_mesh =
CircularSectorMeshBuilder::new(sector).uv_mode(CircularMeshUvMode::Mask {
angle: sector_angle,
});
commands.spawn((
Mesh2d(meshes.add(sector_mesh)),
MeshMaterial2d(material.clone()),
Transform {
translation: Vec3::new(SPACING_X * i as f32 - OFFSET_X, 50.0, 0.0),
rotation: Quat::from_rotation_z(sector_angle),
..default()
},
DrawBounds(sector),
));
let segment = CircularSegment::from_turns(40.0, fraction);
// For the circular segment, we will draw Bevy charging forward, which requires rotating the
// shape and texture by 90 degrees.
//
// Note that this may be unintuitive; it may feel like we should rotate the texture by the
// opposite angle to preserve the orientation of Bevy. But the angle is not the angle of the
// texture itself; rather, it is the angle at which the vertices are mapped onto the texture,
// so it is the negative of what you might otherwise expect.
let segment_angle = -FRAC_PI_2;
let segment_mesh =
CircularSegmentMeshBuilder::new(segment).uv_mode(CircularMeshUvMode::Mask {
angle: -segment_angle,
});
commands.spawn((
Mesh2d(meshes.add(segment_mesh)),
MeshMaterial2d(material.clone()),
Transform {
translation: Vec3::new(SPACING_X * i as f32 - OFFSET_X, -50.0, 0.0),
rotation: Quat::from_rotation_z(segment_angle),
..default()
},
DrawBounds(segment),
));
}
}
fn draw_bounds<Shape: Bounded2d + Send + Sync + 'static>(
q: Query<(&DrawBounds<Shape>, &GlobalTransform)>,
mut gizmos: Gizmos,
) {
for (shape, transform) in &q {
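// Flatten the 3D global transform into a 2D isometry (translation plus rotation about Z)
// so we can compute the shape's 2D bounding volumes in world space.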
let (_, rotation, translation) = transform.to_scale_rotation_translation();
let translation = translation.truncate();
let rotation = rotation.to_euler(EulerRot::XYZ).2;
let isometry = Isometry2d::new(translation, Rot2::radians(rotation));
let aabb = shape.0.aabb_2d(isometry);
gizmos.rect_2d(aabb.center(), aabb.half_size() * 2.0, RED);
let bounding_circle = shape.0.bounding_circle(isometry);
gizmos.circle_2d(bounding_circle.center, bounding_circle.radius(), BLUE);
}
}

vendor/bevy/examples/2d/mesh2d_manual.rs vendored Normal file

@@ -0,0 +1,432 @@
//! This example shows how to manually render 2d items using "mid-level render APIs" with a custom
//! pipeline for 2d meshes.
//! It doesn't use the [`Material2d`] abstraction, but changes the vertex buffer to include vertex color.
//! Check out the "mesh2d" example for simpler / higher level 2d meshes.
//!
//! [`Material2d`]: bevy::sprite::Material2d
use bevy::{
asset::weak_handle,
color::palettes::basic::YELLOW,
core_pipeline::core_2d::{Transparent2d, CORE_2D_DEPTH_FORMAT},
math::{ops, FloatOrd},
prelude::*,
render::{
mesh::{Indices, MeshVertexAttribute, RenderMesh},
render_asset::{RenderAssetUsages, RenderAssets},
render_phase::{
AddRenderCommand, DrawFunctions, PhaseItemExtraIndex, SetItemPipeline,
ViewSortedRenderPhases,
},
render_resource::{
BlendState, ColorTargetState, ColorWrites, CompareFunction, DepthBiasState,
DepthStencilState, Face, FragmentState, FrontFace, MultisampleState, PipelineCache,
PolygonMode, PrimitiveState, PrimitiveTopology, RenderPipelineDescriptor,
SpecializedRenderPipeline, SpecializedRenderPipelines, StencilFaceState, StencilState,
TextureFormat, VertexBufferLayout, VertexFormat, VertexState, VertexStepMode,
},
sync_component::SyncComponentPlugin,
sync_world::{MainEntityHashMap, RenderEntity},
view::{ExtractedView, RenderVisibleEntities, ViewTarget},
Extract, Render, RenderApp, RenderSet,
},
sprite::{
extract_mesh2d, DrawMesh2d, Material2dBindGroupId, Mesh2dPipeline, Mesh2dPipelineKey,
Mesh2dTransforms, MeshFlags, RenderMesh2dInstance, SetMesh2dBindGroup,
SetMesh2dViewBindGroup,
},
};
use std::f32::consts::PI;
fn main() {
App::new()
.add_plugins((DefaultPlugins, ColoredMesh2dPlugin))
.add_systems(Startup, star)
.run();
}
fn star(
mut commands: Commands,
// We will add a new Mesh for the star being created
mut meshes: ResMut<Assets<Mesh>>,
) {
// Let's define the mesh for the object we want to draw: a nice star.
// We will specify here what kind of topology is used to define the mesh,
// that is, how triangles are built from the vertices. We will use a
// triangle list, meaning that each vertex of the triangle has to be
// specified. We set `RenderAssetUsages::RENDER_WORLD`, meaning this mesh
// will not be accessible in future frames from the `meshes` resource, in
// order to save on memory once it has been uploaded to the GPU.
let mut star = Mesh::new(
PrimitiveTopology::TriangleList,
RenderAssetUsages::RENDER_WORLD,
);
// Vertices need to have a position attribute. We will use the following
// vertices (I hope you can spot the star in the schema).
//
// 1
//
// 10 2
// 9 0 3
// 8 4
// 6
// 7 5
//
// These vertices are specified in 3D space.
let mut v_pos = vec![[0.0, 0.0, 0.0]];
for i in 0..10 {
// The angle between each vertex is 1/10 of a full rotation.
let a = i as f32 * PI / 5.0;
// The radius of inner vertices (even indices) is 100. For outer vertices (odd indices) it's 200.
let r = (1 - i % 2) as f32 * 100.0 + 100.0;
// Add the vertex position.
v_pos.push([r * ops::sin(a), r * ops::cos(a), 0.0]);
}
// Set the position attribute
star.insert_attribute(Mesh::ATTRIBUTE_POSITION, v_pos);
// And an RGB color attribute as well. A built-in `Mesh::ATTRIBUTE_COLOR` exists, but we
// use a custom vertex attribute here for demonstration purposes.
let mut v_color: Vec<u32> = vec![LinearRgba::BLACK.as_u32()];
v_color.extend_from_slice(&[LinearRgba::from(YELLOW).as_u32(); 10]);
star.insert_attribute(
MeshVertexAttribute::new("Vertex_Color", 1, VertexFormat::Uint32),
v_color,
);
// Now, we specify the indices of the vertices that are going to compose the
// triangles in our star. Vertices in triangles have to be specified in CCW
// winding (that will be the front face, colored). Since we are using a
// triangle list, we will specify each triangle as 3 vertices:
// First triangle: 0, 2, 1
// Second triangle: 0, 3, 2
// Third triangle: 0, 4, 3
// etc
// Last triangle: 0, 1, 10
let mut indices = vec![0, 1, 10];
for i in 2..=10 {
indices.extend_from_slice(&[0, i, i - 1]);
}
star.insert_indices(Indices::U32(indices));
// We can now spawn the entities for the star and the camera
commands.spawn((
// We use a marker component to identify the custom colored meshes
ColoredMesh2d,
// The `Handle<Mesh>` needs to be wrapped in a `Mesh2d` for 2D rendering
Mesh2d(meshes.add(star)),
));
commands.spawn(Camera2d);
}
/// A marker component for colored 2d meshes
#[derive(Component, Default)]
pub struct ColoredMesh2d;
/// Custom pipeline for 2d meshes with vertex colors
#[derive(Resource)]
pub struct ColoredMesh2dPipeline {
/// This pipeline wraps the standard [`Mesh2dPipeline`]
mesh2d_pipeline: Mesh2dPipeline,
}
impl FromWorld for ColoredMesh2dPipeline {
fn from_world(world: &mut World) -> Self {
Self {
mesh2d_pipeline: Mesh2dPipeline::from_world(world),
}
}
}
// We implement `SpecializedRenderPipeline` to customize the default rendering from `Mesh2dPipeline`
impl SpecializedRenderPipeline for ColoredMesh2dPipeline {
type Key = Mesh2dPipelineKey;
fn specialize(&self, key: Self::Key) -> RenderPipelineDescriptor {
// Customize how to store the meshes' vertex attributes in the vertex buffer
// Our meshes only have position and color
let formats = vec![
// Position
VertexFormat::Float32x3,
// Color
VertexFormat::Uint32,
];
let vertex_layout =
VertexBufferLayout::from_vertex_formats(VertexStepMode::Vertex, formats);
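// Pick the render target format: HDR views render to a higher-precision texture format.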
let format = match key.contains(Mesh2dPipelineKey::HDR) {
true => ViewTarget::TEXTURE_FORMAT_HDR,
false => TextureFormat::bevy_default(),
};
RenderPipelineDescriptor {
vertex: VertexState {
// Use our custom shader
shader: COLORED_MESH2D_SHADER_HANDLE,
entry_point: "vertex".into(),
shader_defs: vec![],
// Use our custom vertex buffer
buffers: vec![vertex_layout],
},
fragment: Some(FragmentState {
// Use our custom shader
shader: COLORED_MESH2D_SHADER_HANDLE,
shader_defs: vec![],
entry_point: "fragment".into(),
targets: vec![Some(ColorTargetState {
format,
blend: Some(BlendState::ALPHA_BLENDING),
write_mask: ColorWrites::ALL,
})],
}),
// Use the two standard uniforms for 2d meshes
layout: vec![
// Bind group 0 is the view uniform
self.mesh2d_pipeline.view_layout.clone(),
// Bind group 1 is the mesh uniform
self.mesh2d_pipeline.mesh_layout.clone(),
],
push_constant_ranges: vec![],
primitive: PrimitiveState {
front_face: FrontFace::Ccw,
cull_mode: Some(Face::Back),
unclipped_depth: false,
polygon_mode: PolygonMode::Fill,
conservative: false,
topology: key.primitive_topology(),
strip_index_format: None,
},
depth_stencil: Some(DepthStencilState {
format: CORE_2D_DEPTH_FORMAT,
depth_write_enabled: false,
depth_compare: CompareFunction::GreaterEqual,
stencil: StencilState {
front: StencilFaceState::IGNORE,
back: StencilFaceState::IGNORE,
read_mask: 0,
write_mask: 0,
},
bias: DepthBiasState {
constant: 0,
slope_scale: 0.0,
clamp: 0.0,
},
}),
multisample: MultisampleState {
count: key.msaa_samples(),
mask: !0,
alpha_to_coverage_enabled: false,
},
label: Some("colored_mesh2d_pipeline".into()),
zero_initialize_workgroup_memory: false,
}
}
}
// This specifies how to render a colored 2d mesh
type DrawColoredMesh2d = (
// Set the pipeline
SetItemPipeline,
// Set the view uniform as bind group 0
SetMesh2dViewBindGroup<0>,
// Set the mesh uniform as bind group 1
SetMesh2dBindGroup<1>,
// Draw the mesh
DrawMesh2d,
);
// The custom shader can be written inline like here, included from another file at build time
// using `include_str!()`, or loaded like any other asset with `asset_server.load()`.
const COLORED_MESH2D_SHADER: &str = r"
// Import the standard 2d mesh uniforms and set their bind groups
#import bevy_sprite::mesh2d_functions
// The structure of the vertex buffer is as specified in `specialize()`
struct Vertex {
@builtin(instance_index) instance_index: u32,
@location(0) position: vec3<f32>,
@location(1) color: u32,
};
struct VertexOutput {
// The vertex shader must set the on-screen position of the vertex
@builtin(position) clip_position: vec4<f32>,
// We pass the vertex color to the fragment shader in location 0
@location(0) color: vec4<f32>,
};
/// Entry point for the vertex shader
@vertex
fn vertex(vertex: Vertex) -> VertexOutput {
var out: VertexOutput;
// Project the world position of the mesh into screen position
let model = mesh2d_functions::get_world_from_local(vertex.instance_index);
out.clip_position = mesh2d_functions::mesh2d_position_local_to_clip(model, vec4<f32>(vertex.position, 1.0));
// Unpack the `u32` from the vertex buffer into the `vec4<f32>` used by the fragment shader
out.color = vec4<f32>((vec4<u32>(vertex.color) >> vec4<u32>(0u, 8u, 16u, 24u)) & vec4<u32>(255u)) / 255.0;
return out;
}
// The input of the fragment shader must correspond to the output of the vertex shader for all `location`s
struct FragmentInput {
// The color is interpolated between vertices by default
@location(0) color: vec4<f32>,
};
/// Entry point for the fragment shader
@fragment
fn fragment(in: FragmentInput) -> @location(0) vec4<f32> {
return in.color;
}
";
/// Plugin that renders [`ColoredMesh2d`]s
pub struct ColoredMesh2dPlugin;
/// Handle to the custom shader with a unique random ID
pub const COLORED_MESH2D_SHADER_HANDLE: Handle<Shader> =
weak_handle!("f48b148f-7373-4638-9900-392b3b3ccc66");
/// Our custom pipeline needs its own instance storage
#[derive(Resource, Deref, DerefMut, Default)]
pub struct RenderColoredMesh2dInstances(MainEntityHashMap<RenderMesh2dInstance>);
impl Plugin for ColoredMesh2dPlugin {
fn build(&self, app: &mut App) {
// Load our custom shader
let mut shaders = app.world_mut().resource_mut::<Assets<Shader>>();
shaders.insert(
&COLORED_MESH2D_SHADER_HANDLE,
Shader::from_wgsl(COLORED_MESH2D_SHADER, file!()),
);
app.add_plugins(SyncComponentPlugin::<ColoredMesh2d>::default());
// Register our custom draw function, and add our render systems
app.get_sub_app_mut(RenderApp)
.unwrap()
.add_render_command::<Transparent2d, DrawColoredMesh2d>()
.init_resource::<SpecializedRenderPipelines<ColoredMesh2dPipeline>>()
.init_resource::<RenderColoredMesh2dInstances>()
.add_systems(
ExtractSchedule,
extract_colored_mesh2d.after(extract_mesh2d),
)
.add_systems(Render, queue_colored_mesh2d.in_set(RenderSet::QueueMeshes));
}
fn finish(&self, app: &mut App) {
// Register our custom pipeline
app.get_sub_app_mut(RenderApp)
.unwrap()
.init_resource::<ColoredMesh2dPipeline>();
}
}
/// Extract the [`ColoredMesh2d`] marker component into the render app
pub fn extract_colored_mesh2d(
mut commands: Commands,
mut previous_len: Local<usize>,
// When extracting, you must use `Extract` to mark the `SystemParam`s
// which should be taken from the main world.
query: Extract<
Query<
(
Entity,
RenderEntity,
&ViewVisibility,
&GlobalTransform,
&Mesh2d,
),
With<ColoredMesh2d>,
>,
>,
mut render_mesh_instances: ResMut<RenderColoredMesh2dInstances>,
) {
let mut values = Vec::with_capacity(*previous_len);
for (entity, render_entity, view_visibility, transform, handle) in &query {
if !view_visibility.get() {
continue;
}
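// Capture the world-space transform and mesh flags the render world needs for this instance.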
let transforms = Mesh2dTransforms {
world_from_local: (&transform.affine()).into(),
flags: MeshFlags::empty().bits(),
};
values.push((render_entity, ColoredMesh2d));
render_mesh_instances.insert(
entity.into(),
RenderMesh2dInstance {
mesh_asset_id: handle.0.id(),
transforms,
material_bind_group_id: Material2dBindGroupId::default(),
automatic_batching: false,
tag: 0,
},
);
}
*previous_len = values.len();
commands.try_insert_batch(values);
}
/// Queue the 2d meshes marked with [`ColoredMesh2d`] using our custom pipeline and draw function
pub fn queue_colored_mesh2d(
transparent_draw_functions: Res<DrawFunctions<Transparent2d>>,
colored_mesh2d_pipeline: Res<ColoredMesh2dPipeline>,
mut pipelines: ResMut<SpecializedRenderPipelines<ColoredMesh2dPipeline>>,
pipeline_cache: Res<PipelineCache>,
render_meshes: Res<RenderAssets<RenderMesh>>,
render_mesh_instances: Res<RenderColoredMesh2dInstances>,
mut transparent_render_phases: ResMut<ViewSortedRenderPhases<Transparent2d>>,
views: Query<(&RenderVisibleEntities, &ExtractedView, &Msaa)>,
) {
if render_mesh_instances.is_empty() {
return;
}
// Iterate each view (a camera is a view)
for (visible_entities, view, msaa) in &views {
let Some(transparent_phase) = transparent_render_phases.get_mut(&view.retained_view_entity)
else {
continue;
};
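// Look up our draw function and build the base pipeline key from the view's MSAA and HDR settings.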
let draw_colored_mesh2d = transparent_draw_functions.read().id::<DrawColoredMesh2d>();
let mesh_key = Mesh2dPipelineKey::from_msaa_samples(msaa.samples())
| Mesh2dPipelineKey::from_hdr(view.hdr);
// Queue all entities visible to that view
for (render_entity, visible_entity) in visible_entities.iter::<Mesh2d>() {
if let Some(mesh_instance) = render_mesh_instances.get(visible_entity) {
let mesh2d_handle = mesh_instance.mesh_asset_id;
let mesh2d_transforms = &mesh_instance.transforms;
// Get our specialized pipeline
let mut mesh2d_key = mesh_key;
let Some(mesh) = render_meshes.get(mesh2d_handle) else {
continue;
};
mesh2d_key |= Mesh2dPipelineKey::from_primitive_topology(mesh.primitive_topology());
let pipeline_id =
pipelines.specialize(&pipeline_cache, &colored_mesh2d_pipeline, mesh2d_key);
let mesh_z = mesh2d_transforms.world_from_local.translation.z;
transparent_phase.add(Transparent2d {
entity: (*render_entity, *visible_entity),
draw_function: draw_colored_mesh2d,
pipeline: pipeline_id,
// The 2d render items are sorted according to their z value before rendering,
// in order to get correct transparency
sort_key: FloatOrd(mesh_z),
// This material is not batched
batch_range: 0..1,
extra_index: PhaseItemExtraIndex::None,
extracted_index: usize::MAX,
indexed: mesh.indexed(),
});
}
}
}
}


@@ -0,0 +1,107 @@
//! By default, Bevy loads images into textures that clamp the image to its edges.
//! This example shows how to configure the sampler to repeat the image instead.
use bevy::{
audio::AudioPlugin,
image::{ImageAddressMode, ImageLoaderSettings, ImageSampler, ImageSamplerDescriptor},
math::Affine2,
prelude::*,
};
/// How much to move some rectangles away from the center
const RECTANGLE_OFFSET: f32 = 250.0;
/// Length of the sides of the rectangle
const RECTANGLE_SIDE: f32 = 200.;
/// How much to move the label away from the rectangle
const LABEL_OFFSET: f32 = (RECTANGLE_SIDE / 2.) + 25.;
fn main() {
App::new()
.add_plugins(DefaultPlugins.build().disable::<AudioPlugin>())
.add_systems(Startup, setup)
.run();
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorMaterial>>,
) {
// #11111: We use a duplicated image so that it can be loaded with and without
// settings
let image_with_default_sampler =
asset_server.load("textures/fantasy_ui_borders/panel-border-010.png");
let image_with_repeated_sampler = asset_server.load_with_settings(
"textures/fantasy_ui_borders/panel-border-010-repeated.png",
|s: &mut _| {
*s = ImageLoaderSettings {
sampler: ImageSampler::Descriptor(ImageSamplerDescriptor {
// override the address mode so the image repeats,
address_mode_u: ImageAddressMode::Repeat,
address_mode_v: ImageAddressMode::Repeat,
..default()
}),
..default()
}
},
);
// central rectangle with a non-repeated texture
commands.spawn((
Mesh2d(meshes.add(Rectangle::new(RECTANGLE_SIDE, RECTANGLE_SIDE))),
MeshMaterial2d(materials.add(ColorMaterial {
texture: Some(image_with_default_sampler.clone()),
..default()
})),
Transform::from_translation(Vec3::ZERO),
children![(
Text2d::new("Control"),
Transform::from_xyz(0., LABEL_OFFSET, 0.),
)],
));
// left rectangle with repeated texture
commands.spawn((
Mesh2d(meshes.add(Rectangle::new(RECTANGLE_SIDE, RECTANGLE_SIDE))),
MeshMaterial2d(materials.add(ColorMaterial {
texture: Some(image_with_repeated_sampler),
// uv_transform is used here for scaling only, but it is a full Affine2,
// so you can also apply rotation and translation
uv_transform: Affine2::from_scale(Vec2::new(2., 3.)),
..default()
})),
Transform::from_xyz(-RECTANGLE_OFFSET, 0.0, 0.0),
children![(
Text2d::new("Repeat On"),
Transform::from_xyz(0., LABEL_OFFSET, 0.),
)],
));
// right rectangle with scaled texture, but with default sampler.
commands.spawn((
Mesh2d(meshes.add(Rectangle::new(RECTANGLE_SIDE, RECTANGLE_SIDE))),
MeshMaterial2d(materials.add(ColorMaterial {
// no sampler is set here, so by default you see only a single
// copy of the image in each row/column, and the remaining space
// is filled by the image's edge pixels
texture: Some(image_with_default_sampler),
// uv_transform is used here for scaling only, but it is a full Affine2,
// so you can also apply rotation and translation
uv_transform: Affine2::from_scale(Vec2::new(2., 3.)),
..default()
})),
Transform::from_xyz(RECTANGLE_OFFSET, 0.0, 0.0),
children![(
Text2d::new("Repeat Off"),
Transform::from_xyz(0., LABEL_OFFSET, 0.),
)],
));
// camera
commands.spawn((
Camera2d,
Transform::default().looking_at(Vec3::ZERO, Vec3::Y),
));
}


@@ -0,0 +1,50 @@
//! Shows how to render a polygonal [`Mesh`], generated from a [`Rectangle`] primitive, in a 2D scene.
//! Adds a texture and colored vertices, giving per-vertex tinting.
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorMaterial>>,
asset_server: Res<AssetServer>,
) {
// Load the Bevy logo as a texture
let texture_handle = asset_server.load("branding/banner.png");
// Build a default quad mesh
let mut mesh = Mesh::from(Rectangle::default());
// Build vertex colors for the quad. One entry per vertex (the corners of the quad)
let vertex_colors: Vec<[f32; 4]> = vec![
LinearRgba::RED.to_f32_array(),
LinearRgba::GREEN.to_f32_array(),
LinearRgba::BLUE.to_f32_array(),
LinearRgba::WHITE.to_f32_array(),
];
// Insert the vertex colors as an attribute
mesh.insert_attribute(Mesh::ATTRIBUTE_COLOR, vertex_colors);
let mesh_handle = meshes.add(mesh);
commands.spawn(Camera2d);
// Spawn the quad with vertex colors
commands.spawn((
Mesh2d(mesh_handle.clone()),
MeshMaterial2d(materials.add(ColorMaterial::default())),
Transform::from_translation(Vec3::new(-96., 0., 0.)).with_scale(Vec3::splat(128.)),
));
// Spawning the quad with vertex colors and a texture results in tinting
commands.spawn((
Mesh2d(mesh_handle),
MeshMaterial2d(materials.add(texture_handle)),
Transform::from_translation(Vec3::new(96., 0., 0.)).with_scale(Vec3::splat(128.)),
));
}

vendor/bevy/examples/2d/move_sprite.rs vendored Normal file

@@ -0,0 +1,44 @@
//! Renders a 2D scene containing a single, moving sprite.
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, sprite_movement)
.run();
}
#[derive(Component)]
enum Direction {
Left,
Right,
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn(Camera2d);
commands.spawn((
Sprite::from_image(asset_server.load("branding/icon.png")),
Transform::from_xyz(0., 0., 0.),
Direction::Right,
));
}
/// The sprite is animated by changing its translation depending on the time that has passed since
/// the last frame.
fn sprite_movement(time: Res<Time>, mut sprite_position: Query<(&mut Direction, &mut Transform)>) {
for (mut logo, mut transform) in &mut sprite_position {
match *logo {
Direction::Right => transform.translation.x += 150. * time.delta_secs(),
Direction::Left => transform.translation.x -= 150. * time.delta_secs(),
}
if transform.translation.x > 200. {
*logo = Direction::Left;
} else if transform.translation.x < -200. {
*logo = Direction::Right;
}
}
}


@@ -0,0 +1,159 @@
//! Shows how to create graphics that snap to the pixel grid by rendering to a texture in 2D
use bevy::{
color::palettes::css::GRAY,
prelude::*,
render::{
camera::RenderTarget,
render_resource::{
Extent3d, TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
},
view::RenderLayers,
},
window::WindowResized,
};
/// In-game resolution width.
const RES_WIDTH: u32 = 160;
/// In-game resolution height.
const RES_HEIGHT: u32 = 90;
/// Default render layers for pixel-perfect rendering.
/// You can skip adding this component, as this is the default.
const PIXEL_PERFECT_LAYERS: RenderLayers = RenderLayers::layer(0);
/// Render layers for high-resolution rendering.
const HIGH_RES_LAYERS: RenderLayers = RenderLayers::layer(1);
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(ImagePlugin::default_nearest()))
.add_systems(Startup, (setup_camera, setup_sprite, setup_mesh))
.add_systems(Update, (rotate, fit_canvas))
.run();
}
/// Low-resolution texture that contains the pixel-perfect world.
/// Canvas itself is rendered to the high-resolution world.
#[derive(Component)]
struct Canvas;
/// Camera that renders the pixel-perfect world to the [`Canvas`].
#[derive(Component)]
struct InGameCamera;
/// Camera that renders the [`Canvas`] (and other graphics on [`HIGH_RES_LAYERS`]) to the screen.
#[derive(Component)]
struct OuterCamera;
#[derive(Component)]
struct Rotate;
fn setup_sprite(mut commands: Commands, asset_server: Res<AssetServer>) {
// The sample sprite that will be rendered to the pixel-perfect canvas
commands.spawn((
Sprite::from_image(asset_server.load("pixel/bevy_pixel_dark.png")),
Transform::from_xyz(-45., 20., 2.),
Rotate,
PIXEL_PERFECT_LAYERS,
));
// The sample sprite that will be rendered to the high-res "outer world"
commands.spawn((
Sprite::from_image(asset_server.load("pixel/bevy_pixel_light.png")),
Transform::from_xyz(-45., -20., 2.),
Rotate,
HIGH_RES_LAYERS,
));
}
/// Spawns a capsule mesh on the pixel-perfect layer.
fn setup_mesh(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorMaterial>>,
) {
commands.spawn((
Mesh2d(meshes.add(Capsule2d::default())),
MeshMaterial2d(materials.add(Color::BLACK)),
Transform::from_xyz(25., 0., 2.).with_scale(Vec3::splat(32.)),
Rotate,
PIXEL_PERFECT_LAYERS,
));
}
fn setup_camera(mut commands: Commands, mut images: ResMut<Assets<Image>>) {
let canvas_size = Extent3d {
width: RES_WIDTH,
height: RES_HEIGHT,
..default()
};
// This Image serves as a canvas representing the low-resolution game screen
let mut canvas = Image {
texture_descriptor: TextureDescriptor {
label: None,
size: canvas_size,
dimension: TextureDimension::D2,
format: TextureFormat::Bgra8UnormSrgb,
mip_level_count: 1,
sample_count: 1,
usage: TextureUsages::TEXTURE_BINDING
| TextureUsages::COPY_DST
| TextureUsages::RENDER_ATTACHMENT,
view_formats: &[],
},
..default()
};
// Fill image.data with zeroes
canvas.resize(canvas_size);
let image_handle = images.add(canvas);
// This camera renders whatever is on `PIXEL_PERFECT_LAYERS` to the canvas
commands.spawn((
Camera2d,
Camera {
// Render before the "main pass" camera
order: -1,
target: RenderTarget::Image(image_handle.clone().into()),
clear_color: ClearColorConfig::Custom(GRAY.into()),
..default()
},
Msaa::Off,
InGameCamera,
PIXEL_PERFECT_LAYERS,
));
// Spawn the canvas
commands.spawn((Sprite::from_image(image_handle), Canvas, HIGH_RES_LAYERS));
// The "outer" camera renders whatever is on `HIGH_RES_LAYERS` to the screen.
// here, the canvas and one of the sample sprites will be rendered by this camera
commands.spawn((Camera2d, Msaa::Off, OuterCamera, HIGH_RES_LAYERS));
}
/// Rotates entities to demonstrate grid snapping.
fn rotate(time: Res<Time>, mut transforms: Query<&mut Transform, With<Rotate>>) {
for mut transform in &mut transforms {
let dt = time.delta_secs();
transform.rotate_z(dt);
}
}
/// Scales camera projection to fit the window (integer multiples only).
fn fit_canvas(
mut resize_events: EventReader<WindowResized>,
mut projection: Single<&mut Projection, With<OuterCamera>>,
) {
let Projection::Orthographic(projection) = &mut **projection else {
return;
};
for event in resize_events.read() {
let h_scale = event.width / RES_WIDTH as f32;
let v_scale = event.height / RES_HEIGHT as f32;
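// Round to a whole-number zoom factor so each canvas pixel maps to an integer number of screen pixels.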
projection.scale = 1. / h_scale.min(v_scale).round();
}
}

vendor/bevy/examples/2d/rotation.rs vendored Normal file

@@ -0,0 +1,246 @@
//! Demonstrates rotating entities in 2D using quaternions.
use bevy::{math::ops, prelude::*};
const BOUNDS: Vec2 = Vec2::new(1200.0, 640.0);
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.insert_resource(Time::<Fixed>::from_hz(60.0))
.add_systems(Startup, setup)
.add_systems(
FixedUpdate,
(
player_movement_system,
snap_to_player_system,
rotate_to_player_system,
),
)
.run();
}
/// Player component
#[derive(Component)]
struct Player {
/// Linear speed in meters per second
movement_speed: f32,
/// Rotation speed in radians per second
rotation_speed: f32,
}
/// Snap to player ship behavior
#[derive(Component)]
struct SnapToPlayer;
/// Rotate to face player ship behavior
#[derive(Component)]
struct RotateToPlayer {
/// Rotation speed in radians per second
rotation_speed: f32,
}
/// Adds the game's entities to our world and creates an orthographic camera for 2D rendering.
///
/// The Bevy coordinate system is the same for 2D and 3D, in terms of 2D this means that:
///
/// * `X` axis goes from left to right (`+X` points right)
/// * `Y` axis goes from bottom to top (`+Y` points up)
/// * `Z` axis goes from far to near (`+Z` points towards you, out of the screen)
///
/// The origin is at the center of the screen.
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
let ship_handle = asset_server.load("textures/simplespace/ship_C.png");
let enemy_a_handle = asset_server.load("textures/simplespace/enemy_A.png");
let enemy_b_handle = asset_server.load("textures/simplespace/enemy_B.png");
commands.spawn(Camera2d);
// Create a minimal UI explaining how to interact with the example
commands.spawn((
Text::new("Up Arrow: Move Forward\nLeft / Right Arrow: Turn"),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
let horizontal_margin = BOUNDS.x / 4.0;
let vertical_margin = BOUNDS.y / 4.0;
// Player controlled ship
commands.spawn((
Sprite::from_image(ship_handle),
Player {
movement_speed: 500.0, // Meters per second
rotation_speed: f32::to_radians(360.0), // Degrees per second
},
));
// Enemy that snaps to face the player spawns on the bottom and left
commands.spawn((
Sprite::from_image(enemy_a_handle.clone()),
Transform::from_xyz(0.0 - horizontal_margin, 0.0, 0.0),
SnapToPlayer,
));
commands.spawn((
Sprite::from_image(enemy_a_handle),
Transform::from_xyz(0.0, 0.0 - vertical_margin, 0.0),
SnapToPlayer,
));
// Enemy that rotates to face the player enemy spawns on the top and right
commands.spawn((
Sprite::from_image(enemy_b_handle.clone()),
Transform::from_xyz(0.0 + horizontal_margin, 0.0, 0.0),
RotateToPlayer {
rotation_speed: f32::to_radians(45.0), // Degrees per second
},
));
commands.spawn((
Sprite::from_image(enemy_b_handle),
Transform::from_xyz(0.0, 0.0 + vertical_margin, 0.0),
RotateToPlayer {
rotation_speed: f32::to_radians(90.0), // Degrees per second
},
));
}
/// Demonstrates applying rotation and movement based on keyboard input.
fn player_movement_system(
time: Res<Time>,
keyboard_input: Res<ButtonInput<KeyCode>>,
query: Single<(&Player, &mut Transform)>,
) {
let (ship, mut transform) = query.into_inner();
let mut rotation_factor = 0.0;
let mut movement_factor = 0.0;
if keyboard_input.pressed(KeyCode::ArrowLeft) {
rotation_factor += 1.0;
}
if keyboard_input.pressed(KeyCode::ArrowRight) {
rotation_factor -= 1.0;
}
if keyboard_input.pressed(KeyCode::ArrowUp) {
movement_factor += 1.0;
}
// Update the ship rotation around the Z axis (perpendicular to the 2D plane of the screen)
transform.rotate_z(rotation_factor * ship.rotation_speed * time.delta_secs());
// Get the ship's forward vector by applying the current rotation to the ship's initial facing
// vector
let movement_direction = transform.rotation * Vec3::Y;
// Get the distance the ship will move based on direction, the ship's movement speed and delta
// time
let movement_distance = movement_factor * ship.movement_speed * time.delta_secs();
// Create the change in translation using the new movement direction and distance
let translation_delta = movement_direction * movement_distance;
// Update the ship translation with our new translation delta
transform.translation += translation_delta;
// Bound the ship within the invisible level bounds
let extents = Vec3::from((BOUNDS / 2.0, 0.0));
transform.translation = transform.translation.min(extents).max(-extents);
}
/// Demonstrates snapping the enemy ship to face the player ship immediately.
fn snap_to_player_system(
mut query: Query<&mut Transform, (With<SnapToPlayer>, Without<Player>)>,
player_transform: Single<&Transform, With<Player>>,
) {
// Get the player translation in 2D
let player_translation = player_transform.translation.xy();
for mut enemy_transform in &mut query {
// Get the vector from the enemy ship to the player ship in 2D and normalize it.
let to_player = (player_translation - enemy_transform.translation.xy()).normalize();
// Get the quaternion to rotate from the initial enemy facing direction to the direction
// facing the player
let rotate_to_player = Quat::from_rotation_arc(Vec3::Y, to_player.extend(0.));
// Rotate the enemy to face the player
enemy_transform.rotation = rotate_to_player;
}
}
/// Demonstrates rotating an enemy ship to face the player ship at a given rotation speed.
///
/// This method uses the vector dot product to determine if the enemy is facing the player and
/// if not, which way to rotate to face the player. The dot product on two unit length vectors
/// will return a value between -1.0 and +1.0 which tells us the following about the two vectors:
///
/// * If the result is 1.0 the vectors are pointing in the same direction, the angle between them is
/// 0 degrees.
/// * If the result is 0.0 the vectors are perpendicular, the angle between them is 90 degrees.
/// * If the result is -1.0 the vectors are parallel but pointing in opposite directions, the angle
/// between them is 180 degrees.
/// * If the result is positive the vectors are pointing in roughly the same direction, the angle
/// between them is greater than 0 and less than 90 degrees.
/// * If the result is negative the vectors are pointing in roughly opposite directions, the angle
/// between them is greater than 90 and less than 180 degrees.
///
/// It is possible to get the angle by taking the arc cosine (`acos`) of the dot product, though
/// this is often unnecessary. Beware that `acos` will return `NaN` if the input is less than
/// -1.0 or greater than 1.0. This can happen even when working with unit vectors due to floating
/// point precision loss, so it pays to clamp your dot product value before calling `acos`.
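///
/// A quick numerical sketch (illustrative values, not part of the upstream example):
///
/// ```text
/// enemy_forward      = (0.0, 1.0)      // enemy facing straight up (identity rotation)
/// to_player          = (0.707, 0.707)  // player 45 degrees to the enemy's right
/// forward_dot_player = 0.707           // roughly cos(45 degrees)
/// acos(0.707)       ~= 0.785 rad       // 45 degrees of rotation still needed
/// right_dot_player   = 0.707 (> 0)     // so rotation_sign = -1.0: rotate clockwise
/// ```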
fn rotate_to_player_system(
time: Res<Time>,
mut query: Query<(&RotateToPlayer, &mut Transform), Without<Player>>,
player_transform: Single<&Transform, With<Player>>,
) {
// Get the player translation in 2D
let player_translation = player_transform.translation.xy();
for (config, mut enemy_transform) in &mut query {
// Get the enemy ship forward vector in 2D (already unit length)
let enemy_forward = (enemy_transform.rotation * Vec3::Y).xy();
// Get the vector from the enemy ship to the player ship in 2D and normalize it.
let to_player = (player_translation - enemy_transform.translation.xy()).normalize();
// Get the dot product between the enemy forward vector and the direction to the player.
let forward_dot_player = enemy_forward.dot(to_player);
// If the dot product is approximately 1.0 then the enemy is already facing the player and
// we can early out.
if (forward_dot_player - 1.0).abs() < f32::EPSILON {
continue;
}
// Get the right vector of the enemy ship in 2D (already unit length)
let enemy_right = (enemy_transform.rotation * Vec3::X).xy();
// Get the dot product of the enemy right vector and the direction to the player ship.
// If the dot product is negative then we need to rotate counter-clockwise; if it is
// positive we need to rotate clockwise. Note that `copysign` will still return 1.0 if the
// dot product is 0.0 (which happens when the player is directly behind the enemy and is
// therefore perpendicular to the right vector).
let right_dot_player = enemy_right.dot(to_player);
// Determine the sign of rotation from the right dot player. We need to negate the sign
// here as the 2D bevy co-ordinate system rotates around +Z, which is pointing out of the
// screen. Due to the right hand rule, positive rotation around +Z is counter clockwise and
// negative is clockwise.
let rotation_sign = -f32::copysign(1.0, right_dot_player);
// Limit rotation so we don't overshoot the target. We need to convert our dot product to
// an angle here so we can get an angle of rotation to clamp against.
let max_angle = ops::acos(forward_dot_player.clamp(-1.0, 1.0)); // Clamp acos for safety
// Calculate angle of rotation with limit
let rotation_angle =
rotation_sign * (config.rotation_speed * time.delta_secs()).min(max_angle);
// Rotate the enemy to face the player
enemy_transform.rotate_z(rotation_angle);
}
}

18
vendor/bevy/examples/2d/sprite.rs vendored Normal file
View File

@@ -0,0 +1,18 @@
//! Displays a single [`Sprite`], created from an image.
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn(Camera2d);
commands.spawn(Sprite::from_image(
asset_server.load("branding/bevy_bird_dark.png"),
));
}

View File

@@ -0,0 +1,145 @@
//! Animates a sprite in response to a keyboard event.
//!
//! See `sprite_sheet.rs` for an example where the sprite animation loops indefinitely.
use std::time::Duration;
use bevy::{input::common_conditions::input_just_pressed, prelude::*};
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(ImagePlugin::default_nearest())) // prevents blurry sprites
.add_systems(Startup, setup)
.add_systems(Update, execute_animations)
.add_systems(
Update,
(
// Press the right arrow key to animate the right sprite
trigger_animation::<RightSprite>.run_if(input_just_pressed(KeyCode::ArrowRight)),
// Press the left arrow key to animate the left sprite
trigger_animation::<LeftSprite>.run_if(input_just_pressed(KeyCode::ArrowLeft)),
),
)
.run();
}
// This system runs when the user presses the left or right arrow key
fn trigger_animation<S: Component>(mut animation: Single<&mut AnimationConfig, With<S>>) {
// We create a new timer when the animation is triggered
animation.frame_timer = AnimationConfig::timer_from_fps(animation.fps);
}
#[derive(Component)]
struct AnimationConfig {
first_sprite_index: usize,
last_sprite_index: usize,
fps: u8,
frame_timer: Timer,
}
impl AnimationConfig {
fn new(first: usize, last: usize, fps: u8) -> Self {
Self {
first_sprite_index: first,
last_sprite_index: last,
fps,
frame_timer: Self::timer_from_fps(fps),
}
}
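// Converts a frame rate into a per-frame duration: e.g. 10 fps -> 0.1 s per frame and
// 20 fps -> 0.05 s per frame (illustrative numbers). `TimerMode::Once` is used because the
// timer is rebuilt each time a new frame is shown.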
fn timer_from_fps(fps: u8) -> Timer {
Timer::new(Duration::from_secs_f32(1.0 / (fps as f32)), TimerMode::Once)
}
}
// This system loops through all the sprites in the `TextureAtlas`, from `first_sprite_index` to
// `last_sprite_index` (both defined in `AnimationConfig`).
fn execute_animations(time: Res<Time>, mut query: Query<(&mut AnimationConfig, &mut Sprite)>) {
for (mut config, mut sprite) in &mut query {
// We track how long the current sprite has been displayed for
config.frame_timer.tick(time.delta());
// If it has been displayed for the user-defined amount of time (fps)...
if config.frame_timer.just_finished() {
if let Some(atlas) = &mut sprite.texture_atlas {
if atlas.index == config.last_sprite_index {
// ...and it IS the last frame, then we move back to the first frame and stop.
atlas.index = config.first_sprite_index;
} else {
// ...and it is NOT the last frame, then we move to the next frame...
atlas.index += 1;
// ...and reset the frame timer to start counting all over again
config.frame_timer = AnimationConfig::timer_from_fps(config.fps);
}
}
}
}
}
#[derive(Component)]
struct LeftSprite;
#[derive(Component)]
struct RightSprite;
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut texture_atlas_layouts: ResMut<Assets<TextureAtlasLayout>>,
) {
commands.spawn(Camera2d);
// Create a minimal UI explaining how to interact with the example
commands.spawn((
Text::new("Left Arrow: Animate Left Sprite\nRight Arrow: Animate Right Sprite"),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
// Load the sprite sheet using the `AssetServer`
let texture = asset_server.load("textures/rpg/chars/gabe/gabe-idle-run.png");
// The sprite sheet has 7 sprites arranged in a row, and they are all 24px x 24px
let layout = TextureAtlasLayout::from_grid(UVec2::splat(24), 7, 1, None, None);
let texture_atlas_layout = texture_atlas_layouts.add(layout);
// The first (left-hand) sprite runs at 10 FPS
let animation_config_1 = AnimationConfig::new(1, 6, 10);
// Create the first (left-hand) sprite
commands.spawn((
Sprite {
image: texture.clone(),
texture_atlas: Some(TextureAtlas {
layout: texture_atlas_layout.clone(),
index: animation_config_1.first_sprite_index,
}),
..default()
},
Transform::from_scale(Vec3::splat(6.0)).with_translation(Vec3::new(-70.0, 0.0, 0.0)),
LeftSprite,
animation_config_1,
));
// The second (right-hand) sprite runs at 20 FPS
let animation_config_2 = AnimationConfig::new(1, 6, 20);
// Create the second (right-hand) sprite
commands.spawn((
Sprite {
image: texture.clone(),
texture_atlas: Some(TextureAtlas {
layout: texture_atlas_layout.clone(),
index: animation_config_2.first_sprite_index,
}),
..Default::default()
},
Transform::from_scale(Vec3::splat(6.0)).with_translation(Vec3::new(70.0, 0.0, 0.0)),
RightSprite,
animation_config_2,
));
}

View File

@@ -0,0 +1,23 @@
//! Displays a single [`Sprite`], created from an image, but flipped on one axis.
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn(Camera2d);
commands.spawn(Sprite {
image: asset_server.load("branding/bevy_bird_dark.png"),
// Flip the logo to the left
flip_x: true,
// And don't flip it upside-down (the default)
flip_y: false,
..Default::default()
});
}

332
vendor/bevy/examples/2d/sprite_scale.rs vendored Normal file
View File

@@ -0,0 +1,332 @@
//! Shows how to use sprite scaling modes to fill and fit textures within a sprite's custom size.
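//!
//! As a rough guide (summarized here for convenience; see the `SpriteImageMode` and
//! `ScalingMode` docs for the authoritative behavior): the `Fill*` modes scale the texture to
//! cover the whole custom size, cropping any overflow, while the `Fit*` modes scale it so the
//! whole texture stays visible, leaving empty space on one axis; `Start`, `Center` and `End`
//! choose where the texture is anchored within the sprite.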
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(
Startup,
(setup_sprites, setup_texture_atlas).after(setup_camera),
)
.add_systems(Update, animate_sprite)
.run();
}
fn setup_camera(mut commands: Commands) {
commands.spawn(Camera2d);
}
fn setup_sprites(mut commands: Commands, asset_server: Res<AssetServer>) {
let square = asset_server.load("textures/slice_square_2.png");
let banner = asset_server.load("branding/banner.png");
let rects = [
Rect {
size: Vec2::new(100., 225.),
text: "Stretched".to_string(),
transform: Transform::from_translation(Vec3::new(-570., 230., 0.)),
texture: square.clone(),
image_mode: SpriteImageMode::Auto,
},
Rect {
size: Vec2::new(100., 225.),
text: "Fill Center".to_string(),
transform: Transform::from_translation(Vec3::new(-450., 230., 0.)),
texture: square.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillCenter),
},
Rect {
size: Vec2::new(100., 225.),
text: "Fill Start".to_string(),
transform: Transform::from_translation(Vec3::new(-330., 230., 0.)),
texture: square.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillStart),
},
Rect {
size: Vec2::new(100., 225.),
text: "Fill End".to_string(),
transform: Transform::from_translation(Vec3::new(-210., 230., 0.)),
texture: square.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillEnd),
},
Rect {
size: Vec2::new(300., 100.),
text: "Fill Start Horizontal".to_string(),
transform: Transform::from_translation(Vec3::new(10., 290., 0.)),
texture: square.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillStart),
},
Rect {
size: Vec2::new(300., 100.),
text: "Fill End Horizontal".to_string(),
transform: Transform::from_translation(Vec3::new(10., 155., 0.)),
texture: square.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillEnd),
},
Rect {
size: Vec2::new(200., 200.),
text: "Fill Center".to_string(),
transform: Transform::from_translation(Vec3::new(280., 230., 0.)),
texture: banner.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillCenter),
},
Rect {
size: Vec2::new(200., 100.),
text: "Fill Center".to_string(),
transform: Transform::from_translation(Vec3::new(500., 230., 0.)),
texture: square.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillCenter),
},
Rect {
size: Vec2::new(100., 100.),
text: "Stretched".to_string(),
transform: Transform::from_translation(Vec3::new(-570., -40., 0.)),
texture: banner.clone(),
image_mode: SpriteImageMode::Auto,
},
Rect {
size: Vec2::new(200., 200.),
text: "Fit Center".to_string(),
transform: Transform::from_translation(Vec3::new(-400., -40., 0.)),
texture: banner.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FitCenter),
},
Rect {
size: Vec2::new(200., 200.),
text: "Fit Start".to_string(),
transform: Transform::from_translation(Vec3::new(-180., -40., 0.)),
texture: banner.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FitStart),
},
Rect {
size: Vec2::new(200., 200.),
text: "Fit End".to_string(),
transform: Transform::from_translation(Vec3::new(40., -40., 0.)),
texture: banner.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FitEnd),
},
Rect {
size: Vec2::new(100., 200.),
text: "Fit Center".to_string(),
transform: Transform::from_translation(Vec3::new(210., -40., 0.)),
texture: banner.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FitCenter),
},
];
for rect in rects {
let mut cmd = commands.spawn((
Sprite {
image: rect.texture,
custom_size: Some(rect.size),
image_mode: rect.image_mode,
..default()
},
rect.transform,
));
cmd.with_children(|builder| {
builder.spawn((
Text2d::new(rect.text),
TextLayout::new_with_justify(JustifyText::Center),
TextFont::from_font_size(15.),
Transform::from_xyz(0., -0.5 * rect.size.y - 10., 0.),
bevy::sprite::Anchor::TopCenter,
));
});
}
}
fn setup_texture_atlas(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut texture_atlas_layouts: ResMut<Assets<TextureAtlasLayout>>,
) {
commands.spawn(Camera2d);
let gabe = asset_server.load("textures/rpg/chars/gabe/gabe-idle-run.png");
let animation_indices_gabe = AnimationIndices { first: 0, last: 6 };
let gabe_atlas = TextureAtlas {
layout: texture_atlas_layouts.add(TextureAtlasLayout::from_grid(
UVec2::splat(24),
7,
1,
None,
None,
)),
index: animation_indices_gabe.first,
};
let sprite_sheets = [
SpriteSheet {
size: Vec2::new(120., 50.),
text: "Stretched".to_string(),
transform: Transform::from_translation(Vec3::new(-570., -200., 0.)),
texture: gabe.clone(),
image_mode: SpriteImageMode::Auto,
atlas: gabe_atlas.clone(),
indices: animation_indices_gabe.clone(),
timer: AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
},
SpriteSheet {
size: Vec2::new(120., 50.),
text: "Fill Center".to_string(),
transform: Transform::from_translation(Vec3::new(-570., -300., 0.)),
texture: gabe.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillCenter),
atlas: gabe_atlas.clone(),
indices: animation_indices_gabe.clone(),
timer: AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
},
SpriteSheet {
size: Vec2::new(120., 50.),
text: "Fill Start".to_string(),
transform: Transform::from_translation(Vec3::new(-430., -200., 0.)),
texture: gabe.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillStart),
atlas: gabe_atlas.clone(),
indices: animation_indices_gabe.clone(),
timer: AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
},
SpriteSheet {
size: Vec2::new(120., 50.),
text: "Fill End".to_string(),
transform: Transform::from_translation(Vec3::new(-430., -300., 0.)),
texture: gabe.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillEnd),
atlas: gabe_atlas.clone(),
indices: animation_indices_gabe.clone(),
timer: AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
},
SpriteSheet {
size: Vec2::new(50., 120.),
text: "Fill Center".to_string(),
transform: Transform::from_translation(Vec3::new(-300., -250., 0.)),
texture: gabe.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillCenter),
atlas: gabe_atlas.clone(),
indices: animation_indices_gabe.clone(),
timer: AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
},
SpriteSheet {
size: Vec2::new(50., 120.),
text: "Fill Start".to_string(),
transform: Transform::from_translation(Vec3::new(-190., -250., 0.)),
texture: gabe.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillStart),
atlas: gabe_atlas.clone(),
indices: animation_indices_gabe.clone(),
timer: AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
},
SpriteSheet {
size: Vec2::new(50., 120.),
text: "Fill End".to_string(),
transform: Transform::from_translation(Vec3::new(-90., -250., 0.)),
texture: gabe.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FillEnd),
atlas: gabe_atlas.clone(),
indices: animation_indices_gabe.clone(),
timer: AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
},
SpriteSheet {
size: Vec2::new(120., 50.),
text: "Fit Center".to_string(),
transform: Transform::from_translation(Vec3::new(20., -200., 0.)),
texture: gabe.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FitCenter),
atlas: gabe_atlas.clone(),
indices: animation_indices_gabe.clone(),
timer: AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
},
SpriteSheet {
size: Vec2::new(120., 50.),
text: "Fit Start".to_string(),
transform: Transform::from_translation(Vec3::new(20., -300., 0.)),
texture: gabe.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FitStart),
atlas: gabe_atlas.clone(),
indices: animation_indices_gabe.clone(),
timer: AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
},
SpriteSheet {
size: Vec2::new(120., 50.),
text: "Fit End".to_string(),
transform: Transform::from_translation(Vec3::new(160., -200., 0.)),
texture: gabe.clone(),
image_mode: SpriteImageMode::Scale(ScalingMode::FitEnd),
atlas: gabe_atlas.clone(),
indices: animation_indices_gabe.clone(),
timer: AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
},
];
for sprite_sheet in sprite_sheets {
let mut cmd = commands.spawn((
Sprite {
image_mode: sprite_sheet.image_mode,
custom_size: Some(sprite_sheet.size),
..Sprite::from_atlas_image(sprite_sheet.texture.clone(), sprite_sheet.atlas.clone())
},
sprite_sheet.indices,
sprite_sheet.timer,
sprite_sheet.transform,
));
cmd.with_children(|builder| {
builder.spawn((
Text2d::new(sprite_sheet.text),
TextLayout::new_with_justify(JustifyText::Center),
TextFont::from_font_size(15.),
Transform::from_xyz(0., -0.5 * sprite_sheet.size.y - 10., 0.),
bevy::sprite::Anchor::TopCenter,
));
});
}
}
struct Rect {
size: Vec2,
text: String,
transform: Transform,
texture: Handle<Image>,
image_mode: SpriteImageMode,
}
struct SpriteSheet {
size: Vec2,
text: String,
transform: Transform,
texture: Handle<Image>,
image_mode: SpriteImageMode,
atlas: TextureAtlas,
indices: AnimationIndices,
timer: AnimationTimer,
}
#[derive(Component, Clone)]
struct AnimationIndices {
first: usize,
last: usize,
}
#[derive(Component, Deref, DerefMut)]
struct AnimationTimer(Timer);
fn animate_sprite(
time: Res<Time>,
mut query: Query<(&AnimationIndices, &mut AnimationTimer, &mut Sprite)>,
) {
for (indices, mut timer, mut sprite) in &mut query {
timer.tick(time.delta());
if timer.just_finished() {
if let Some(atlas) = &mut sprite.texture_atlas {
atlas.index = if atlas.index == indices.last {
indices.first
} else {
atlas.index + 1
};
}
}
}
}

67
vendor/bevy/examples/2d/sprite_sheet.rs vendored Normal file
View File

@@ -0,0 +1,67 @@
//! Renders an animated sprite by loading all animation frames from a single image (a sprite sheet)
//! into a texture atlas, and changing the displayed image periodically.
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(ImagePlugin::default_nearest())) // prevents blurry sprites
.add_systems(Startup, setup)
.add_systems(Update, animate_sprite)
.run();
}
#[derive(Component)]
struct AnimationIndices {
first: usize,
last: usize,
}
#[derive(Component, Deref, DerefMut)]
struct AnimationTimer(Timer);
fn animate_sprite(
time: Res<Time>,
mut query: Query<(&AnimationIndices, &mut AnimationTimer, &mut Sprite)>,
) {
for (indices, mut timer, mut sprite) in &mut query {
timer.tick(time.delta());
if timer.just_finished() {
if let Some(atlas) = &mut sprite.texture_atlas {
atlas.index = if atlas.index == indices.last {
indices.first
} else {
atlas.index + 1
};
}
}
}
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut texture_atlas_layouts: ResMut<Assets<TextureAtlasLayout>>,
) {
let texture = asset_server.load("textures/rpg/chars/gabe/gabe-idle-run.png");
let layout = TextureAtlasLayout::from_grid(UVec2::splat(24), 7, 1, None, None);
let texture_atlas_layout = texture_atlas_layouts.add(layout);
// Use only the subset of sprites in the sheet that make up the run animation
let animation_indices = AnimationIndices { first: 1, last: 6 };
commands.spawn(Camera2d);
commands.spawn((
Sprite::from_atlas_image(
texture,
TextureAtlas {
layout: texture_atlas_layout,
index: animation_indices.first,
},
),
Transform::from_scale(Vec3::splat(6.0)),
animation_indices,
AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)),
));
}

136
vendor/bevy/examples/2d/sprite_slice.rs vendored Normal file
View File

@@ -0,0 +1,136 @@
//! Showcases sprite 9-slice scaling and tiling features, enabling usage of
//! sprites at multiple resolutions while keeping them in proportion.
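//!
//! (Added note: in 9-slice scaling the configured border splits the texture into nine regions.
//! The four corners keep their proportions, the edges scale along a single axis, and the center
//! scales in both; each non-corner region can either stretch or tile, as the cases below
//! demonstrate.)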
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
fn spawn_sprites(
commands: &mut Commands,
texture_handle: Handle<Image>,
mut position: Vec3,
slice_border: f32,
style: TextFont,
gap: f32,
) {
let cases = [
// Reference sprite
(
"Original",
style.clone(),
Vec2::splat(100.0),
SpriteImageMode::Auto,
),
// Scaled regular sprite
(
"Stretched",
style.clone(),
Vec2::new(100.0, 200.0),
SpriteImageMode::Auto,
),
// Stretched Scaled sliced sprite
(
"With Slicing",
style.clone(),
Vec2::new(100.0, 200.0),
SpriteImageMode::Sliced(TextureSlicer {
border: BorderRect::all(slice_border),
center_scale_mode: SliceScaleMode::Stretch,
..default()
}),
),
// Scaled sliced sprite
(
"With Tiling",
style.clone(),
Vec2::new(100.0, 200.0),
SpriteImageMode::Sliced(TextureSlicer {
border: BorderRect::all(slice_border),
center_scale_mode: SliceScaleMode::Tile { stretch_value: 0.5 },
sides_scale_mode: SliceScaleMode::Tile { stretch_value: 0.2 },
..default()
}),
),
// Scaled sliced sprite horizontally
(
"With Tiling",
style.clone(),
Vec2::new(300.0, 200.0),
SpriteImageMode::Sliced(TextureSlicer {
border: BorderRect::all(slice_border),
center_scale_mode: SliceScaleMode::Tile { stretch_value: 0.2 },
sides_scale_mode: SliceScaleMode::Tile { stretch_value: 0.3 },
..default()
}),
),
// Scaled sliced sprite horizontally with max scale
(
"With Corners Constrained",
style,
Vec2::new(300.0, 200.0),
SpriteImageMode::Sliced(TextureSlicer {
border: BorderRect::all(slice_border),
center_scale_mode: SliceScaleMode::Tile { stretch_value: 0.1 },
sides_scale_mode: SliceScaleMode::Tile { stretch_value: 0.2 },
max_corner_scale: 0.2,
}),
),
];
for (label, text_style, size, scale_mode) in cases {
position.x += 0.5 * size.x;
commands.spawn((
Sprite {
image: texture_handle.clone(),
custom_size: Some(size),
image_mode: scale_mode,
..default()
},
Transform::from_translation(position),
children![(
Text2d::new(label),
text_style,
TextLayout::new_with_justify(JustifyText::Center),
Transform::from_xyz(0., -0.5 * size.y - 10., 0.0),
bevy::sprite::Anchor::TopCenter,
)],
));
position.x += 0.5 * size.x + gap;
}
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn(Camera2d);
let font = asset_server.load("fonts/FiraSans-Bold.ttf");
let style = TextFont {
font: font.clone(),
..default()
};
// Load textures
let handle_1 = asset_server.load("textures/slice_square.png");
let handle_2 = asset_server.load("textures/slice_square_2.png");
spawn_sprites(
&mut commands,
handle_1,
Vec3::new(-600.0, 150.0, 0.0),
200.0,
style.clone(),
40.,
);
spawn_sprites(
&mut commands,
handle_2,
Vec3::new(-600.0, -150.0, 0.0),
80.0,
style,
40.,
);
}

49
vendor/bevy/examples/2d/sprite_tile.rs vendored Normal file
View File

@@ -0,0 +1,49 @@
//! Displays a single [`Sprite`] tiled in a grid, with a scaling animation
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, animate)
.run();
}
#[derive(Resource)]
struct AnimationState {
min: f32,
max: f32,
current: f32,
speed: f32,
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn(Camera2d);
commands.insert_resource(AnimationState {
min: 128.0,
max: 512.0,
current: 128.0,
speed: 50.0,
});
commands.spawn(Sprite {
image: asset_server.load("branding/icon.png"),
image_mode: SpriteImageMode::Tiled {
tile_x: true,
tile_y: true,
stretch_value: 0.5, // The image will tile every 128px
},
..default()
});
}
fn animate(mut sprites: Query<&mut Sprite>, mut state: ResMut<AnimationState>, time: Res<Time>) {
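// Bounce the sprite's custom size between `min` and `max` by flipping the speed's sign at
// either end; the tile size stays fixed, so more tiles appear as the sprite grows.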
if state.current >= state.max || state.current <= state.min {
state.speed = -state.speed;
};
state.current += state.speed * time.delta_secs();
for mut sprite in &mut sprites {
sprite.custom_size = Some(Vec2::splat(state.current));
}
}

187
vendor/bevy/examples/2d/text2d.rs vendored Normal file
View File

@@ -0,0 +1,187 @@
//! Shows text rendering with moving, rotating and scaling text.
//!
//! Note that this uses [`Text2d`] to display text alongside your other entities in a 2D scene.
//!
//! For an example on how to render text as part of a user interface, independent from the world
//! viewport, you may want to look at `games/contributors.rs` or `ui/text.rs`.
use bevy::{
color::palettes::css::*,
math::ops,
prelude::*,
sprite::Anchor,
text::{FontSmoothing, LineBreak, TextBounds},
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(
Update,
(animate_translation, animate_rotation, animate_scale),
)
.run();
}
#[derive(Component)]
struct AnimateTranslation;
#[derive(Component)]
struct AnimateRotation;
#[derive(Component)]
struct AnimateScale;
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
let font = asset_server.load("fonts/FiraSans-Bold.ttf");
let text_font = TextFont {
font: font.clone(),
font_size: 50.0,
..default()
};
let text_justification = JustifyText::Center;
commands.spawn(Camera2d);
// Demonstrate changing translation
commands.spawn((
Text2d::new("translation"),
text_font.clone(),
TextLayout::new_with_justify(text_justification),
AnimateTranslation,
));
// Demonstrate changing rotation
commands.spawn((
Text2d::new("rotation"),
text_font.clone(),
TextLayout::new_with_justify(text_justification),
AnimateRotation,
));
// Demonstrate changing scale
commands.spawn((
Text2d::new("scale"),
text_font,
TextLayout::new_with_justify(text_justification),
Transform::from_translation(Vec3::new(400.0, 0.0, 0.0)),
AnimateScale,
));
// Demonstrate text wrapping
let slightly_smaller_text_font = TextFont {
font,
font_size: 35.0,
..default()
};
let box_size = Vec2::new(300.0, 200.0);
let box_position = Vec2::new(0.0, -250.0);
commands
.spawn((
Sprite::from_color(Color::srgb(0.25, 0.25, 0.55), box_size),
Transform::from_translation(box_position.extend(0.0)),
))
.with_children(|builder| {
builder.spawn((
Text2d::new("this text wraps in the box\n(Unicode linebreaks)"),
slightly_smaller_text_font.clone(),
TextLayout::new(JustifyText::Left, LineBreak::WordBoundary),
// Wrap text in the rectangle
TextBounds::from(box_size),
// Ensure the text is drawn on top of the box
Transform::from_translation(Vec3::Z),
));
});
let other_box_size = Vec2::new(300.0, 200.0);
let other_box_position = Vec2::new(320.0, -250.0);
commands
.spawn((
Sprite::from_color(Color::srgb(0.25, 0.25, 0.55), other_box_size),
Transform::from_translation(other_box_position.extend(0.0)),
))
.with_children(|builder| {
builder.spawn((
Text2d::new("this text wraps in the box\n(AnyCharacter linebreaks)"),
slightly_smaller_text_font.clone(),
TextLayout::new(JustifyText::Left, LineBreak::AnyCharacter),
// Wrap text in the rectangle
TextBounds::from(other_box_size),
// Ensure the text is drawn on top of the box
Transform::from_translation(Vec3::Z),
));
});
// Demonstrate font smoothing off
commands.spawn((
Text2d::new("This text has\nFontSmoothing::None\nAnd JustifyText::Center"),
slightly_smaller_text_font
.clone()
.with_font_smoothing(FontSmoothing::None),
TextLayout::new_with_justify(JustifyText::Center),
Transform::from_translation(Vec3::new(-400.0, -250.0, 0.0)),
));
commands
.spawn((
Sprite {
color: Color::Srgba(LIGHT_CYAN),
custom_size: Some(Vec2::new(10., 10.)),
..Default::default()
},
Transform::from_translation(250. * Vec3::Y),
))
.with_children(|commands| {
for (text_anchor, color) in [
(Anchor::TopLeft, Color::Srgba(LIGHT_SALMON)),
(Anchor::TopRight, Color::Srgba(LIGHT_GREEN)),
(Anchor::BottomRight, Color::Srgba(LIGHT_BLUE)),
(Anchor::BottomLeft, Color::Srgba(LIGHT_YELLOW)),
] {
commands
.spawn((
Text2d::new(" Anchor".to_string()),
slightly_smaller_text_font.clone(),
text_anchor,
))
.with_child((
TextSpan("::".to_string()),
slightly_smaller_text_font.clone(),
TextColor(LIGHT_GREY.into()),
))
.with_child((
TextSpan(format!("{text_anchor:?} ")),
slightly_smaller_text_font.clone(),
TextColor(color),
));
}
});
}
fn animate_translation(
time: Res<Time>,
mut query: Query<&mut Transform, (With<Text2d>, With<AnimateTranslation>)>,
) {
for mut transform in &mut query {
transform.translation.x = 100.0 * ops::sin(time.elapsed_secs()) - 400.0;
transform.translation.y = 100.0 * ops::cos(time.elapsed_secs());
}
}
fn animate_rotation(
time: Res<Time>,
mut query: Query<&mut Transform, (With<Text2d>, With<AnimateRotation>)>,
) {
for mut transform in &mut query {
transform.rotation = Quat::from_rotation_z(ops::cos(time.elapsed_secs()));
}
}
fn animate_scale(
time: Res<Time>,
mut query: Query<&mut Transform, (With<Text2d>, With<AnimateScale>)>,
) {
// Consider changing the font size instead of scaling the transform. Scaling a `Text2d` will
// scale the rendered quad, resulting in a pixelated look.
for mut transform in &mut query {
let scale = (ops::sin(time.elapsed_secs()) + 1.1) * 2.0;
transform.scale.x = scale;
transform.scale.y = scale;
}
}

288
vendor/bevy/examples/2d/texture_atlas.rs vendored Normal file
View File

@@ -0,0 +1,288 @@
//! In this example we generate four texture atlases (sprite sheets) from a folder containing
//! individual sprites.
//!
//! The texture atlases are generated with different padding and sampling to demonstrate the
//! effect of these settings, and how bleeding issues can be resolved by padding the sprites.
//!
//! Only one padded and one unpadded texture atlas are rendered to the screen.
//! An upscaled sprite from each of the four atlases is rendered to the screen.
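//!
//! (Added note: the "bleeding" mentioned above comes from linear sampling reading neighboring
//! texels, which can pull in colors from an adjacent sprite packed next to it in the atlas;
//! the padding acts as a gutter that keeps those samples inside the sprite's own pixels.)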
use bevy::{asset::LoadedFolder, image::ImageSampler, prelude::*};
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(ImagePlugin::default_nearest())) // fallback to nearest sampling
.init_state::<AppState>()
.add_systems(OnEnter(AppState::Setup), load_textures)
.add_systems(Update, check_textures.run_if(in_state(AppState::Setup)))
.add_systems(OnEnter(AppState::Finished), setup)
.run();
}
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash, States)]
enum AppState {
#[default]
Setup,
Finished,
}
#[derive(Resource, Default)]
struct RpgSpriteFolder(Handle<LoadedFolder>);
fn load_textures(mut commands: Commands, asset_server: Res<AssetServer>) {
// Load multiple, individual sprites from a folder
commands.insert_resource(RpgSpriteFolder(asset_server.load_folder("textures/rpg")));
}
fn check_textures(
mut next_state: ResMut<NextState<AppState>>,
rpg_sprite_folder: Res<RpgSpriteFolder>,
mut events: EventReader<AssetEvent<LoadedFolder>>,
) {
// Advance the `AppState` once all sprite handles have been loaded by the `AssetServer`
for event in events.read() {
if event.is_loaded_with_dependencies(&rpg_sprite_folder.0) {
next_state.set(AppState::Finished);
}
}
}
fn setup(
mut commands: Commands,
rpg_sprite_handles: Res<RpgSpriteFolder>,
asset_server: Res<AssetServer>,
mut texture_atlases: ResMut<Assets<TextureAtlasLayout>>,
loaded_folders: Res<Assets<LoadedFolder>>,
mut textures: ResMut<Assets<Image>>,
) {
let loaded_folder = loaded_folders.get(&rpg_sprite_handles.0).unwrap();
// Create texture atlases with different padding and sampling
let (texture_atlas_linear, linear_sources, linear_texture) = create_texture_atlas(
loaded_folder,
None,
Some(ImageSampler::linear()),
&mut textures,
);
let atlas_linear_handle = texture_atlases.add(texture_atlas_linear);
let (texture_atlas_nearest, nearest_sources, nearest_texture) = create_texture_atlas(
loaded_folder,
None,
Some(ImageSampler::nearest()),
&mut textures,
);
let atlas_nearest_handle = texture_atlases.add(texture_atlas_nearest);
let (texture_atlas_linear_padded, linear_padded_sources, linear_padded_texture) =
create_texture_atlas(
loaded_folder,
Some(UVec2::new(6, 6)),
Some(ImageSampler::linear()),
&mut textures,
);
let atlas_linear_padded_handle = texture_atlases.add(texture_atlas_linear_padded.clone());
let (texture_atlas_nearest_padded, nearest_padded_sources, nearest_padded_texture) =
create_texture_atlas(
loaded_folder,
Some(UVec2::new(6, 6)),
Some(ImageSampler::nearest()),
&mut textures,
);
let atlas_nearest_padded_handle = texture_atlases.add(texture_atlas_nearest_padded);
commands.spawn(Camera2d);
// Padded textures are to the right, unpadded to the left
// Draw unpadded texture atlas
commands.spawn((
Sprite::from_image(linear_texture.clone()),
Transform {
translation: Vec3::new(-250.0, -160.0, 0.0),
scale: Vec3::splat(0.5),
..default()
},
));
// Draw padded texture atlas
commands.spawn((
Sprite::from_image(linear_padded_texture.clone()),
Transform {
translation: Vec3::new(250.0, -160.0, 0.0),
scale: Vec3::splat(0.5),
..default()
},
));
let font = asset_server.load("fonts/FiraSans-Bold.ttf");
// Padding label text style
let text_style: TextFont = TextFont {
font: font.clone(),
font_size: 42.0,
..default()
};
// Labels to indicate padding
// No padding
create_label(
&mut commands,
(-250.0, 250.0, 0.0),
"No padding",
text_style.clone(),
);
// Padding
create_label(&mut commands, (250.0, 250.0, 0.0), "Padding", text_style);
// Get handle to a sprite to render
let vendor_handle: Handle<Image> = asset_server
.get_handle("textures/rpg/chars/vendor/generic-rpg-vendor.png")
.unwrap();
// Configuration array to render sprites through iteration
let configurations: [(
&str,
Handle<TextureAtlasLayout>,
TextureAtlasSources,
Handle<Image>,
f32,
); 4] = [
(
"Linear",
atlas_linear_handle,
linear_sources,
linear_texture,
-350.0,
),
(
"Nearest",
atlas_nearest_handle,
nearest_sources,
nearest_texture,
-150.0,
),
(
"Linear",
atlas_linear_padded_handle,
linear_padded_sources,
linear_padded_texture,
150.0,
),
(
"Nearest",
atlas_nearest_padded_handle,
nearest_padded_sources,
nearest_padded_texture,
350.0,
),
];
// Label text style
let sampling_label_style = TextFont {
font,
font_size: 25.0,
..default()
};
let base_y = 80.0; // y position of the sprites
for (sampling, atlas_handle, atlas_sources, atlas_texture, x) in configurations {
// Render a sprite from the texture_atlas
create_sprite_from_atlas(
&mut commands,
(x, base_y, 0.0),
atlas_texture,
atlas_sources,
atlas_handle,
&vendor_handle,
);
// Render a label to indicate the sampling setting
create_label(
&mut commands,
(x, base_y + 110.0, 0.0), // Offset to y position of the sprite
sampling,
sampling_label_style.clone(),
);
}
}
/// Create a texture atlas with the given padding and sampling settings
/// from the individual sprites in the given folder.
fn create_texture_atlas(
folder: &LoadedFolder,
padding: Option<UVec2>,
sampling: Option<ImageSampler>,
textures: &mut ResMut<Assets<Image>>,
) -> (TextureAtlasLayout, TextureAtlasSources, Handle<Image>) {
// Build a texture atlas using the individual sprites
let mut texture_atlas_builder = TextureAtlasBuilder::default();
texture_atlas_builder.padding(padding.unwrap_or_default());
for handle in folder.handles.iter() {
let id = handle.id().typed_unchecked::<Image>();
let Some(texture) = textures.get(id) else {
warn!(
"{} did not resolve to an `Image` asset.",
handle.path().unwrap()
);
continue;
};
texture_atlas_builder.add_texture(Some(id), texture);
}
let (texture_atlas_layout, texture_atlas_sources, texture) =
texture_atlas_builder.build().unwrap();
let texture = textures.add(texture);
// Update the sampling settings of the texture atlas
let image = textures.get_mut(&texture).unwrap();
image.sampler = sampling.unwrap_or_default();
(texture_atlas_layout, texture_atlas_sources, texture)
}
/// Create and spawn a sprite from a texture atlas
fn create_sprite_from_atlas(
commands: &mut Commands,
translation: (f32, f32, f32),
atlas_texture: Handle<Image>,
atlas_sources: TextureAtlasSources,
atlas_handle: Handle<TextureAtlasLayout>,
vendor_handle: &Handle<Image>,
) {
commands.spawn((
Transform {
translation: Vec3::new(translation.0, translation.1, translation.2),
scale: Vec3::splat(3.0),
..default()
},
Sprite::from_atlas_image(
atlas_texture,
atlas_sources.handle(atlas_handle, vendor_handle).unwrap(),
),
));
}
/// Create and spawn a label (text)
fn create_label(
commands: &mut Commands,
translation: (f32, f32, f32),
text: &str,
text_style: TextFont,
) {
commands.spawn((
Text2d::new(text),
text_style,
TextLayout::new_with_justify(JustifyText::Center),
Transform {
translation: Vec3::new(translation.0, translation.1, translation.2),
..default()
},
));
}

View File

@@ -0,0 +1,39 @@
//! Demonstrates how to use transparency in 2D.
//! Shows 3 Bevy logos on top of each other, each with a different amount of transparency.
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn(Camera2d);
let sprite_handle = asset_server.load("branding/icon.png");
commands.spawn((
Sprite::from_image(sprite_handle.clone()),
Transform::from_xyz(-100.0, 0.0, 0.0),
));
commands.spawn((
Sprite {
image: sprite_handle.clone(),
// Alpha channel of the color controls transparency.
color: Color::srgba(0.0, 0.0, 1.0, 0.7),
..default()
},
Transform::from_xyz(0.0, 0.0, 0.1),
));
commands.spawn((
Sprite {
image: sprite_handle,
color: Color::srgba(0.0, 1.0, 0.0, 0.3),
..default()
},
Transform::from_xyz(100.0, 0.0, 0.2),
));
}

147
vendor/bevy/examples/2d/wireframe_2d.rs vendored Normal file
View File

@@ -0,0 +1,147 @@
//! Showcases wireframe rendering for 2d meshes.
//!
//! Wireframes currently do not work when using webgl or webgpu.
//! Supported platforms:
//! - DX12
//! - Vulkan
//! - Metal
//!
//! This is a native only feature.
use bevy::{
color::palettes::basic::{GREEN, RED, WHITE},
prelude::*,
render::{
render_resource::WgpuFeatures,
settings::{RenderCreation, WgpuSettings},
RenderPlugin,
},
sprite::{NoWireframe2d, Wireframe2d, Wireframe2dColor, Wireframe2dConfig, Wireframe2dPlugin},
};
fn main() {
App::new()
.add_plugins((
DefaultPlugins.set(RenderPlugin {
render_creation: RenderCreation::Automatic(WgpuSettings {
// WARN this is a native only feature. It will not work with webgl or webgpu
features: WgpuFeatures::POLYGON_MODE_LINE,
..default()
}),
..default()
}),
// You need to add this plugin to enable wireframe rendering
Wireframe2dPlugin::default(),
))
// Wireframes can be configured with this resource. This can be changed at runtime.
.insert_resource(Wireframe2dConfig {
// The global wireframe config enables drawing of wireframes on every mesh,
// except those with `NoWireframe2d`. Meshes with `Wireframe2d` will always have a wireframe,
// regardless of the global configuration.
global: true,
// Controls the default color of all wireframes. Used as the default color for global wireframes.
// Can be changed per mesh using the `Wireframe2dColor` component.
default_color: WHITE.into(),
})
.add_systems(Startup, setup)
.add_systems(Update, update_colors)
.run();
}
/// Set up a simple 2D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorMaterial>>,
) {
// Triangle: Never renders a wireframe
commands.spawn((
Mesh2d(meshes.add(Triangle2d::new(
Vec2::new(0.0, 50.0),
Vec2::new(-50.0, -50.0),
Vec2::new(50.0, -50.0),
))),
MeshMaterial2d(materials.add(Color::BLACK)),
Transform::from_xyz(-150.0, 0.0, 0.0),
NoWireframe2d,
));
// Rectangle: Follows global wireframe setting
commands.spawn((
Mesh2d(meshes.add(Rectangle::new(100.0, 100.0))),
MeshMaterial2d(materials.add(Color::BLACK)),
Transform::from_xyz(0.0, 0.0, 0.0),
));
// Circle: Always renders a wireframe
commands.spawn((
Mesh2d(meshes.add(Circle::new(50.0))),
MeshMaterial2d(materials.add(Color::BLACK)),
Transform::from_xyz(150.0, 0.0, 0.0),
Wireframe2d,
// This lets you configure the wireframe color of this entity.
// If not set, this will use the color in `Wireframe2dConfig`
Wireframe2dColor {
color: GREEN.into(),
},
));
commands.spawn(Camera2d);
// Text used to show controls
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
/// This system lets you toggle various wireframe settings
fn update_colors(
keyboard_input: Res<ButtonInput<KeyCode>>,
mut config: ResMut<Wireframe2dConfig>,
mut wireframe_colors: Query<&mut Wireframe2dColor>,
mut text: Single<&mut Text>,
) {
text.0 = format!(
"Controls
---------------
Z - Toggle global
X - Change global color
C - Change color of the circle wireframe
Wireframe2dConfig
-------------
Global: {}
Color: {:?}",
config.global,
config.default_color.to_srgba(),
);
// Toggle showing a wireframe on all meshes
if keyboard_input.just_pressed(KeyCode::KeyZ) {
config.global = !config.global;
}
// Toggle the global wireframe color
if keyboard_input.just_pressed(KeyCode::KeyX) {
config.default_color = if config.default_color == WHITE.into() {
RED.into()
} else {
WHITE.into()
};
}
// Toggle the color of a wireframe using `Wireframe2dColor` and not the global color
if keyboard_input.just_pressed(KeyCode::KeyC) {
for mut color in &mut wireframe_colors {
color.color = if color.color == GREEN.into() {
RED.into()
} else {
GREEN.into()
};
}
}
}

43
vendor/bevy/examples/3d/3d_scene.rs vendored Normal file
View File

@@ -0,0 +1,43 @@
//! A simple 3D scene with light shining over a cube sitting on a plane.
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// circular base
commands.spawn((
Mesh3d(meshes.add(Circle::new(4.0))),
MeshMaterial3d(materials.add(Color::WHITE)),
Transform::from_rotation(Quat::from_rotation_x(-std::f32::consts::FRAC_PI_2)),
));
// cube
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(1.0, 1.0, 1.0))),
MeshMaterial3d(materials.add(Color::srgb_u8(124, 144, 255))),
Transform::from_xyz(0.0, 0.5, 0.0),
));
// light
commands.spawn((
PointLight {
shadows_enabled: true,
..default()
},
Transform::from_xyz(4.0, 8.0, 4.0),
));
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-2.5, 4.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y),
));
}

190
vendor/bevy/examples/3d/3d_shapes.rs vendored Normal file
View File

@@ -0,0 +1,190 @@
//! This example demonstrates the built-in 3d shapes in Bevy.
//! The scene includes a patterned texture and a rotation for visualizing the normals and UVs.
//!
//! You can toggle wireframes with the space bar except on wasm. Wasm does not support
//! `POLYGON_MODE_LINE` on the gpu.
use std::f32::consts::PI;
#[cfg(not(target_arch = "wasm32"))]
use bevy::pbr::wireframe::{WireframeConfig, WireframePlugin};
use bevy::{
color::palettes::basic::SILVER,
prelude::*,
render::{
render_asset::RenderAssetUsages,
render_resource::{Extent3d, TextureDimension, TextureFormat},
},
};
fn main() {
App::new()
.add_plugins((
DefaultPlugins.set(ImagePlugin::default_nearest()),
#[cfg(not(target_arch = "wasm32"))]
WireframePlugin::default(),
))
.add_systems(Startup, setup)
.add_systems(
Update,
(
rotate,
#[cfg(not(target_arch = "wasm32"))]
toggle_wireframe,
),
)
.run();
}
/// A marker component for our shapes so we can query them separately from the ground plane
#[derive(Component)]
struct Shape;
const SHAPES_X_EXTENT: f32 = 14.0;
const EXTRUSION_X_EXTENT: f32 = 16.0;
const Z_EXTENT: f32 = 5.0;
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut images: ResMut<Assets<Image>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
let debug_material = materials.add(StandardMaterial {
base_color_texture: Some(images.add(uv_debug_texture())),
..default()
});
let shapes = [
meshes.add(Cuboid::default()),
meshes.add(Tetrahedron::default()),
meshes.add(Capsule3d::default()),
meshes.add(Torus::default()),
meshes.add(Cylinder::default()),
meshes.add(Cone::default()),
meshes.add(ConicalFrustum::default()),
meshes.add(Sphere::default().mesh().ico(5).unwrap()),
meshes.add(Sphere::default().mesh().uv(32, 18)),
];
let extrusions = [
meshes.add(Extrusion::new(Rectangle::default(), 1.)),
meshes.add(Extrusion::new(Capsule2d::default(), 1.)),
meshes.add(Extrusion::new(Annulus::default(), 1.)),
meshes.add(Extrusion::new(Circle::default(), 1.)),
meshes.add(Extrusion::new(Ellipse::default(), 1.)),
meshes.add(Extrusion::new(RegularPolygon::default(), 1.)),
meshes.add(Extrusion::new(Triangle2d::default(), 1.)),
];
let num_shapes = shapes.len();
for (i, shape) in shapes.into_iter().enumerate() {
commands.spawn((
Mesh3d(shape),
MeshMaterial3d(debug_material.clone()),
Transform::from_xyz(
-SHAPES_X_EXTENT / 2. + i as f32 / (num_shapes - 1) as f32 * SHAPES_X_EXTENT,
2.0,
Z_EXTENT / 2.,
)
.with_rotation(Quat::from_rotation_x(-PI / 4.)),
Shape,
));
}
let num_extrusions = extrusions.len();
for (i, shape) in extrusions.into_iter().enumerate() {
commands.spawn((
Mesh3d(shape),
MeshMaterial3d(debug_material.clone()),
Transform::from_xyz(
-EXTRUSION_X_EXTENT / 2.
+ i as f32 / (num_extrusions - 1) as f32 * EXTRUSION_X_EXTENT,
2.0,
-Z_EXTENT / 2.,
)
.with_rotation(Quat::from_rotation_x(-PI / 4.)),
Shape,
));
}
commands.spawn((
PointLight {
shadows_enabled: true,
intensity: 10_000_000.,
range: 100.0,
shadow_depth_bias: 0.2,
..default()
},
Transform::from_xyz(8.0, 16.0, 8.0),
));
// ground plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(50.0, 50.0).subdivisions(10))),
MeshMaterial3d(materials.add(Color::from(SILVER))),
));
commands.spawn((
Camera3d::default(),
Transform::from_xyz(0.0, 7., 14.0).looking_at(Vec3::new(0., 1., 0.), Vec3::Y),
));
#[cfg(not(target_arch = "wasm32"))]
commands.spawn((
Text::new("Press space to toggle wireframes"),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
fn rotate(mut query: Query<&mut Transform, With<Shape>>, time: Res<Time>) {
for mut transform in &mut query {
transform.rotate_y(time.delta_secs() / 2.);
}
}
/// Creates a colorful test pattern
fn uv_debug_texture() -> Image {
const TEXTURE_SIZE: usize = 8;
let mut palette: [u8; 32] = [
255, 102, 159, 255, 255, 159, 102, 255, 236, 255, 102, 255, 121, 255, 102, 255, 102, 255,
198, 255, 102, 198, 255, 255, 121, 102, 255, 255, 236, 102, 255, 255,
];
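// The palette holds 8 RGBA colors (32 bytes); each row of the 8x8 texture gets the palette
// rotated by one color (4 bytes), producing a diagonal test pattern.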
let mut texture_data = [0; TEXTURE_SIZE * TEXTURE_SIZE * 4];
for y in 0..TEXTURE_SIZE {
let offset = TEXTURE_SIZE * y * 4;
texture_data[offset..(offset + TEXTURE_SIZE * 4)].copy_from_slice(&palette);
palette.rotate_right(4);
}
Image::new_fill(
Extent3d {
width: TEXTURE_SIZE as u32,
height: TEXTURE_SIZE as u32,
depth_or_array_layers: 1,
},
TextureDimension::D2,
&texture_data,
TextureFormat::Rgba8UnormSrgb,
RenderAssetUsages::RENDER_WORLD,
)
}
#[cfg(not(target_arch = "wasm32"))]
fn toggle_wireframe(
mut wireframe_config: ResMut<WireframeConfig>,
keyboard: Res<ButtonInput<KeyCode>>,
) {
if keyboard.just_pressed(KeyCode::Space) {
wireframe_config.global = !wireframe_config.global;
}
}

View File

@@ -0,0 +1,79 @@
//! This example demonstrates how to use the `Camera::viewport_to_world` method.
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, draw_cursor)
.run();
}
fn draw_cursor(
camera_query: Single<(&Camera, &GlobalTransform)>,
ground: Single<&GlobalTransform, With<Ground>>,
windows: Query<&Window>,
mut gizmos: Gizmos,
) {
let Ok(windows) = windows.single() else {
return;
};
let (camera, camera_transform) = *camera_query;
let Some(cursor_position) = windows.cursor_position() else {
return;
};
// Calculate a ray pointing from the camera into the world based on the cursor's position.
let Ok(ray) = camera.viewport_to_world(camera_transform, cursor_position) else {
return;
};
// Calculate if and where the ray is hitting the ground plane.
let Some(distance) =
ray.intersect_plane(ground.translation(), InfinitePlane3d::new(ground.up()))
else {
return;
};
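// `get_point` walks `distance` units along the ray from its origin to give the hit point.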
let point = ray.get_point(distance);
// Draw a circle just above the ground plane at that position.
gizmos.circle(
Isometry3d::new(
point + ground.up() * 0.01,
Quat::from_rotation_arc(Vec3::Z, ground.up().as_vec3()),
),
0.2,
Color::WHITE,
);
}
#[derive(Component)]
struct Ground;
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(20., 20.))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
Ground,
));
// light
commands.spawn((
DirectionalLight::default(),
Transform::from_translation(Vec3::ONE).looking_at(Vec3::ZERO, Vec3::Y),
));
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(15.0, 5.0, 15.0).looking_at(Vec3::ZERO, Vec3::Y),
));
}

View File

@@ -0,0 +1,59 @@
//! Shows how to animate material properties
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, animate_materials)
.run();
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
commands.spawn((
Camera3d::default(),
Transform::from_xyz(3.0, 1.0, 3.0).looking_at(Vec3::new(0.0, -0.5, 0.0), Vec3::Y),
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 2_000.0,
..default()
},
));
let cube = meshes.add(Cuboid::new(0.5, 0.5, 0.5));
const GOLDEN_ANGLE: f32 = 137.507_77;
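// The golden angle (~137.5 degrees) spreads successive hues roughly evenly around the color
// wheel without repeating, so each cube in the 3x3 grid gets a visibly distinct color.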
let mut hsla = Hsla::hsl(0.0, 1.0, 0.5);
for x in -1..2 {
for z in -1..2 {
commands.spawn((
Mesh3d(cube.clone()),
MeshMaterial3d(materials.add(Color::from(hsla))),
Transform::from_translation(Vec3::new(x as f32, 0.0, z as f32)),
));
hsla = hsla.rotate_hue(GOLDEN_ANGLE);
}
}
}
fn animate_materials(
material_handles: Query<&MeshMaterial3d<StandardMaterial>>,
time: Res<Time>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
for material_handle in material_handles.iter() {
if let Some(material) = materials.get_mut(material_handle) {
if let Color::Hsla(ref mut hsla) = material.base_color {
*hsla = hsla.rotate_hue(time.delta_secs() * 100.0);
}
}
}
}

372
vendor/bevy/examples/3d/anisotropy.rs vendored Normal file
View File

@@ -0,0 +1,372 @@
//! Demonstrates anisotropy with the glTF sample barn lamp model.
use std::fmt::Display;
use bevy::{
color::palettes::{self, css::WHITE},
core_pipeline::Skybox,
math::vec3,
prelude::*,
time::Stopwatch,
};
/// The initial position of the camera.
const CAMERA_INITIAL_POSITION: Vec3 = vec3(-0.4, 0.0, 0.0);
/// The current settings of the app, as chosen by the user.
#[derive(Resource)]
struct AppStatus {
/// Which type of light is in the scene.
light_mode: LightMode,
/// Whether anisotropy is enabled.
anisotropy_enabled: bool,
/// Which mesh is visible
visible_scene: Scene,
}
/// Which type of light we're using: a directional light, a point light, or an
/// environment map.
#[derive(Clone, Copy, PartialEq, Default)]
enum LightMode {
/// A rotating directional light.
#[default]
Directional,
/// A rotating point light.
Point,
/// An environment map (image-based lighting, including skybox).
EnvironmentMap,
}
/// A component that stores the version of the material with anisotropy and the
/// version of the material without it.
///
/// This is placed on each mesh with a material. It exists so that the
/// appropriate system can replace the materials when the user presses Enter to
/// turn anisotropy on and off.
#[derive(Component)]
struct MaterialVariants {
/// The version of the material in the glTF file, with anisotropy.
anisotropic: Handle<StandardMaterial>,
/// The version of the material with anisotropy removed.
isotropic: Handle<StandardMaterial>,
}
#[derive(Default, Clone, Copy, PartialEq, Eq, Component)]
enum Scene {
#[default]
BarnLamp,
Sphere,
}
impl Scene {
fn next(&self) -> Self {
match self {
Self::BarnLamp => Self::Sphere,
Self::Sphere => Self::BarnLamp,
}
}
}
impl Display for Scene {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let scene_name = match self {
Self::BarnLamp => "Barn Lamp",
Self::Sphere => "Sphere",
};
write!(f, "{scene_name}")
}
}
/// The application entry point.
fn main() {
App::new()
.init_resource::<AppStatus>()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Anisotropy Example".into(),
..default()
}),
..default()
}))
.add_systems(Startup, setup)
.add_systems(Update, create_material_variants)
.add_systems(Update, animate_light)
.add_systems(Update, rotate_camera)
.add_systems(Update, (handle_input, update_help_text).chain())
.run();
}
/// Creates the initial scene.
fn setup(mut commands: Commands, asset_server: Res<AssetServer>, app_status: Res<AppStatus>) {
commands.spawn((
Camera3d::default(),
Transform::from_translation(CAMERA_INITIAL_POSITION).looking_at(Vec3::ZERO, Vec3::Y),
));
spawn_directional_light(&mut commands);
commands.spawn((
SceneRoot(asset_server.load("models/AnisotropyBarnLamp/AnisotropyBarnLamp.gltf#Scene0")),
Transform::from_xyz(0.0, 0.07, -0.13),
Scene::BarnLamp,
));
commands.spawn((
Mesh3d(
asset_server.add(
Mesh::from(Sphere::new(0.1))
.with_generated_tangents()
.unwrap(),
),
),
MeshMaterial3d(asset_server.add(StandardMaterial {
base_color: palettes::tailwind::GRAY_300.into(),
anisotropy_rotation: 0.5,
anisotropy_strength: 1.,
..default()
})),
Scene::Sphere,
Visibility::Hidden,
));
spawn_text(&mut commands, &app_status);
}
/// Spawns the help text.
fn spawn_text(commands: &mut Commands, app_status: &AppStatus) {
commands.spawn((
app_status.create_help_text(),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
/// For each material, creates a version with the anisotropy removed.
///
/// This allows the user to press Enter to toggle anisotropy on and off.
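///
/// (Note added for context: the query filters on `Added<MeshMaterial3d<StandardMaterial>>` and
/// `Without<MaterialVariants>`, so each mesh is processed once, when its material first appears.
/// Since the glTF scene loads asynchronously, this runs in `Update` rather than only at startup.)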
fn create_material_variants(
mut commands: Commands,
mut materials: ResMut<Assets<StandardMaterial>>,
new_meshes: Query<
(Entity, &MeshMaterial3d<StandardMaterial>),
(
Added<MeshMaterial3d<StandardMaterial>>,
Without<MaterialVariants>,
),
>,
) {
for (entity, anisotropic_material_handle) in new_meshes.iter() {
let Some(anisotropic_material) = materials.get(anisotropic_material_handle).cloned() else {
continue;
};
commands.entity(entity).insert(MaterialVariants {
anisotropic: anisotropic_material_handle.0.clone(),
isotropic: materials.add(StandardMaterial {
anisotropy_texture: None,
anisotropy_strength: 0.0,
anisotropy_rotation: 0.0,
..anisotropic_material
}),
});
}
}
/// A system that animates the light every frame, if there is one.
fn animate_light(
mut lights: Query<&mut Transform, Or<(With<DirectionalLight>, With<PointLight>)>>,
time: Res<Time>,
) {
let now = time.elapsed_secs();
for mut transform in lights.iter_mut() {
transform.translation = vec3(ops::cos(now), 1.0, ops::sin(now)) * vec3(3.0, 4.0, 3.0);
transform.look_at(Vec3::ZERO, Vec3::Y);
}
}
/// A system that rotates the camera if the environment map is enabled.
fn rotate_camera(
mut camera: Query<&mut Transform, With<Camera>>,
app_status: Res<AppStatus>,
time: Res<Time>,
mut stopwatch: Local<Stopwatch>,
) {
if app_status.light_mode == LightMode::EnvironmentMap {
stopwatch.tick(time.delta());
}
let now = stopwatch.elapsed_secs();
for mut transform in camera.iter_mut() {
*transform = Transform::from_translation(
Quat::from_rotation_y(now).mul_vec3(CAMERA_INITIAL_POSITION),
)
.looking_at(Vec3::ZERO, Vec3::Y);
}
}
/// Handles requests from the user to change the lighting or toggle anisotropy.
fn handle_input(
mut commands: Commands,
asset_server: Res<AssetServer>,
cameras: Query<Entity, With<Camera>>,
lights: Query<Entity, Or<(With<DirectionalLight>, With<PointLight>)>>,
mut meshes: Query<(&mut MeshMaterial3d<StandardMaterial>, &MaterialVariants)>,
mut scenes: Query<(&mut Visibility, &Scene)>,
keyboard: Res<ButtonInput<KeyCode>>,
mut app_status: ResMut<AppStatus>,
) {
// If Space was pressed, change the lighting.
if keyboard.just_pressed(KeyCode::Space) {
match app_status.light_mode {
LightMode::Directional => {
// Switch to a point light. Despawn all existing lights and
// create the point light.
app_status.light_mode = LightMode::Point;
for light in lights.iter() {
commands.entity(light).despawn();
}
spawn_point_light(&mut commands);
}
LightMode::Point => {
// Switch to the environment map. Despawn all existing lights,
// and create the skybox and environment map.
app_status.light_mode = LightMode::EnvironmentMap;
for light in lights.iter() {
commands.entity(light).despawn();
}
for camera in cameras.iter() {
add_skybox_and_environment_map(&mut commands, &asset_server, camera);
}
}
LightMode::EnvironmentMap => {
// Switch back to a directional light. Despawn the skybox and
// environment map light, and recreate the directional light.
app_status.light_mode = LightMode::Directional;
for camera in cameras.iter() {
commands
.entity(camera)
.remove::<Skybox>()
.remove::<EnvironmentMapLight>();
}
spawn_directional_light(&mut commands);
}
}
}
// If Enter was pressed, toggle anisotropy on and off.
if keyboard.just_pressed(KeyCode::Enter) {
app_status.anisotropy_enabled = !app_status.anisotropy_enabled;
// Go through each mesh and alter its material.
for (mut material_handle, material_variants) in meshes.iter_mut() {
material_handle.0 = if app_status.anisotropy_enabled {
material_variants.anisotropic.clone()
} else {
material_variants.isotropic.clone()
}
}
}
if keyboard.just_pressed(KeyCode::KeyQ) {
app_status.visible_scene = app_status.visible_scene.next();
for (mut visibility, scene) in scenes.iter_mut() {
let new_vis = if *scene == app_status.visible_scene {
Visibility::Inherited
} else {
Visibility::Hidden
};
*visibility = new_vis;
}
}
}
/// A system that updates the help text based on the current app status.
fn update_help_text(mut text_query: Query<&mut Text>, app_status: Res<AppStatus>) {
for mut text in text_query.iter_mut() {
*text = app_status.create_help_text();
}
}
/// Adds the skybox and environment map to the scene.
fn add_skybox_and_environment_map(
commands: &mut Commands,
asset_server: &AssetServer,
entity: Entity,
) {
commands
.entity(entity)
.insert(Skybox {
brightness: 5000.0,
image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
..default()
})
.insert(EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 2500.0,
..default()
});
}
/// Spawns a rotating directional light.
fn spawn_directional_light(commands: &mut Commands) {
commands.spawn(DirectionalLight {
color: WHITE.into(),
illuminance: 3000.0,
..default()
});
}
/// Spawns a rotating point light.
fn spawn_point_light(commands: &mut Commands) {
commands.spawn(PointLight {
color: WHITE.into(),
intensity: 200000.0,
..default()
});
}
impl AppStatus {
/// Creates the help text as appropriate for the current app status.
fn create_help_text(&self) -> Text {
// Choose the appropriate help text for the anisotropy toggle.
let material_variant_help_text = if self.anisotropy_enabled {
"Press Enter to disable anisotropy"
} else {
"Press Enter to enable anisotropy"
};
// Choose the appropriate help text for the light toggle.
let light_help_text = match self.light_mode {
LightMode::Directional => "Press Space to switch to a point light",
LightMode::Point => "Press Space to switch to an environment map",
LightMode::EnvironmentMap => "Press Space to switch to a directional light",
};
// Choose the appropriate help text for the scene selector.
let mesh_help_text = format!("Press Q to change to {}", self.visible_scene.next());
// Build the `Text` object.
format!(
"{}\n{}\n{}",
material_variant_help_text, light_help_text, mesh_help_text,
)
.into()
}
}
impl Default for AppStatus {
fn default() -> Self {
Self {
light_mode: default(),
anisotropy_enabled: true,
visible_scene: default(),
}
}
}

375
vendor/bevy/examples/3d/anti_aliasing.rs vendored Normal file

@@ -0,0 +1,375 @@
//! This example compares MSAA (Multi-Sample Anti-aliasing), FXAA (Fast Approximate Anti-aliasing), and TAA (Temporal Anti-aliasing).
use std::{f32::consts::PI, fmt::Write};
use bevy::{
core_pipeline::{
contrast_adaptive_sharpening::ContrastAdaptiveSharpening,
experimental::taa::{TemporalAntiAliasPlugin, TemporalAntiAliasing},
fxaa::{Fxaa, Sensitivity},
prepass::{DepthPrepass, MotionVectorPrepass},
smaa::{Smaa, SmaaPreset},
},
image::{ImageSampler, ImageSamplerDescriptor},
pbr::CascadeShadowConfigBuilder,
prelude::*,
render::{
camera::TemporalJitter,
render_asset::RenderAssetUsages,
render_resource::{Extent3d, TextureDimension, TextureFormat},
},
};
fn main() {
App::new()
.add_plugins((DefaultPlugins, TemporalAntiAliasPlugin))
.add_systems(Startup, setup)
.add_systems(Update, (modify_aa, modify_sharpening, update_ui))
.run();
}
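// Everything involved in TAA, grouped as a tuple so the system below can remove
// it all at once when switching anti-aliasing modes.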
type TaaComponents = (
TemporalAntiAliasing,
TemporalJitter,
DepthPrepass,
MotionVectorPrepass,
);
fn modify_aa(
keys: Res<ButtonInput<KeyCode>>,
camera: Single<
(
Entity,
Option<&mut Fxaa>,
Option<&mut Smaa>,
Option<&TemporalAntiAliasing>,
&mut Msaa,
),
With<Camera>,
>,
mut commands: Commands,
) {
let (camera_entity, fxaa, smaa, taa, mut msaa) = camera.into_inner();
let mut camera = commands.entity(camera_entity);
// No AA
if keys.just_pressed(KeyCode::Digit1) {
*msaa = Msaa::Off;
camera
.remove::<Fxaa>()
.remove::<Smaa>()
.remove::<TaaComponents>();
}
// MSAA
if keys.just_pressed(KeyCode::Digit2) && *msaa == Msaa::Off {
camera
.remove::<Fxaa>()
.remove::<Smaa>()
.remove::<TaaComponents>();
*msaa = Msaa::Sample4;
}
// MSAA Sample Count
if *msaa != Msaa::Off {
if keys.just_pressed(KeyCode::KeyQ) {
*msaa = Msaa::Sample2;
}
if keys.just_pressed(KeyCode::KeyW) {
*msaa = Msaa::Sample4;
}
if keys.just_pressed(KeyCode::KeyE) {
*msaa = Msaa::Sample8;
}
}
// FXAA
if keys.just_pressed(KeyCode::Digit3) && fxaa.is_none() {
*msaa = Msaa::Off;
camera
.remove::<Smaa>()
.remove::<TaaComponents>()
.insert(Fxaa::default());
}
// FXAA Settings
if let Some(mut fxaa) = fxaa {
if keys.just_pressed(KeyCode::KeyQ) {
fxaa.edge_threshold = Sensitivity::Low;
fxaa.edge_threshold_min = Sensitivity::Low;
}
if keys.just_pressed(KeyCode::KeyW) {
fxaa.edge_threshold = Sensitivity::Medium;
fxaa.edge_threshold_min = Sensitivity::Medium;
}
if keys.just_pressed(KeyCode::KeyE) {
fxaa.edge_threshold = Sensitivity::High;
fxaa.edge_threshold_min = Sensitivity::High;
}
if keys.just_pressed(KeyCode::KeyR) {
fxaa.edge_threshold = Sensitivity::Ultra;
fxaa.edge_threshold_min = Sensitivity::Ultra;
}
if keys.just_pressed(KeyCode::KeyT) {
fxaa.edge_threshold = Sensitivity::Extreme;
fxaa.edge_threshold_min = Sensitivity::Extreme;
}
}
// SMAA
if keys.just_pressed(KeyCode::Digit4) && smaa.is_none() {
*msaa = Msaa::Off;
camera
.remove::<Fxaa>()
.remove::<TaaComponents>()
.insert(Smaa::default());
}
// SMAA Settings
if let Some(mut smaa) = smaa {
if keys.just_pressed(KeyCode::KeyQ) {
smaa.preset = SmaaPreset::Low;
}
if keys.just_pressed(KeyCode::KeyW) {
smaa.preset = SmaaPreset::Medium;
}
if keys.just_pressed(KeyCode::KeyE) {
smaa.preset = SmaaPreset::High;
}
if keys.just_pressed(KeyCode::KeyR) {
smaa.preset = SmaaPreset::Ultra;
}
}
// TAA
if keys.just_pressed(KeyCode::Digit5) && taa.is_none() {
*msaa = Msaa::Off;
camera
.remove::<Fxaa>()
.remove::<Smaa>()
.insert(TemporalAntiAliasing::default());
}
}
fn modify_sharpening(
keys: Res<ButtonInput<KeyCode>>,
mut query: Query<&mut ContrastAdaptiveSharpening>,
) {
for mut cas in &mut query {
if keys.just_pressed(KeyCode::Digit0) {
cas.enabled = !cas.enabled;
}
if cas.enabled {
if keys.just_pressed(KeyCode::Minus) {
cas.sharpening_strength -= 0.1;
cas.sharpening_strength = cas.sharpening_strength.clamp(0.0, 1.0);
}
if keys.just_pressed(KeyCode::Equal) {
cas.sharpening_strength += 0.1;
cas.sharpening_strength = cas.sharpening_strength.clamp(0.0, 1.0);
}
if keys.just_pressed(KeyCode::KeyD) {
cas.denoise = !cas.denoise;
}
}
}
}
fn update_ui(
camera: Single<
(
Option<&Fxaa>,
Option<&Smaa>,
Option<&TemporalAntiAliasing>,
&ContrastAdaptiveSharpening,
&Msaa,
),
With<Camera>,
>,
mut ui: Single<&mut Text>,
) {
let (fxaa, smaa, taa, cas, msaa) = *camera;
let ui = &mut ui.0;
*ui = "Antialias Method\n".to_string();
draw_selectable_menu_item(
ui,
"No AA",
'1',
*msaa == Msaa::Off && fxaa.is_none() && taa.is_none() && smaa.is_none(),
);
draw_selectable_menu_item(ui, "MSAA", '2', *msaa != Msaa::Off);
draw_selectable_menu_item(ui, "FXAA", '3', fxaa.is_some());
draw_selectable_menu_item(ui, "SMAA", '4', smaa.is_some());
draw_selectable_menu_item(ui, "TAA", '5', taa.is_some());
if *msaa != Msaa::Off {
ui.push_str("\n----------\n\nSample Count\n");
draw_selectable_menu_item(ui, "2", 'Q', *msaa == Msaa::Sample2);
draw_selectable_menu_item(ui, "4", 'W', *msaa == Msaa::Sample4);
draw_selectable_menu_item(ui, "8", 'E', *msaa == Msaa::Sample8);
}
if let Some(fxaa) = fxaa {
ui.push_str("\n----------\n\nSensitivity\n");
draw_selectable_menu_item(ui, "Low", 'Q', fxaa.edge_threshold == Sensitivity::Low);
draw_selectable_menu_item(
ui,
"Medium",
'W',
fxaa.edge_threshold == Sensitivity::Medium,
);
draw_selectable_menu_item(ui, "High", 'E', fxaa.edge_threshold == Sensitivity::High);
draw_selectable_menu_item(ui, "Ultra", 'R', fxaa.edge_threshold == Sensitivity::Ultra);
draw_selectable_menu_item(
ui,
"Extreme",
'T',
fxaa.edge_threshold == Sensitivity::Extreme,
);
}
if let Some(smaa) = smaa {
ui.push_str("\n----------\n\nQuality\n");
draw_selectable_menu_item(ui, "Low", 'Q', smaa.preset == SmaaPreset::Low);
draw_selectable_menu_item(ui, "Medium", 'W', smaa.preset == SmaaPreset::Medium);
draw_selectable_menu_item(ui, "High", 'E', smaa.preset == SmaaPreset::High);
draw_selectable_menu_item(ui, "Ultra", 'R', smaa.preset == SmaaPreset::Ultra);
}
ui.push_str("\n----------\n\n");
draw_selectable_menu_item(ui, "Sharpening", '0', cas.enabled);
if cas.enabled {
ui.push_str(&format!("(-/+) Strength: {:.1}\n", cas.sharpening_strength));
draw_selectable_menu_item(ui, "Denoising", 'D', cas.denoise);
}
}
/// Set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut images: ResMut<Assets<Image>>,
asset_server: Res<AssetServer>,
) {
// Plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(50.0, 50.0))),
MeshMaterial3d(materials.add(Color::srgb(0.1, 0.2, 0.1))),
));
let cube_material = materials.add(StandardMaterial {
base_color_texture: Some(images.add(uv_debug_texture())),
..default()
});
// Cubes
for i in 0..5 {
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(0.25, 0.25, 0.25))),
MeshMaterial3d(cube_material.clone()),
Transform::from_xyz(i as f32 * 0.25 - 1.0, 0.125, -i as f32 * 0.5),
));
}
// Flight Helmet
commands.spawn(SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf"),
)));
// Light
commands.spawn((
DirectionalLight {
illuminance: light_consts::lux::FULL_DAYLIGHT,
shadows_enabled: true,
..default()
},
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, PI * -0.15, PI * -0.15)),
CascadeShadowConfigBuilder {
maximum_distance: 3.0,
first_cascade_far_bound: 0.9,
..default()
}
.build(),
));
// Camera
commands.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Transform::from_xyz(0.7, 0.7, 1.0).looking_at(Vec3::new(0.0, 0.3, 0.0), Vec3::Y),
ContrastAdaptiveSharpening {
enabled: false,
..default()
},
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 150.0,
..default()
},
DistanceFog {
color: Color::srgba_u8(43, 44, 47, 255),
falloff: FogFalloff::Linear {
start: 1.0,
end: 4.0,
},
..default()
},
));
// example instructions
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
/// Writes a simple menu item that can be on or off.
fn draw_selectable_menu_item(ui: &mut String, label: &str, shortcut: char, enabled: bool) {
let star = if enabled { "*" } else { "" };
let _ = writeln!(*ui, "({shortcut}) {star}{label}{star}");
}
/// Creates a colorful test pattern
fn uv_debug_texture() -> Image {
const TEXTURE_SIZE: usize = 8;
let mut palette: [u8; 32] = [
255, 102, 159, 255, 255, 159, 102, 255, 236, 255, 102, 255, 121, 255, 102, 255, 102, 255,
198, 255, 102, 198, 255, 255, 121, 102, 255, 255, 236, 102, 255, 255,
];
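    // Each row copies the palette and then rotates it by one RGBA color (4 bytes),
    // producing a diagonally striped test pattern.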
let mut texture_data = [0; TEXTURE_SIZE * TEXTURE_SIZE * 4];
for y in 0..TEXTURE_SIZE {
let offset = TEXTURE_SIZE * y * 4;
texture_data[offset..(offset + TEXTURE_SIZE * 4)].copy_from_slice(&palette);
palette.rotate_right(4);
}
let mut img = Image::new_fill(
Extent3d {
width: TEXTURE_SIZE as u32,
height: TEXTURE_SIZE as u32,
depth_or_array_layers: 1,
},
TextureDimension::D2,
&texture_data,
TextureFormat::Rgba8UnormSrgb,
RenderAssetUsages::RENDER_WORLD,
);
img.sampler = ImageSampler::Descriptor(ImageSamplerDescriptor::default());
img
}

125
vendor/bevy/examples/3d/atmosphere.rs vendored Normal file

@@ -0,0 +1,125 @@
//! This example showcases pbr atmospheric scattering
use std::f32::consts::PI;
use bevy::{
core_pipeline::{bloom::Bloom, tonemapping::Tonemapping},
pbr::{light_consts::lux, Atmosphere, AtmosphereSettings, CascadeShadowConfigBuilder},
prelude::*,
render::camera::Exposure,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, (setup_camera_fog, setup_terrain_scene))
.add_systems(Update, dynamic_scene)
.run();
}
fn setup_camera_fog(mut commands: Commands) {
commands.spawn((
Camera3d::default(),
// HDR is required for atmospheric scattering to be properly applied to the scene
Camera {
hdr: true,
..default()
},
Transform::from_xyz(-1.2, 0.15, 0.0).looking_at(Vec3::Y * 0.1, Vec3::Y),
// This is the component that enables atmospheric scattering for a camera
Atmosphere::EARTH,
// The scene is in units of 10km, so we need to scale up the
// aerial view lut distance and set the scene scale accordingly.
// Most usages of this feature will not need to adjust this.
AtmosphereSettings {
aerial_view_lut_max_distance: 3.2e5,
scene_units_to_m: 1e+4,
..Default::default()
},
// The directional light illuminance used in this scene
// (the one recommended for use with this feature) is
// quite bright, so raising the exposure compensation helps
// bring the scene to a nicer brightness range.
Exposure::SUNLIGHT,
// Tonemapper chosen just because it looked good with the scene, any
// tonemapper would be fine :)
Tonemapping::AcesFitted,
// Bloom gives the sun a much more natural look.
Bloom::NATURAL,
));
}
#[derive(Component)]
struct Terrain;
fn setup_terrain_scene(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
asset_server: Res<AssetServer>,
) {
// Configure a properly scaled cascade shadow map for this scene (defaults are too large, mesh units are in km)
let cascade_shadow_config = CascadeShadowConfigBuilder {
first_cascade_far_bound: 0.3,
maximum_distance: 3.0,
..default()
}
.build();
// Sun
commands.spawn((
DirectionalLight {
shadows_enabled: true,
// lux::RAW_SUNLIGHT is recommended for use with this feature, since
// other values approximate sunlight *post-scattering* in various
// conditions. RAW_SUNLIGHT in comparison is the illuminance of the
// sun unfiltered by the atmosphere, so it is the proper input for
// sunlight to be filtered by the atmosphere.
illuminance: lux::RAW_SUNLIGHT,
..default()
},
Transform::from_xyz(1.0, -0.4, 0.0).looking_at(Vec3::ZERO, Vec3::Y),
cascade_shadow_config,
));
let sphere_mesh = meshes.add(Mesh::from(Sphere { radius: 1.0 }));
// light probe spheres
commands.spawn((
Mesh3d(sphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::WHITE,
metallic: 1.0,
perceptual_roughness: 0.0,
..default()
})),
Transform::from_xyz(-0.3, 0.1, -0.1).with_scale(Vec3::splat(0.05)),
));
commands.spawn((
Mesh3d(sphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::WHITE,
metallic: 0.0,
perceptual_roughness: 1.0,
..default()
})),
Transform::from_xyz(-0.3, 0.1, 0.1).with_scale(Vec3::splat(0.05)),
));
// Terrain
commands.spawn((
Terrain,
SceneRoot(
asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/terrain/terrain.glb")),
),
Transform::from_xyz(-1.0, 0.0, -0.5)
.with_scale(Vec3::splat(0.5))
.with_rotation(Quat::from_rotation_y(PI / 2.0)),
));
}
fn dynamic_scene(mut suns: Query<&mut Transform, With<DirectionalLight>>, time: Res<Time>) {
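    // Rotating by -PI/10 radians per second sweeps the sun direction through a full
    // revolution every 20 seconds, cycling the sky through day and night.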
suns.iter_mut()
.for_each(|mut tf| tf.rotate_x(-time.delta_secs() * PI / 10.0));
}


@@ -0,0 +1,108 @@
//! This example showcases atmospheric fog
//!
//! ## Controls
//!
//! | Key Binding | Action |
//! |:-------------------|:---------------------------------------|
//! | `Spacebar` | Toggle Atmospheric Fog |
//! | `S` | Toggle Directional Light Fog Influence |
use bevy::{
pbr::{CascadeShadowConfigBuilder, NotShadowCaster},
prelude::*,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(
Startup,
(setup_camera_fog, setup_terrain_scene, setup_instructions),
)
.add_systems(Update, toggle_system)
.run();
}
fn setup_camera_fog(mut commands: Commands) {
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-1.0, 0.1, 1.0).looking_at(Vec3::new(0.0, 0.0, 0.0), Vec3::Y),
DistanceFog {
color: Color::srgba(0.35, 0.48, 0.66, 1.0),
directional_light_color: Color::srgba(1.0, 0.95, 0.85, 0.5),
directional_light_exponent: 30.0,
falloff: FogFalloff::from_visibility_colors(
15.0, // distance in world units up to which objects retain visibility (>= 5% contrast)
Color::srgb(0.35, 0.5, 0.66), // atmospheric extinction color (after light is lost due to absorption by atmospheric particles)
Color::srgb(0.8, 0.844, 1.0), // atmospheric inscattering color (light gained due to scattering from the sun)
),
},
));
}
fn setup_terrain_scene(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
asset_server: Res<AssetServer>,
) {
// Configure a properly scaled cascade shadow map for this scene (defaults are too large, mesh units are in km)
let cascade_shadow_config = CascadeShadowConfigBuilder {
first_cascade_far_bound: 0.3,
maximum_distance: 3.0,
..default()
}
.build();
// Sun
commands.spawn((
DirectionalLight {
color: Color::srgb(0.98, 0.95, 0.82),
shadows_enabled: true,
..default()
},
Transform::from_xyz(0.0, 0.0, 0.0).looking_at(Vec3::new(-0.15, -0.05, 0.25), Vec3::Y),
cascade_shadow_config,
));
// Terrain
commands.spawn(SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/terrain/Mountains.gltf"),
)));
// Sky
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(2.0, 1.0, 1.0))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Srgba::hex("888888").unwrap().into(),
unlit: true,
cull_mode: None,
..default()
})),
Transform::from_scale(Vec3::splat(20.0)),
NotShadowCaster,
));
}
fn setup_instructions(mut commands: Commands) {
commands.spawn((Text::new("Press Spacebar to Toggle Atmospheric Fog.\nPress S to Toggle Directional Light Fog Influence."),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
})
);
}
fn toggle_system(keycode: Res<ButtonInput<KeyCode>>, mut fog: Single<&mut DistanceFog>) {
if keycode.just_pressed(KeyCode::Space) {
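        // The fog color starts fully opaque; flipping its alpha between 1.0 and 0.0
        // toggles the fog effect on and off (the alpha scales how strongly the fog
        // is applied).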
let a = fog.color.alpha();
fog.color.set_alpha(1.0 - a);
}
if keycode.just_pressed(KeyCode::KeyS) {
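        // The directional light influence color starts at alpha 0.5, so flipping
        // between 0.5 and 0.0 toggles its contribution.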
let a = fog.directional_light_color.alpha();
fog.directional_light_color.set_alpha(0.5 - a);
}
}

220
vendor/bevy/examples/3d/auto_exposure.rs vendored Normal file

@@ -0,0 +1,220 @@
//! This example showcases auto exposure,
//! which automatically (but not instantly) adjusts the brightness of the scene in a way that mimics the function of the human eye.
//! Auto exposure requires compute shader capabilities, so it's not available on WebGL.
//!
//! ## Controls
//!
//! | Key Binding | Action |
//! |:-------------------|:---------------------------------------|
//! | `Left` / `Right` | Rotate Camera |
//! | `C` | Toggle Compensation Curve |
//! | `M` | Toggle Metering Mask |
//! | `V` | Visualize Metering Mask |
use bevy::{
core_pipeline::{
auto_exposure::{AutoExposure, AutoExposureCompensationCurve, AutoExposurePlugin},
Skybox,
},
math::{cubic_splines::LinearSpline, primitives::Plane3d, vec2},
prelude::*,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_plugins(AutoExposurePlugin)
.add_systems(Startup, setup)
.add_systems(Update, example_control_system)
.run();
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut compensation_curves: ResMut<Assets<AutoExposureCompensationCurve>>,
asset_server: Res<AssetServer>,
) {
let metering_mask = asset_server.load("textures/basic_metering_mask.png");
commands.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Transform::from_xyz(1.0, 0.0, 0.0).looking_at(Vec3::ZERO, Vec3::Y),
AutoExposure {
metering_mask: metering_mask.clone(),
..default()
},
Skybox {
image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
brightness: light_consts::lux::DIRECT_SUNLIGHT,
..default()
},
));
commands.insert_resource(ExampleResources {
basic_compensation_curve: compensation_curves.add(
AutoExposureCompensationCurve::from_curve(LinearSpline::new([
vec2(-4.0, -2.0),
vec2(0.0, 0.0),
vec2(2.0, 0.0),
vec2(4.0, 2.0),
]))
.unwrap(),
),
basic_metering_mask: metering_mask.clone(),
});
let plane = meshes.add(Mesh::from(
Plane3d {
normal: -Dir3::Z,
half_size: Vec2::new(2.0, 0.5),
}
.mesh(),
));
// Build a dimly lit box around the camera, with a slot to see the bright skybox.
for level in -1..=1 {
for side in [-Vec3::X, Vec3::X, -Vec3::Z, Vec3::Z] {
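            // Leave out the middle panel on the +Z side; this gap is the slot
            // through which the bright skybox is visible.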
if level == 0 && Vec3::Z == side {
continue;
}
let height = Vec3::Y * level as f32;
commands.spawn((
Mesh3d(plane.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::srgb(
0.5 + side.x * 0.5,
0.75 - level as f32 * 0.25,
0.5 + side.z * 0.5,
),
..default()
})),
Transform::from_translation(side * 2.0 + height).looking_at(height, Vec3::Y),
));
}
}
commands.insert_resource(AmbientLight {
color: Color::WHITE,
brightness: 0.0,
..default()
});
commands.spawn((
PointLight {
intensity: 2000.0,
..default()
},
Transform::from_xyz(0.0, 0.0, 0.0),
));
commands.spawn((
ImageNode {
image: metering_mask,
..default()
},
Node {
width: Val::Percent(100.0),
height: Val::Percent(100.0),
..default()
},
));
let text_font = TextFont::default();
commands.spawn((Text::new("Left / Right - Rotate Camera\nC - Toggle Compensation Curve\nM - Toggle Metering Mask\nV - Visualize Metering Mask"),
text_font.clone(), Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
})
);
commands.spawn((
Text::default(),
text_font,
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
right: Val::Px(12.0),
..default()
},
ExampleDisplay,
));
}
#[derive(Component)]
struct ExampleDisplay;
#[derive(Resource)]
struct ExampleResources {
basic_compensation_curve: Handle<AutoExposureCompensationCurve>,
basic_metering_mask: Handle<Image>,
}
fn example_control_system(
camera: Single<(&mut Transform, &mut AutoExposure), With<Camera3d>>,
mut display: Single<&mut Text, With<ExampleDisplay>>,
mut mask_image: Single<&mut Node, With<ImageNode>>,
time: Res<Time>,
input: Res<ButtonInput<KeyCode>>,
resources: Res<ExampleResources>,
) {
let (mut camera_transform, mut auto_exposure) = camera.into_inner();
let rotation = if input.pressed(KeyCode::ArrowLeft) {
time.delta_secs()
} else if input.pressed(KeyCode::ArrowRight) {
-time.delta_secs()
} else {
0.0
};
camera_transform.rotate_around(Vec3::ZERO, Quat::from_rotation_y(rotation));
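    // For both toggles below, `Handle::default()` is used as the "off" state,
    // falling back to the component's default behavior (no custom curve or mask).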
if input.just_pressed(KeyCode::KeyC) {
auto_exposure.compensation_curve =
if auto_exposure.compensation_curve == resources.basic_compensation_curve {
Handle::default()
} else {
resources.basic_compensation_curve.clone()
};
}
if input.just_pressed(KeyCode::KeyM) {
auto_exposure.metering_mask =
if auto_exposure.metering_mask == resources.basic_metering_mask {
Handle::default()
} else {
resources.basic_metering_mask.clone()
};
}
mask_image.display = if input.pressed(KeyCode::KeyV) {
Display::Flex
} else {
Display::None
};
display.0 = format!(
"Compensation Curve: {}\nMetering Mask: {}",
if auto_exposure.compensation_curve == resources.basic_compensation_curve {
"Enabled"
} else {
"Disabled"
},
if auto_exposure.metering_mask == resources.basic_metering_mask {
"Enabled"
} else {
"Disabled"
},
);
}

324
vendor/bevy/examples/3d/blend_modes.rs vendored Normal file

@@ -0,0 +1,324 @@
//! This example showcases different blend modes.
//!
//! ## Controls
//!
//! | Key Binding | Action |
//! |:-------------------|:------------------------------------|
//! | `Up` / `Down` | Increase / Decrease Alpha |
//! | `Left` / `Right` | Rotate Camera |
//! | `H` | Toggle HDR |
//! | `Spacebar` | Toggle Unlit |
//! | `C` | Randomize Colors |
use bevy::{color::palettes::css::ORANGE, prelude::*};
use rand::random;
fn main() {
let mut app = App::new();
app.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, example_control_system);
app.run();
}
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
asset_server: Res<AssetServer>,
) {
let base_color = Color::srgb(0.9, 0.2, 0.3);
let icosphere_mesh = meshes.add(Sphere::new(0.9).mesh().ico(7).unwrap());
// Opaque
let opaque = commands
.spawn((
Mesh3d(icosphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color,
alpha_mode: AlphaMode::Opaque,
..default()
})),
Transform::from_xyz(-4.0, 0.0, 0.0),
ExampleControls {
unlit: true,
color: true,
},
))
.id();
// Blend
let blend = commands
.spawn((
Mesh3d(icosphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color,
alpha_mode: AlphaMode::Blend,
..default()
})),
Transform::from_xyz(-2.0, 0.0, 0.0),
ExampleControls {
unlit: true,
color: true,
},
))
.id();
// Premultiplied
let premultiplied = commands
.spawn((
Mesh3d(icosphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color,
alpha_mode: AlphaMode::Premultiplied,
..default()
})),
Transform::from_xyz(0.0, 0.0, 0.0),
ExampleControls {
unlit: true,
color: true,
},
))
.id();
// Add
let add = commands
.spawn((
Mesh3d(icosphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color,
alpha_mode: AlphaMode::Add,
..default()
})),
Transform::from_xyz(2.0, 0.0, 0.0),
ExampleControls {
unlit: true,
color: true,
},
))
.id();
// Multiply
let multiply = commands
.spawn((
Mesh3d(icosphere_mesh),
MeshMaterial3d(materials.add(StandardMaterial {
base_color,
alpha_mode: AlphaMode::Multiply,
..default()
})),
Transform::from_xyz(4.0, 0.0, 0.0),
ExampleControls {
unlit: true,
color: true,
},
))
.id();
// Chessboard Plane
let black_material = materials.add(Color::BLACK);
let white_material = materials.add(Color::WHITE);
let plane_mesh = meshes.add(Plane3d::default().mesh().size(2.0, 2.0));
for x in -3..4 {
for z in -3..4 {
commands.spawn((
Mesh3d(plane_mesh.clone()),
MeshMaterial3d(if (x + z) % 2 == 0 {
black_material.clone()
} else {
white_material.clone()
}),
Transform::from_xyz(x as f32 * 2.0, -1.0, z as f32 * 2.0),
ExampleControls {
unlit: false,
color: true,
},
));
}
}
// Light
commands.spawn((PointLight::default(), Transform::from_xyz(4.0, 8.0, 4.0)));
// Camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(0.0, 2.5, 10.0).looking_at(Vec3::ZERO, Vec3::Y),
// Unfortunately, MSAA and HDR are not supported simultaneously under WebGL.
// Since this example uses HDR, we must disable MSAA for Wasm builds, at least
// until WebGPU is ready and no longer behind a feature flag in Web browsers.
#[cfg(target_arch = "wasm32")]
Msaa::Off,
));
// Controls Text
// We need the full version of this font so we can use box drawing characters.
let text_style = TextFont {
font: asset_server.load("fonts/FiraMono-Medium.ttf"),
..default()
};
let label_text_style = (text_style.clone(), TextColor(ORANGE.into()));
commands.spawn((Text::new("Up / Down — Increase / Decrease Alpha\nLeft / Right — Rotate Camera\nH - Toggle HDR\nSpacebar — Toggle Unlit\nC — Randomize Colors"),
text_style.clone(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
})
);
commands.spawn((
Text::default(),
text_style,
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
right: Val::Px(12.0),
..default()
},
ExampleDisplay,
));
let mut label = |entity: Entity, label: &str| {
commands
.spawn((
Node {
position_type: PositionType::Absolute,
..default()
},
ExampleLabel { entity },
))
.with_children(|parent| {
parent.spawn((
Text::new(label),
label_text_style.clone(),
Node {
position_type: PositionType::Absolute,
bottom: Val::ZERO,
..default()
},
TextLayout::default().with_no_wrap(),
));
});
};
label(opaque, "┌─ Opaque\n\n\n\n");
label(blend, "┌─ Blend\n\n\n");
label(premultiplied, "┌─ Premultiplied\n\n");
label(add, "┌─ Add\n");
label(multiply, "┌─ Multiply");
}
#[derive(Component)]
struct ExampleControls {
unlit: bool,
color: bool,
}
#[derive(Component)]
struct ExampleLabel {
entity: Entity,
}
struct ExampleState {
alpha: f32,
unlit: bool,
}
#[derive(Component)]
struct ExampleDisplay;
impl Default for ExampleState {
fn default() -> Self {
ExampleState {
alpha: 0.9,
unlit: false,
}
}
}
fn example_control_system(
mut materials: ResMut<Assets<StandardMaterial>>,
controllable: Query<(&MeshMaterial3d<StandardMaterial>, &ExampleControls)>,
camera: Single<(&mut Camera, &mut Transform, &GlobalTransform), With<Camera3d>>,
mut labels: Query<(&mut Node, &ExampleLabel)>,
mut display: Single<&mut Text, With<ExampleDisplay>>,
labeled: Query<&GlobalTransform>,
mut state: Local<ExampleState>,
time: Res<Time>,
input: Res<ButtonInput<KeyCode>>,
) {
if input.pressed(KeyCode::ArrowUp) {
state.alpha = (state.alpha + time.delta_secs()).min(1.0);
} else if input.pressed(KeyCode::ArrowDown) {
state.alpha = (state.alpha - time.delta_secs()).max(0.0);
}
if input.just_pressed(KeyCode::Space) {
state.unlit = !state.unlit;
}
let randomize_colors = input.just_pressed(KeyCode::KeyC);
for (material_handle, controls) in &controllable {
let material = materials.get_mut(material_handle).unwrap();
if controls.color && randomize_colors {
material.base_color = Srgba {
red: random(),
green: random(),
blue: random(),
alpha: state.alpha,
}
.into();
} else {
material.base_color.set_alpha(state.alpha);
}
if controls.unlit {
material.unlit = state.unlit;
}
}
let (mut camera, mut camera_transform, camera_global_transform) = camera.into_inner();
if input.just_pressed(KeyCode::KeyH) {
camera.hdr = !camera.hdr;
}
let rotation = if input.pressed(KeyCode::ArrowLeft) {
time.delta_secs()
} else if input.pressed(KeyCode::ArrowRight) {
-time.delta_secs()
} else {
0.0
};
camera_transform.rotate_around(Vec3::ZERO, Quat::from_rotation_y(rotation));
for (mut node, label) in &mut labels {
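        // Project the labeled entity's position (raised by one unit) into viewport
        // space and pin the UI label there so it tracks the sphere on screen.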
let world_position = labeled.get(label.entity).unwrap().translation() + Vec3::Y;
let viewport_position = camera
.world_to_viewport(camera_global_transform, world_position)
.unwrap();
node.top = Val::Px(viewport_position.y);
node.left = Val::Px(viewport_position.x);
}
display.0 = format!(
" HDR: {}\nAlpha: {:.2}",
if camera.hdr { "ON " } else { "OFF" },
state.alpha
);
}

228
vendor/bevy/examples/3d/bloom_3d.rs vendored Normal file

@@ -0,0 +1,228 @@
//! Illustrates bloom post-processing using HDR and emissive materials.
use bevy::{
core_pipeline::{
bloom::{Bloom, BloomCompositeMode},
tonemapping::Tonemapping,
},
math::ops,
prelude::*,
};
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup_scene)
.add_systems(Update, (update_bloom_settings, bounce_spheres))
.run();
}
fn setup_scene(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
commands.spawn((
Camera3d::default(),
Camera {
hdr: true, // 1. HDR is required for bloom
clear_color: ClearColorConfig::Custom(Color::BLACK),
..default()
},
Tonemapping::TonyMcMapface, // 2. Using a tonemapper that desaturates to white is recommended
Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
Bloom::NATURAL, // 3. Enable bloom for the camera
));
let material_emissive1 = materials.add(StandardMaterial {
emissive: LinearRgba::rgb(0.0, 0.0, 150.0), // 4. Put something bright in a dark environment to see the effect
..default()
});
let material_emissive2 = materials.add(StandardMaterial {
emissive: LinearRgba::rgb(1000.0, 1000.0, 1000.0),
..default()
});
let material_emissive3 = materials.add(StandardMaterial {
emissive: LinearRgba::rgb(50.0, 0.0, 0.0),
..default()
});
let material_non_emissive = materials.add(StandardMaterial {
base_color: Color::BLACK,
..default()
});
let mesh = meshes.add(Sphere::new(0.4).mesh().ico(5).unwrap());
for x in -5..5 {
for z in -5..5 {
            // This generates a pseudo-random integer in the range `[0, 6)`, but deterministically so
// the same spheres are always the same colors.
let mut hasher = DefaultHasher::new();
(x, z).hash(&mut hasher);
let rand = (hasher.finish() + 3) % 6;
let (material, scale) = match rand {
0 => (material_emissive1.clone(), 0.5),
1 => (material_emissive2.clone(), 0.1),
2 => (material_emissive3.clone(), 1.0),
3..=5 => (material_non_emissive.clone(), 1.5),
_ => unreachable!(),
};
commands.spawn((
Mesh3d(mesh.clone()),
MeshMaterial3d(material),
Transform::from_xyz(x as f32 * 2.0, 0.0, z as f32 * 2.0)
.with_scale(Vec3::splat(scale)),
Bouncing,
));
}
}
// example instructions
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
// ------------------------------------------------------------------------------------------------
fn update_bloom_settings(
camera: Single<(Entity, Option<&mut Bloom>), With<Camera>>,
mut text: Single<&mut Text>,
mut commands: Commands,
keycode: Res<ButtonInput<KeyCode>>,
time: Res<Time>,
) {
let bloom = camera.into_inner();
match bloom {
(entity, Some(mut bloom)) => {
text.0 = "Bloom (Toggle: Space)\n".to_string();
text.push_str(&format!("(Q/A) Intensity: {}\n", bloom.intensity));
text.push_str(&format!(
"(W/S) Low-frequency boost: {}\n",
bloom.low_frequency_boost
));
text.push_str(&format!(
"(E/D) Low-frequency boost curvature: {}\n",
bloom.low_frequency_boost_curvature
));
text.push_str(&format!(
"(R/F) High-pass frequency: {}\n",
bloom.high_pass_frequency
));
text.push_str(&format!(
"(T/G) Mode: {}\n",
match bloom.composite_mode {
BloomCompositeMode::EnergyConserving => "Energy-conserving",
BloomCompositeMode::Additive => "Additive",
}
));
text.push_str(&format!("(Y/H) Threshold: {}\n", bloom.prefilter.threshold));
text.push_str(&format!(
"(U/J) Threshold softness: {}\n",
bloom.prefilter.threshold_softness
));
text.push_str(&format!("(I/K) Horizontal Scale: {}\n", bloom.scale.x));
if keycode.just_pressed(KeyCode::Space) {
commands.entity(entity).remove::<Bloom>();
}
let dt = time.delta_secs();
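            // Each parameter below is nudged by held keys at a rate scaled by delta
            // time, then clamped back into its valid range.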
if keycode.pressed(KeyCode::KeyA) {
bloom.intensity -= dt / 10.0;
}
if keycode.pressed(KeyCode::KeyQ) {
bloom.intensity += dt / 10.0;
}
bloom.intensity = bloom.intensity.clamp(0.0, 1.0);
if keycode.pressed(KeyCode::KeyS) {
bloom.low_frequency_boost -= dt / 10.0;
}
if keycode.pressed(KeyCode::KeyW) {
bloom.low_frequency_boost += dt / 10.0;
}
bloom.low_frequency_boost = bloom.low_frequency_boost.clamp(0.0, 1.0);
if keycode.pressed(KeyCode::KeyD) {
bloom.low_frequency_boost_curvature -= dt / 10.0;
}
if keycode.pressed(KeyCode::KeyE) {
bloom.low_frequency_boost_curvature += dt / 10.0;
}
bloom.low_frequency_boost_curvature =
bloom.low_frequency_boost_curvature.clamp(0.0, 1.0);
if keycode.pressed(KeyCode::KeyF) {
bloom.high_pass_frequency -= dt / 10.0;
}
if keycode.pressed(KeyCode::KeyR) {
bloom.high_pass_frequency += dt / 10.0;
}
bloom.high_pass_frequency = bloom.high_pass_frequency.clamp(0.0, 1.0);
if keycode.pressed(KeyCode::KeyG) {
bloom.composite_mode = BloomCompositeMode::Additive;
}
if keycode.pressed(KeyCode::KeyT) {
bloom.composite_mode = BloomCompositeMode::EnergyConserving;
}
if keycode.pressed(KeyCode::KeyH) {
bloom.prefilter.threshold -= dt;
}
if keycode.pressed(KeyCode::KeyY) {
bloom.prefilter.threshold += dt;
}
bloom.prefilter.threshold = bloom.prefilter.threshold.max(0.0);
if keycode.pressed(KeyCode::KeyJ) {
bloom.prefilter.threshold_softness -= dt / 10.0;
}
if keycode.pressed(KeyCode::KeyU) {
bloom.prefilter.threshold_softness += dt / 10.0;
}
bloom.prefilter.threshold_softness = bloom.prefilter.threshold_softness.clamp(0.0, 1.0);
if keycode.pressed(KeyCode::KeyK) {
bloom.scale.x -= dt * 2.0;
}
if keycode.pressed(KeyCode::KeyI) {
bloom.scale.x += dt * 2.0;
}
bloom.scale.x = bloom.scale.x.clamp(0.0, 8.0);
}
(entity, None) => {
text.0 = "Bloom: Off (Toggle: Space)".to_string();
if keycode.just_pressed(KeyCode::Space) {
commands.entity(entity).insert(Bloom::NATURAL);
}
}
}
}
#[derive(Component)]
struct Bouncing;
fn bounce_spheres(time: Res<Time>, mut query: Query<&mut Transform, With<Bouncing>>) {
for mut transform in query.iter_mut() {
transform.translation.y =
ops::sin(transform.translation.x + transform.translation.z + time.elapsed_secs());
}
}


@@ -0,0 +1,317 @@
//! Demonstrates different sub view effects.
//!
//! A sub view is essentially a smaller section of a larger viewport. Some use
//! cases include:
//! - Split one image across multiple cameras, for use in multi-monitor setups
//! - Magnify a section of the image by rendering a small sub view in another
//! camera
//! - Rapidly change the sub view offset to get a screen shake effect
use bevy::{
prelude::*,
render::camera::{ScalingMode, SubCameraView, Viewport},
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, (move_camera_view, resize_viewports))
.run();
}
#[derive(Debug, Component)]
struct MovingCameraMarker;
/// Set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
let transform = Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y);
// Plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
));
// Cube
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),
Transform::from_xyz(0.0, 0.5, 0.0),
));
// Light
commands.spawn((
PointLight {
shadows_enabled: true,
..default()
},
Transform::from_xyz(4.0, 8.0, 4.0),
));
// Main perspective camera:
//
// The main perspective image to use as a comparison for the sub views.
commands.spawn((
Camera3d::default(),
Camera::default(),
ExampleViewports::PerspectiveMain,
transform,
));
// Perspective camera right half:
//
// For this camera, the projection is perspective, and `size` is half the
// width of the `full_size`, while the x value of `offset` is set to half
// the value of the full width, causing the right half of the image to be
// shown. Since the viewport has an aspect ratio of 1x1 and the sub view has
// an aspect ratio of 1x2, the image appears stretched along the horizontal
// axis.
commands.spawn((
Camera3d::default(),
Camera {
sub_camera_view: Some(SubCameraView {
// The values of `full_size` and `size` do not have to be the
// exact values of your physical viewport. The important part is
// the ratio between them.
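                // For example, this configuration selects the same region as
                // `full_size: (1000, 1000)`, `size: (500, 1000)`, `offset: (500.0, 0.0)`.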
full_size: UVec2::new(10, 10),
// The `offset` is also relative to the values in `full_size`
// and `size`
offset: Vec2::new(5.0, 0.0),
size: UVec2::new(5, 10),
}),
order: 1,
..default()
},
ExampleViewports::PerspectiveStretched,
transform,
));
// Perspective camera moving:
//
// For this camera, the projection is perspective, and the offset is updated
    // continuously at 150 units per second in `move_camera_view`. Since the
// `full_size` is 500x500, the image should appear to be moving across the
// full image once every 3.3 seconds. `size` is a fifth of the size of
// `full_size`, so the image will appear zoomed in.
commands.spawn((
Camera3d::default(),
Camera {
sub_camera_view: Some(SubCameraView {
full_size: UVec2::new(500, 500),
offset: Vec2::ZERO,
size: UVec2::new(100, 100),
}),
order: 2,
..default()
},
transform,
ExampleViewports::PerspectiveMoving,
MovingCameraMarker,
));
// Perspective camera different aspect ratio:
//
// For this camera, the projection is perspective, and the aspect ratio of
// the sub view (2x1) is different to the aspect ratio of the full view
// (2x2). The aspect ratio of the sub view matches the aspect ratio of
// the viewport and should show an unstretched image of the top half of the
// full perspective image.
commands.spawn((
Camera3d::default(),
Camera {
sub_camera_view: Some(SubCameraView {
full_size: UVec2::new(800, 800),
offset: Vec2::ZERO,
size: UVec2::new(800, 400),
}),
order: 3,
..default()
},
ExampleViewports::PerspectiveControl,
transform,
));
// Main orthographic camera:
//
// The main orthographic image to use as a comparison for the sub views.
commands.spawn((
Camera3d::default(),
Projection::from(OrthographicProjection {
scaling_mode: ScalingMode::FixedVertical {
viewport_height: 6.0,
},
..OrthographicProjection::default_3d()
}),
Camera {
order: 4,
..default()
},
ExampleViewports::OrthographicMain,
transform,
));
// Orthographic camera left half:
//
// For this camera, the projection is orthographic, and `size` is half the
// width of the `full_size`, causing the left half of the image to be shown.
// Since the viewport has an aspect ratio of 1x1 and the sub view has an
// aspect ratio of 1x2, the image appears stretched along the horizontal axis.
commands.spawn((
Camera3d::default(),
Projection::from(OrthographicProjection {
scaling_mode: ScalingMode::FixedVertical {
viewport_height: 6.0,
},
..OrthographicProjection::default_3d()
}),
Camera {
sub_camera_view: Some(SubCameraView {
full_size: UVec2::new(2, 2),
offset: Vec2::ZERO,
size: UVec2::new(1, 2),
}),
order: 5,
..default()
},
ExampleViewports::OrthographicStretched,
transform,
));
// Orthographic camera moving:
//
// For this camera, the projection is orthographic, and the offset is
    // updated continuously at 150 units per second in `move_camera_view`. Since
// the `full_size` is 500x500, the image should appear to be moving across
// the full image once every 3.3 seconds. `size` is a fifth of the size of
// `full_size`, so the image will appear zoomed in.
commands.spawn((
Camera3d::default(),
Projection::from(OrthographicProjection {
scaling_mode: ScalingMode::FixedVertical {
viewport_height: 6.0,
},
..OrthographicProjection::default_3d()
}),
Camera {
sub_camera_view: Some(SubCameraView {
full_size: UVec2::new(500, 500),
offset: Vec2::ZERO,
size: UVec2::new(100, 100),
}),
order: 6,
..default()
},
transform,
ExampleViewports::OrthographicMoving,
MovingCameraMarker,
));
// Orthographic camera different aspect ratio:
//
// For this camera, the projection is orthographic, and the aspect ratio of
// the sub view (2x1) is different to the aspect ratio of the full view
// (2x2). The aspect ratio of the sub view matches the aspect ratio of
// the viewport and should show an unstretched image of the top half of the
// full orthographic image.
commands.spawn((
Camera3d::default(),
Projection::from(OrthographicProjection {
scaling_mode: ScalingMode::FixedVertical {
viewport_height: 6.0,
},
..OrthographicProjection::default_3d()
}),
Camera {
sub_camera_view: Some(SubCameraView {
full_size: UVec2::new(200, 200),
offset: Vec2::ZERO,
size: UVec2::new(200, 100),
}),
order: 7,
..default()
},
ExampleViewports::OrthographicControl,
transform,
));
}
fn move_camera_view(
mut movable_camera_query: Query<&mut Camera, With<MovingCameraMarker>>,
time: Res<Time>,
) {
for mut camera in movable_camera_query.iter_mut() {
if let Some(sub_view) = &mut camera.sub_camera_view {
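            // Sweep the offset from -50 to 400 at 150 units per second; with a
            // `full_size` of 500 and a `size` of 100 this scans the sub view across
            // (and slightly past) the full image before wrapping around.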
sub_view.offset.x = (time.elapsed_secs() * 150.) % 450.0 - 50.0;
sub_view.offset.y = sub_view.offset.x;
}
}
}
// To ensure viewports remain the same at any window size
fn resize_viewports(
window: Single<&Window, With<bevy::window::PrimaryWindow>>,
mut viewports: Query<(&mut Camera, &ExampleViewports)>,
) {
let window_size = window.physical_size();
let small_height = window_size.y / 5;
let small_width = window_size.x / 8;
let large_height = small_height * 4;
let large_width = small_width * 4;
let large_size = UVec2::new(large_width, large_height);
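    // The two large reference views share the lower portion of the window, while
    // the small sub-view viewports form a strip along the top edge.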
// Enforce the aspect ratio of the small viewports to ensure the images
// appear unstretched
let small_dim = small_height.min(small_width);
let small_size = UVec2::new(small_dim, small_dim);
let small_wide_size = UVec2::new(small_dim * 2, small_dim);
for (mut camera, example_viewport) in viewports.iter_mut() {
if camera.viewport.is_none() {
camera.viewport = Some(Viewport::default());
};
let Some(viewport) = &mut camera.viewport else {
continue;
};
let (size, position) = match example_viewport {
ExampleViewports::PerspectiveMain => (large_size, UVec2::new(0, small_height)),
ExampleViewports::PerspectiveStretched => (small_size, UVec2::ZERO),
ExampleViewports::PerspectiveMoving => (small_size, UVec2::new(small_width, 0)),
ExampleViewports::PerspectiveControl => {
(small_wide_size, UVec2::new(small_width * 2, 0))
}
ExampleViewports::OrthographicMain => {
(large_size, UVec2::new(large_width, small_height))
}
ExampleViewports::OrthographicStretched => (small_size, UVec2::new(small_width * 4, 0)),
ExampleViewports::OrthographicMoving => (small_size, UVec2::new(small_width * 5, 0)),
ExampleViewports::OrthographicControl => {
(small_wide_size, UVec2::new(small_width * 6, 0))
}
};
viewport.physical_size = size;
viewport.physical_position = position;
}
}
#[derive(Component)]
enum ExampleViewports {
PerspectiveMain,
PerspectiveStretched,
PerspectiveMoving,
PerspectiveControl,
OrthographicMain,
OrthographicStretched,
OrthographicMoving,
OrthographicControl,
}

322
vendor/bevy/examples/3d/clearcoat.rs vendored Normal file

@@ -0,0 +1,322 @@
//! Demonstrates the clearcoat PBR feature.
//!
//! Clearcoat is a separate material layer that represents a thin translucent
//! layer over a material. Examples include (from the Filament spec [1]) car paint,
//! soda cans, and lacquered wood.
//!
//! In glTF, clearcoat is supported via the `KHR_materials_clearcoat` [2]
//! extension. This extension is well supported by tools; in particular,
//! Blender's glTF exporter maps the clearcoat feature of its Principled BSDF
//! node to this extension, allowing it to appear in Bevy.
//!
//! This Bevy example is inspired by the corresponding three.js example [3].
//!
//! [1]: https://google.github.io/filament/Filament.html#materialsystem/clearcoatmodel
//!
//! [2]: https://github.com/KhronosGroup/glTF/blob/main/extensions/2.0/Khronos/KHR_materials_clearcoat/README.md
//!
//! [3]: https://threejs.org/examples/webgl_materials_physical_clearcoat.html
use std::f32::consts::PI;
use bevy::{
color::palettes::css::{BLUE, GOLD, WHITE},
core_pipeline::{tonemapping::Tonemapping::AcesFitted, Skybox},
image::ImageLoaderSettings,
math::vec3,
prelude::*,
};
/// The size of each sphere.
const SPHERE_SCALE: f32 = 0.9;
/// The speed at which the spheres rotate, in radians per second.
const SPHERE_ROTATION_SPEED: f32 = 0.8;
/// Which type of light we're using: a point light or a directional light.
#[derive(Clone, Copy, PartialEq, Resource, Default)]
enum LightMode {
#[default]
Point,
Directional,
}
/// Tags the example spheres.
#[derive(Component)]
struct ExampleSphere;
/// Entry point.
pub fn main() {
App::new()
.init_resource::<LightMode>()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, animate_light)
.add_systems(Update, animate_spheres)
.add_systems(Update, (handle_input, update_help_text).chain())
.run();
}
/// Initializes the scene.
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
asset_server: Res<AssetServer>,
light_mode: Res<LightMode>,
) {
let sphere = create_sphere_mesh(&mut meshes);
spawn_car_paint_sphere(&mut commands, &mut materials, &asset_server, &sphere);
spawn_coated_glass_bubble_sphere(&mut commands, &mut materials, &sphere);
spawn_golf_ball(&mut commands, &asset_server);
spawn_scratched_gold_ball(&mut commands, &mut materials, &asset_server, &sphere);
spawn_light(&mut commands);
spawn_camera(&mut commands, &asset_server);
spawn_text(&mut commands, &light_mode);
}
/// Generates a sphere.
fn create_sphere_mesh(meshes: &mut Assets<Mesh>) -> Handle<Mesh> {
// We're going to use normal maps, so make sure we've generated tangents, or
// else the normal maps won't show up.
let mut sphere_mesh = Sphere::new(1.0).mesh().build();
sphere_mesh
.generate_tangents()
.expect("Failed to generate tangents");
meshes.add(sphere_mesh)
}
/// Spawn a regular object with a clearcoat layer. This looks like car paint.
fn spawn_car_paint_sphere(
commands: &mut Commands,
materials: &mut Assets<StandardMaterial>,
asset_server: &AssetServer,
sphere: &Handle<Mesh>,
) {
commands
.spawn((
Mesh3d(sphere.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
clearcoat: 1.0,
clearcoat_perceptual_roughness: 0.1,
normal_map_texture: Some(asset_server.load_with_settings(
"textures/BlueNoise-Normal.png",
|settings: &mut ImageLoaderSettings| settings.is_srgb = false,
)),
metallic: 0.9,
perceptual_roughness: 0.5,
base_color: BLUE.into(),
..default()
})),
Transform::from_xyz(-1.0, 1.0, 0.0).with_scale(Vec3::splat(SPHERE_SCALE)),
))
.insert(ExampleSphere);
}
/// Spawn a semitransparent object with a clearcoat layer.
fn spawn_coated_glass_bubble_sphere(
commands: &mut Commands,
materials: &mut Assets<StandardMaterial>,
sphere: &Handle<Mesh>,
) {
commands
.spawn((
Mesh3d(sphere.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
clearcoat: 1.0,
clearcoat_perceptual_roughness: 0.1,
metallic: 0.5,
perceptual_roughness: 0.1,
base_color: Color::srgba(0.9, 0.9, 0.9, 0.3),
alpha_mode: AlphaMode::Blend,
..default()
})),
Transform::from_xyz(-1.0, -1.0, 0.0).with_scale(Vec3::splat(SPHERE_SCALE)),
))
.insert(ExampleSphere);
}
/// Spawns an object with both a clearcoat normal map (a scratched varnish) and
/// a main layer normal map (the golf ball pattern).
///
/// This object is in glTF format, using the `KHR_materials_clearcoat`
/// extension.
fn spawn_golf_ball(commands: &mut Commands, asset_server: &AssetServer) {
commands.spawn((
SceneRoot(
asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/GolfBall/GolfBall.glb")),
),
Transform::from_xyz(1.0, 1.0, 0.0).with_scale(Vec3::splat(SPHERE_SCALE)),
ExampleSphere,
));
}
/// Spawns an object with only a clearcoat normal map (a scratch pattern) and no
/// main layer normal map.
fn spawn_scratched_gold_ball(
commands: &mut Commands,
materials: &mut Assets<StandardMaterial>,
asset_server: &AssetServer,
sphere: &Handle<Mesh>,
) {
commands
.spawn((
Mesh3d(sphere.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
clearcoat: 1.0,
clearcoat_perceptual_roughness: 0.3,
clearcoat_normal_texture: Some(asset_server.load_with_settings(
"textures/ScratchedGold-Normal.png",
|settings: &mut ImageLoaderSettings| settings.is_srgb = false,
)),
metallic: 0.9,
perceptual_roughness: 0.1,
base_color: GOLD.into(),
..default()
})),
Transform::from_xyz(1.0, -1.0, 0.0).with_scale(Vec3::splat(SPHERE_SCALE)),
))
.insert(ExampleSphere);
}
/// Spawns a light.
fn spawn_light(commands: &mut Commands) {
commands.spawn(create_point_light());
}
/// Spawns a camera with associated skybox and environment map.
fn spawn_camera(commands: &mut Commands, asset_server: &AssetServer) {
commands
.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Projection::Perspective(PerspectiveProjection {
fov: 27.0 / 180.0 * PI,
..default()
}),
Transform::from_xyz(0.0, 0.0, 10.0),
AcesFitted,
))
.insert(Skybox {
brightness: 5000.0,
image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
..default()
})
.insert(EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 2000.0,
..default()
});
}
/// Spawns the help text.
fn spawn_text(commands: &mut Commands, light_mode: &LightMode) {
commands.spawn((
light_mode.create_help_text(),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
/// Moves the light around.
fn animate_light(
mut lights: Query<&mut Transform, Or<(With<PointLight>, With<DirectionalLight>)>>,
time: Res<Time>,
) {
let now = time.elapsed_secs();
for mut transform in lights.iter_mut() {
transform.translation = vec3(
ops::sin(now * 1.4),
ops::cos(now * 1.0),
ops::cos(now * 0.6),
) * vec3(3.0, 4.0, 3.0);
transform.look_at(Vec3::ZERO, Vec3::Y);
}
}
/// Rotates the spheres.
fn animate_spheres(mut spheres: Query<&mut Transform, With<ExampleSphere>>, time: Res<Time>) {
let now = time.elapsed_secs();
for mut transform in spheres.iter_mut() {
transform.rotation = Quat::from_rotation_y(SPHERE_ROTATION_SPEED * now);
}
}
/// Handles the user pressing Space to change the type of light from point to
/// directional and vice versa.
fn handle_input(
mut commands: Commands,
mut light_query: Query<Entity, Or<(With<PointLight>, With<DirectionalLight>)>>,
keyboard: Res<ButtonInput<KeyCode>>,
mut light_mode: ResMut<LightMode>,
) {
if !keyboard.just_pressed(KeyCode::Space) {
return;
}
for light in light_query.iter_mut() {
match *light_mode {
LightMode::Point => {
*light_mode = LightMode::Directional;
commands
.entity(light)
.remove::<PointLight>()
.insert(create_directional_light());
}
LightMode::Directional => {
*light_mode = LightMode::Point;
commands
.entity(light)
.remove::<DirectionalLight>()
.insert(create_point_light());
}
}
}
}
/// Updates the help text at the bottom of the screen.
fn update_help_text(mut text_query: Query<&mut Text>, light_mode: Res<LightMode>) {
for mut text in text_query.iter_mut() {
*text = light_mode.create_help_text();
}
}
/// Creates or recreates the moving point light.
fn create_point_light() -> PointLight {
PointLight {
color: WHITE.into(),
intensity: 100000.0,
..default()
}
}
/// Creates or recreates the moving directional light.
fn create_directional_light() -> DirectionalLight {
DirectionalLight {
color: WHITE.into(),
illuminance: 1000.0,
..default()
}
}
impl LightMode {
/// Creates the help text at the bottom of the screen.
fn create_help_text(&self) -> Text {
let help_text = match *self {
LightMode::Point => "Press Space to switch to a directional light",
LightMode::Directional => "Press Space to switch to a point light",
};
Text::new(help_text)
}
}


@@ -0,0 +1,553 @@
//! Demonstrates clustered decals, which affix decals to surfaces.
use std::f32::consts::{FRAC_PI_3, PI};
use std::fmt::{self, Formatter};
use std::process;
use bevy::{
color::palettes::css::{LIME, ORANGE_RED, SILVER},
input::mouse::AccumulatedMouseMotion,
pbr::{
decal::{self, clustered::ClusteredDecal},
ExtendedMaterial, MaterialExtension,
},
prelude::*,
render::{
render_resource::{AsBindGroup, ShaderRef},
renderer::{RenderAdapter, RenderDevice},
},
window::SystemCursorIcon,
winit::cursor::CursorIcon,
};
use ops::{acos, cos, sin};
use widgets::{
WidgetClickEvent, WidgetClickSender, BUTTON_BORDER, BUTTON_BORDER_COLOR,
BUTTON_BORDER_RADIUS_SIZE, BUTTON_PADDING,
};
#[path = "../helpers/widgets.rs"]
mod widgets;
/// The custom material shader that we use to demonstrate how to use the decal
/// `tag` field.
const SHADER_ASSET_PATH: &str = "shaders/custom_clustered_decal.wgsl";
/// The speed at which the cube rotates, in radians per frame.
const CUBE_ROTATION_SPEED: f32 = 0.02;
/// The speed at which the selection can be moved, in spherical coordinate
/// radians per mouse unit.
const MOVE_SPEED: f32 = 0.008;
/// The speed at which the selection can be scaled, in reciprocal mouse units.
const SCALE_SPEED: f32 = 0.05;
/// The speed at which the selection can be rolled, in radians per mouse unit.
const ROLL_SPEED: f32 = 0.01;
/// Various settings for the demo.
#[derive(Resource, Default)]
struct AppStatus {
/// The object that will be moved, scaled, or rotated when the mouse is
/// dragged.
selection: Selection,
/// What happens when the mouse is dragged: one of a move, rotate, or scale
/// operation.
drag_mode: DragMode,
}
/// The object that will be moved, scaled, or rotated when the mouse is dragged.
#[derive(Clone, Copy, Component, Default, PartialEq)]
enum Selection {
/// The camera.
///
/// The camera can only be moved, not scaled or rotated.
#[default]
Camera,
/// The first decal, which an orange bounding box surrounds.
DecalA,
/// The second decal, which a lime green bounding box surrounds.
DecalB,
}
impl fmt::Display for Selection {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
Selection::Camera => f.write_str("camera"),
Selection::DecalA => f.write_str("decal A"),
Selection::DecalB => f.write_str("decal B"),
}
}
}
/// What happens when the mouse is dragged: one of a move, rotate, or scale
/// operation.
#[derive(Clone, Copy, Component, Default, PartialEq, Debug)]
enum DragMode {
/// The mouse moves the current selection.
#[default]
Move,
/// The mouse scales the current selection.
///
/// This only applies to decals, not cameras.
Scale,
/// The mouse rotates the current selection around its local Z axis.
///
/// This only applies to decals, not cameras.
Roll,
}
impl fmt::Display for DragMode {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
DragMode::Move => f.write_str("move"),
DragMode::Scale => f.write_str("scale"),
DragMode::Roll => f.write_str("roll"),
}
}
}
/// A marker component for the help text in the top left corner of the window.
#[derive(Clone, Copy, Component)]
struct HelpText;
/// A shader extension that demonstrates how to use the `tag` field to customize
/// the appearance of your decals.
#[derive(Asset, AsBindGroup, Reflect, Debug, Clone)]
struct CustomDecalExtension {}
impl MaterialExtension for CustomDecalExtension {
fn fragment_shader() -> ShaderRef {
SHADER_ASSET_PATH.into()
}
}
/// Entry point.
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Clustered Decals Example".into(),
..default()
}),
..default()
}))
.add_plugins(MaterialPlugin::<
ExtendedMaterial<StandardMaterial, CustomDecalExtension>,
>::default())
.init_resource::<AppStatus>()
.add_event::<WidgetClickEvent<Selection>>()
.add_systems(Startup, setup)
.add_systems(Update, draw_gizmos)
.add_systems(Update, rotate_cube)
.add_systems(Update, widgets::handle_ui_interactions::<Selection>)
.add_systems(
Update,
(handle_selection_change, update_radio_buttons)
.after(widgets::handle_ui_interactions::<Selection>),
)
.add_systems(Update, process_move_input)
.add_systems(Update, process_scale_input)
.add_systems(Update, process_roll_input)
.add_systems(Update, switch_drag_mode)
.add_systems(Update, update_help_text)
.add_systems(Update, update_button_visibility)
.run();
}
/// Creates the scene.
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
app_status: Res<AppStatus>,
render_device: Res<RenderDevice>,
render_adapter: Res<RenderAdapter>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ExtendedMaterial<StandardMaterial, CustomDecalExtension>>>,
) {
// Error out if clustered decals aren't supported on the current platform.
if !decal::clustered::clustered_decals_are_usable(&render_device, &render_adapter) {
eprintln!("Clustered decals aren't usable on this platform.");
process::exit(1);
}
spawn_cube(&mut commands, &mut meshes, &mut materials);
spawn_camera(&mut commands);
spawn_light(&mut commands);
spawn_decals(&mut commands, &asset_server);
spawn_buttons(&mut commands);
spawn_help_text(&mut commands, &app_status);
}
/// Spawns the cube onto which the decals are projected.
fn spawn_cube(
commands: &mut Commands,
meshes: &mut Assets<Mesh>,
materials: &mut Assets<ExtendedMaterial<StandardMaterial, CustomDecalExtension>>,
) {
// Rotate the cube a bit just to make it more interesting.
let mut transform = Transform::IDENTITY;
transform.rotate_y(FRAC_PI_3);
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(3.0, 3.0, 3.0))),
MeshMaterial3d(materials.add(ExtendedMaterial {
base: StandardMaterial {
base_color: SILVER.into(),
..default()
},
extension: CustomDecalExtension {},
})),
transform,
));
}
/// Spawns the directional light.
fn spawn_light(commands: &mut Commands) {
commands.spawn((
DirectionalLight::default(),
Transform::from_xyz(4.0, 8.0, 4.0).looking_at(Vec3::ZERO, Vec3::Y),
));
}
/// Spawns the camera.
fn spawn_camera(commands: &mut Commands) {
commands
.spawn(Camera3d::default())
.insert(Transform::from_xyz(0.0, 2.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y))
// Tag the camera with `Selection::Camera`.
.insert(Selection::Camera);
}
/// Spawns the actual clustered decals.
fn spawn_decals(commands: &mut Commands, asset_server: &AssetServer) {
let image = asset_server.load("branding/icon.png");
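    // Both decals share the same image; the `tag` field below is what the
    // custom decal shader extension keys off to tint each decal differently.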
commands.spawn((
ClusteredDecal {
image: image.clone(),
// Tint with red.
tag: 1,
},
calculate_initial_decal_transform(vec3(1.0, 3.0, 5.0), Vec3::ZERO, Vec2::splat(1.1)),
Selection::DecalA,
));
commands.spawn((
ClusteredDecal {
image: image.clone(),
// Tint with blue.
tag: 2,
},
calculate_initial_decal_transform(vec3(-2.0, -1.0, 4.0), Vec3::ZERO, Vec2::splat(2.0)),
Selection::DecalB,
));
}
/// Spawns the buttons at the bottom of the screen.
fn spawn_buttons(commands: &mut Commands) {
// Spawn the radio buttons that allow the user to select an object to
// control.
commands
.spawn(widgets::main_ui_node())
.with_children(|parent| {
widgets::spawn_option_buttons(
parent,
"Drag to Move",
&[
(Selection::Camera, "Camera"),
(Selection::DecalA, "Decal A"),
(Selection::DecalB, "Decal B"),
],
);
});
// Spawn the drag buttons that allow the user to control the scale and roll
// of the selected object.
commands
.spawn(Node {
flex_direction: FlexDirection::Row,
position_type: PositionType::Absolute,
right: Val::Px(10.0),
bottom: Val::Px(10.0),
column_gap: Val::Px(6.0),
..default()
})
.with_children(|parent| {
spawn_drag_button(parent, "Scale").insert(DragMode::Scale);
spawn_drag_button(parent, "Roll").insert(DragMode::Roll);
});
}
/// Spawns a button that the user can drag to change a parameter.
fn spawn_drag_button<'a>(
commands: &'a mut ChildSpawnerCommands,
label: &str,
) -> EntityCommands<'a> {
let mut kid = commands.spawn(Node {
border: BUTTON_BORDER,
justify_content: JustifyContent::Center,
align_items: AlignItems::Center,
padding: BUTTON_PADDING,
..default()
});
kid.insert((
Button,
BackgroundColor(Color::BLACK),
BorderRadius::all(BUTTON_BORDER_RADIUS_SIZE),
BUTTON_BORDER_COLOR,
))
.with_children(|parent| {
widgets::spawn_ui_text(parent, label, Color::WHITE);
});
kid
}
/// Spawns the help text at the top of the screen.
fn spawn_help_text(commands: &mut Commands, app_status: &AppStatus) {
commands.spawn((
Text::new(create_help_string(app_status)),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
HelpText,
));
}
/// Draws the outlines that show the bounds of the clustered decals.
fn draw_gizmos(
mut gizmos: Gizmos,
decals: Query<(&GlobalTransform, &Selection), With<ClusteredDecal>>,
) {
for (global_transform, selection) in &decals {
let color = match *selection {
Selection::Camera => continue,
Selection::DecalA => ORANGE_RED,
Selection::DecalB => LIME,
};
gizmos.primitive_3d(
&Cuboid {
// Since the clustered decal is a 1×1×1 cube in model space, its
// half-size is half of the scaling part of its transform.
half_size: global_transform.scale() * 0.5,
},
Isometry3d {
rotation: global_transform.rotation(),
translation: global_transform.translation_vec3a(),
},
color,
);
}
}
/// Calculates the initial transform of the clustered decal.
fn calculate_initial_decal_transform(start: Vec3, looking_at: Vec3, size: Vec2) -> Transform {
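    // A clustered decal projects through a unit cube in model space (see
    // `draw_gizmos`), so scale X/Y by half the requested size, stretch Z to
    // cover the full distance from `start` to `looking_at`, and center the
    // cube at the midpoint of that segment.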
let direction = looking_at - start;
let center = start + direction * 0.5;
Transform::from_translation(center)
.with_scale((size * 0.5).extend(direction.length()))
.looking_to(direction, Vec3::Y)
}
/// Rotates the cube a bit every frame.
fn rotate_cube(mut meshes: Query<&mut Transform, With<Mesh3d>>) {
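    // The cube is the only entity in this scene with a `Mesh3d` (the decals
    // are `ClusteredDecal`s, not meshes), so this query matches just the cube.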
for mut transform in &mut meshes {
transform.rotate_y(CUBE_ROTATION_SPEED);
}
}
/// Updates the state of the radio buttons when the user clicks on one.
fn update_radio_buttons(
mut widgets: Query<(
Entity,
Option<&mut BackgroundColor>,
Has<Text>,
&WidgetClickSender<Selection>,
)>,
app_status: Res<AppStatus>,
mut writer: TextUiWriter,
) {
for (entity, maybe_bg_color, has_text, sender) in &mut widgets {
let selected = app_status.selection == **sender;
if let Some(mut bg_color) = maybe_bg_color {
widgets::update_ui_radio_button(&mut bg_color, selected);
}
if has_text {
widgets::update_ui_radio_button_text(entity, &mut writer, selected);
}
}
}
/// Changes the selection when the user clicks a radio button.
fn handle_selection_change(
mut events: EventReader<WidgetClickEvent<Selection>>,
mut app_status: ResMut<AppStatus>,
) {
for event in events.read() {
app_status.selection = **event;
}
}
/// Processes a drag event that moves the selected object.
fn process_move_input(
mut selections: Query<(&mut Transform, &Selection)>,
mouse_buttons: Res<ButtonInput<MouseButton>>,
mouse_motion: Res<AccumulatedMouseMotion>,
app_status: Res<AppStatus>,
) {
// Only process drags when movement is selected.
if !mouse_buttons.pressed(MouseButton::Left) || app_status.drag_mode != DragMode::Move {
return;
}
for (mut transform, selection) in &mut selections {
if app_status.selection != *selection {
continue;
}
let position = transform.translation;
// Convert to spherical coordinates.
let radius = position.length();
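        // The inclination `theta` is measured from the +Y axis; the azimuth
        // `phi` is the signed angle of the position's XZ projection from the
        // +X axis.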
let mut theta = acos(position.y / radius);
let mut phi = position.z.signum() * acos(position.x * position.xz().length_recip());
// Camera movement is the inverse of object movement.
let (phi_factor, theta_factor) = match *selection {
Selection::Camera => (1.0, -1.0),
Selection::DecalA | Selection::DecalB => (-1.0, 1.0),
};
// Adjust the spherical coordinates. Clamp the inclination to (0, π).
phi += phi_factor * mouse_motion.delta.x * MOVE_SPEED;
theta = f32::clamp(
theta + theta_factor * mouse_motion.delta.y * MOVE_SPEED,
0.001,
PI - 0.001,
);
// Convert spherical coordinates back to Cartesian coordinates.
transform.translation =
radius * vec3(sin(theta) * cos(phi), cos(theta), sin(theta) * sin(phi));
// Look at the center, but preserve the previous roll angle.
let roll = transform.rotation.to_euler(EulerRot::YXZ).2;
transform.look_at(Vec3::ZERO, Vec3::Y);
let (yaw, pitch, _) = transform.rotation.to_euler(EulerRot::YXZ);
transform.rotation = Quat::from_euler(EulerRot::YXZ, yaw, pitch, roll);
}
}
/// Processes a drag event that scales the selected target.
fn process_scale_input(
mut selections: Query<(&mut Transform, &Selection)>,
mouse_buttons: Res<ButtonInput<MouseButton>>,
mouse_motion: Res<AccumulatedMouseMotion>,
app_status: Res<AppStatus>,
) {
// Only process drags when the scaling operation is selected.
if !mouse_buttons.pressed(MouseButton::Left) || app_status.drag_mode != DragMode::Scale {
return;
}
for (mut transform, selection) in &mut selections {
if app_status.selection == *selection {
transform.scale *= 1.0 + mouse_motion.delta.x * SCALE_SPEED;
}
}
}
/// Processes a drag event that rotates the selected target along its local Z
/// axis.
fn process_roll_input(
mut selections: Query<(&mut Transform, &Selection)>,
mouse_buttons: Res<ButtonInput<MouseButton>>,
mouse_motion: Res<AccumulatedMouseMotion>,
app_status: Res<AppStatus>,
) {
// Only process drags when the rolling operation is selected.
if !mouse_buttons.pressed(MouseButton::Left) || app_status.drag_mode != DragMode::Roll {
return;
}
for (mut transform, selection) in &mut selections {
if app_status.selection != *selection {
continue;
}
let (yaw, pitch, mut roll) = transform.rotation.to_euler(EulerRot::YXZ);
roll += mouse_motion.delta.x * ROLL_SPEED;
transform.rotation = Quat::from_euler(EulerRot::YXZ, yaw, pitch, roll);
}
}
/// Creates the help string at the top left of the screen.
fn create_help_string(app_status: &AppStatus) -> String {
format!(
"Click and drag to {} {}",
app_status.drag_mode, app_status.selection
)
}
/// Changes the drag mode when the user hovers over the "Scale" and "Roll"
/// buttons in the lower right.
///
/// If the user is hovering over no such button, this system changes the drag
/// mode back to its default value of [`DragMode::Move`].
fn switch_drag_mode(
mut commands: Commands,
mut interactions: Query<(&Interaction, &DragMode)>,
mut windows: Query<Entity, With<Window>>,
mouse_buttons: Res<ButtonInput<MouseButton>>,
mut app_status: ResMut<AppStatus>,
) {
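    // Don't switch modes in the middle of a drag; keep the current mode until
    // the mouse button is released.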
if mouse_buttons.pressed(MouseButton::Left) {
return;
}
for (interaction, drag_mode) in &mut interactions {
if *interaction != Interaction::Hovered {
continue;
}
app_status.drag_mode = *drag_mode;
// Set the cursor to provide the user with a nice visual hint.
for window in &mut windows {
commands
.entity(window)
.insert(CursorIcon::from(SystemCursorIcon::EwResize));
}
return;
}
app_status.drag_mode = DragMode::Move;
for window in &mut windows {
commands.entity(window).remove::<CursorIcon>();
}
}
/// Updates the help text in the top left of the screen to reflect the current
/// selection and drag mode.
fn update_help_text(mut help_text: Query<&mut Text, With<HelpText>>, app_status: Res<AppStatus>) {
for mut text in &mut help_text {
text.0 = create_help_string(&app_status);
}
}
/// Updates the visibility of the drag mode buttons so that they aren't visible
/// if the camera is selected.
fn update_button_visibility(
mut nodes: Query<&mut Visibility, With<DragMode>>,
app_status: Res<AppStatus>,
) {
for mut visibility in &mut nodes {
*visibility = match app_status.selection {
Selection::Camera => Visibility::Hidden,
Selection::DecalA | Selection::DecalB => Visibility::Visible,
};
}
}

621
vendor/bevy/examples/3d/color_grading.rs vendored Normal file
View File

@@ -0,0 +1,621 @@
//! Demonstrates color grading with an interactive adjustment UI.
use std::{
f32::consts::PI,
fmt::{self, Formatter},
};
use bevy::{
ecs::system::EntityCommands,
pbr::CascadeShadowConfigBuilder,
prelude::*,
render::view::{ColorGrading, ColorGradingGlobal, ColorGradingSection},
};
use std::fmt::Display;
static FONT_PATH: &str = "fonts/FiraMono-Medium.ttf";
/// How quickly the value changes per frame.
const OPTION_ADJUSTMENT_SPEED: f32 = 0.003;
/// The color grading section that the user has selected: highlights, midtones,
/// or shadows.
#[derive(Clone, Copy, PartialEq)]
enum SelectedColorGradingSection {
Highlights,
Midtones,
Shadows,
}
/// The global option that the user has selected.
///
/// See the documentation of [`ColorGradingGlobal`] for more information about
/// each field here.
#[derive(Clone, Copy, PartialEq, Default)]
enum SelectedGlobalColorGradingOption {
#[default]
Exposure,
Temperature,
Tint,
Hue,
}
/// The section-specific option that the user has selected.
///
/// See the documentation of [`ColorGradingSection`] for more information about
/// each field here.
#[derive(Clone, Copy, PartialEq)]
enum SelectedSectionColorGradingOption {
Saturation,
Contrast,
Gamma,
Gain,
Lift,
}
/// The color grading option that the user has selected.
#[derive(Clone, Copy, PartialEq, Resource)]
enum SelectedColorGradingOption {
/// The user has selected a global color grading option: one that applies to
/// the whole image as opposed to specifically to highlights, midtones, or
/// shadows.
Global(SelectedGlobalColorGradingOption),
/// The user has selected a color grading option that applies only to
/// highlights, midtones, or shadows.
Section(
SelectedColorGradingSection,
SelectedSectionColorGradingOption,
),
}
impl Default for SelectedColorGradingOption {
fn default() -> Self {
Self::Global(default())
}
}
/// Buttons consist of three parts: the button itself, a label child, and a
/// value child. This specifies one of the three entities.
#[derive(Clone, Copy, PartialEq, Component)]
enum ColorGradingOptionWidgetType {
/// The parent button.
Button,
/// The label of the button.
Label,
/// The numerical value that the button displays.
Value,
}
#[derive(Clone, Copy, Component)]
struct ColorGradingOptionWidget {
widget_type: ColorGradingOptionWidgetType,
option: SelectedColorGradingOption,
}
/// A marker component for the help text at the top left of the screen.
#[derive(Clone, Copy, Component)]
struct HelpText;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.init_resource::<SelectedColorGradingOption>()
.add_systems(Startup, setup)
.add_systems(
Update,
(
handle_button_presses,
adjust_color_grading_option,
update_ui_state,
)
.chain(),
)
.run();
}
fn setup(
mut commands: Commands,
currently_selected_option: Res<SelectedColorGradingOption>,
asset_server: Res<AssetServer>,
) {
// Create the scene.
add_basic_scene(&mut commands, &asset_server);
// Create the root UI element.
let font = asset_server.load(FONT_PATH);
let color_grading = ColorGrading::default();
add_buttons(&mut commands, &font, &color_grading);
// Spawn help text.
add_help_text(&mut commands, &font, &currently_selected_option);
// Spawn the camera.
add_camera(&mut commands, &asset_server, color_grading);
}
/// Adds all the buttons on the bottom of the scene.
fn add_buttons(commands: &mut Commands, font: &Handle<Font>, color_grading: &ColorGrading) {
// Spawn the parent node that contains all the buttons.
commands
.spawn(Node {
flex_direction: FlexDirection::Column,
position_type: PositionType::Absolute,
row_gap: Val::Px(6.0),
left: Val::Px(12.0),
bottom: Val::Px(12.0),
..default()
})
.with_children(|parent| {
// Create the first row, which contains the global controls.
add_buttons_for_global_controls(parent, color_grading, font);
// Create the rows for individual controls.
for section in [
SelectedColorGradingSection::Highlights,
SelectedColorGradingSection::Midtones,
SelectedColorGradingSection::Shadows,
] {
add_buttons_for_section(parent, section, color_grading, font);
}
});
}
/// Adds the buttons for the global controls (those that control the scene as a
/// whole as opposed to shadows, midtones, or highlights).
fn add_buttons_for_global_controls(
parent: &mut ChildSpawnerCommands,
color_grading: &ColorGrading,
font: &Handle<Font>,
) {
// Add the parent node for the row.
parent.spawn(Node::default()).with_children(|parent| {
        // Add an empty spacer node to fill this column.
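        // Its 125 px width matches the row labels spawned in
        // `add_buttons_for_section`, keeping the button columns aligned.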
parent.spawn(Node {
width: Val::Px(125.0),
..default()
});
// Add each global color grading option button.
for option in [
SelectedGlobalColorGradingOption::Exposure,
SelectedGlobalColorGradingOption::Temperature,
SelectedGlobalColorGradingOption::Tint,
SelectedGlobalColorGradingOption::Hue,
] {
add_button_for_value(
parent,
SelectedColorGradingOption::Global(option),
color_grading,
font,
);
}
});
}
/// Adds the buttons that control color grading for individual sections
/// (highlights, midtones, shadows).
fn add_buttons_for_section(
parent: &mut ChildSpawnerCommands,
section: SelectedColorGradingSection,
color_grading: &ColorGrading,
font: &Handle<Font>,
) {
// Spawn the row container.
parent
.spawn(Node {
align_items: AlignItems::Center,
..default()
})
.with_children(|parent| {
// Spawn the label ("Highlights", etc.)
add_text(parent, &section.to_string(), font, Color::WHITE).insert(Node {
width: Val::Px(125.0),
..default()
});
// Spawn the buttons.
for option in [
SelectedSectionColorGradingOption::Saturation,
SelectedSectionColorGradingOption::Contrast,
SelectedSectionColorGradingOption::Gamma,
SelectedSectionColorGradingOption::Gain,
SelectedSectionColorGradingOption::Lift,
] {
add_button_for_value(
parent,
SelectedColorGradingOption::Section(section, option),
color_grading,
font,
);
}
});
}
/// Adds a button that controls one of the color grading values.
fn add_button_for_value(
parent: &mut ChildSpawnerCommands,
option: SelectedColorGradingOption,
color_grading: &ColorGrading,
font: &Handle<Font>,
) {
// Add the button node.
parent
.spawn((
Button,
Node {
border: UiRect::all(Val::Px(1.0)),
width: Val::Px(200.0),
justify_content: JustifyContent::Center,
align_items: AlignItems::Center,
padding: UiRect::axes(Val::Px(12.0), Val::Px(6.0)),
margin: UiRect::right(Val::Px(12.0)),
..default()
},
BorderColor(Color::WHITE),
BorderRadius::MAX,
BackgroundColor(Color::BLACK),
))
.insert(ColorGradingOptionWidget {
widget_type: ColorGradingOptionWidgetType::Button,
option,
})
.with_children(|parent| {
// Add the button label.
let label = match option {
SelectedColorGradingOption::Global(option) => option.to_string(),
SelectedColorGradingOption::Section(_, option) => option.to_string(),
};
add_text(parent, &label, font, Color::WHITE).insert(ColorGradingOptionWidget {
widget_type: ColorGradingOptionWidgetType::Label,
option,
});
// Add a spacer.
parent.spawn(Node {
flex_grow: 1.0,
..default()
});
// Add the value text.
add_text(
parent,
&format!("{:.3}", option.get(color_grading)),
font,
Color::WHITE,
)
.insert(ColorGradingOptionWidget {
widget_type: ColorGradingOptionWidgetType::Value,
option,
});
});
}
/// Creates the help text at the top of the screen.
fn add_help_text(
commands: &mut Commands,
font: &Handle<Font>,
currently_selected_option: &SelectedColorGradingOption,
) {
commands.spawn((
Text::new(create_help_text(currently_selected_option)),
TextFont {
font: font.clone(),
..default()
},
Node {
position_type: PositionType::Absolute,
left: Val::Px(12.0),
top: Val::Px(12.0),
..default()
},
HelpText,
));
}
/// Adds some text to the scene.
fn add_text<'a>(
parent: &'a mut ChildSpawnerCommands,
label: &str,
font: &Handle<Font>,
color: Color,
) -> EntityCommands<'a> {
parent.spawn((
Text::new(label),
TextFont {
font: font.clone(),
font_size: 15.0,
..default()
},
TextColor(color),
))
}
fn add_camera(commands: &mut Commands, asset_server: &AssetServer, color_grading: ColorGrading) {
commands.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Transform::from_xyz(0.7, 0.7, 1.0).looking_at(Vec3::new(0.0, 0.3, 0.0), Vec3::Y),
color_grading,
DistanceFog {
color: Color::srgb_u8(43, 44, 47),
falloff: FogFalloff::Linear {
start: 1.0,
end: 8.0,
},
..default()
},
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 2000.0,
..default()
},
));
}
fn add_basic_scene(commands: &mut Commands, asset_server: &AssetServer) {
// Spawn the main scene.
commands.spawn(SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/TonemappingTest/TonemappingTest.gltf"),
)));
// Spawn the flight helmet.
commands.spawn((
SceneRoot(
asset_server
.load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")),
),
Transform::from_xyz(0.5, 0.0, -0.5).with_rotation(Quat::from_rotation_y(-0.15 * PI)),
));
// Spawn the light.
commands.spawn((
DirectionalLight {
illuminance: 15000.0,
shadows_enabled: true,
..default()
},
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, PI * -0.15, PI * -0.15)),
CascadeShadowConfigBuilder {
maximum_distance: 3.0,
first_cascade_far_bound: 0.9,
..default()
}
.build(),
));
}
impl Display for SelectedGlobalColorGradingOption {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let name = match *self {
SelectedGlobalColorGradingOption::Exposure => "Exposure",
SelectedGlobalColorGradingOption::Temperature => "Temperature",
SelectedGlobalColorGradingOption::Tint => "Tint",
SelectedGlobalColorGradingOption::Hue => "Hue",
};
f.write_str(name)
}
}
impl Display for SelectedColorGradingSection {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let name = match *self {
SelectedColorGradingSection::Highlights => "Highlights",
SelectedColorGradingSection::Midtones => "Midtones",
SelectedColorGradingSection::Shadows => "Shadows",
};
f.write_str(name)
}
}
impl Display for SelectedSectionColorGradingOption {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let name = match *self {
SelectedSectionColorGradingOption::Saturation => "Saturation",
SelectedSectionColorGradingOption::Contrast => "Contrast",
SelectedSectionColorGradingOption::Gamma => "Gamma",
SelectedSectionColorGradingOption::Gain => "Gain",
SelectedSectionColorGradingOption::Lift => "Lift",
};
f.write_str(name)
}
}
impl Display for SelectedColorGradingOption {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
SelectedColorGradingOption::Global(option) => write!(f, "\"{option}\""),
SelectedColorGradingOption::Section(section, option) => {
write!(f, "\"{option}\" for \"{section}\"")
}
}
}
}
impl SelectedSectionColorGradingOption {
/// Returns the appropriate value in the given color grading section.
fn get(&self, section: &ColorGradingSection) -> f32 {
match *self {
SelectedSectionColorGradingOption::Saturation => section.saturation,
SelectedSectionColorGradingOption::Contrast => section.contrast,
SelectedSectionColorGradingOption::Gamma => section.gamma,
SelectedSectionColorGradingOption::Gain => section.gain,
SelectedSectionColorGradingOption::Lift => section.lift,
}
}
fn set(&self, section: &mut ColorGradingSection, value: f32) {
match *self {
SelectedSectionColorGradingOption::Saturation => section.saturation = value,
SelectedSectionColorGradingOption::Contrast => section.contrast = value,
SelectedSectionColorGradingOption::Gamma => section.gamma = value,
SelectedSectionColorGradingOption::Gain => section.gain = value,
SelectedSectionColorGradingOption::Lift => section.lift = value,
}
}
}
impl SelectedGlobalColorGradingOption {
/// Returns the appropriate value in the given set of global color grading
/// values.
fn get(&self, global: &ColorGradingGlobal) -> f32 {
match *self {
SelectedGlobalColorGradingOption::Exposure => global.exposure,
SelectedGlobalColorGradingOption::Temperature => global.temperature,
SelectedGlobalColorGradingOption::Tint => global.tint,
SelectedGlobalColorGradingOption::Hue => global.hue,
}
}
/// Sets the appropriate value in the given set of global color grading
/// values.
fn set(&self, global: &mut ColorGradingGlobal, value: f32) {
match *self {
SelectedGlobalColorGradingOption::Exposure => global.exposure = value,
SelectedGlobalColorGradingOption::Temperature => global.temperature = value,
SelectedGlobalColorGradingOption::Tint => global.tint = value,
SelectedGlobalColorGradingOption::Hue => global.hue = value,
}
}
}
impl SelectedColorGradingOption {
/// Returns the appropriate value in the given set of color grading values.
fn get(&self, color_grading: &ColorGrading) -> f32 {
match self {
SelectedColorGradingOption::Global(option) => option.get(&color_grading.global),
SelectedColorGradingOption::Section(
SelectedColorGradingSection::Highlights,
option,
) => option.get(&color_grading.highlights),
SelectedColorGradingOption::Section(SelectedColorGradingSection::Midtones, option) => {
option.get(&color_grading.midtones)
}
SelectedColorGradingOption::Section(SelectedColorGradingSection::Shadows, option) => {
option.get(&color_grading.shadows)
}
}
}
/// Sets the appropriate value in the given set of color grading values.
fn set(&self, color_grading: &mut ColorGrading, value: f32) {
match self {
SelectedColorGradingOption::Global(option) => {
option.set(&mut color_grading.global, value);
}
SelectedColorGradingOption::Section(
SelectedColorGradingSection::Highlights,
option,
) => option.set(&mut color_grading.highlights, value),
SelectedColorGradingOption::Section(SelectedColorGradingSection::Midtones, option) => {
option.set(&mut color_grading.midtones, value);
}
SelectedColorGradingOption::Section(SelectedColorGradingSection::Shadows, option) => {
option.set(&mut color_grading.shadows, value);
}
}
}
}
/// Handles mouse clicks on the buttons, updating the currently selected
/// option when the user clicks a new one.
fn handle_button_presses(
mut interactions: Query<(&Interaction, &ColorGradingOptionWidget), Changed<Interaction>>,
mut currently_selected_option: ResMut<SelectedColorGradingOption>,
) {
for (interaction, widget) in interactions.iter_mut() {
if widget.widget_type == ColorGradingOptionWidgetType::Button
&& *interaction == Interaction::Pressed
{
*currently_selected_option = widget.option;
}
}
}
/// Updates the UI to reflect the currently selected option and the current
/// color grading values.
fn update_ui_state(
mut buttons: Query<(
&mut BackgroundColor,
&mut BorderColor,
&ColorGradingOptionWidget,
)>,
button_text: Query<(Entity, &ColorGradingOptionWidget), (With<Text>, Without<HelpText>)>,
help_text: Single<Entity, With<HelpText>>,
mut writer: TextUiWriter,
cameras: Single<Ref<ColorGrading>>,
currently_selected_option: Res<SelectedColorGradingOption>,
) {
// Exit early if the UI didn't change
if !currently_selected_option.is_changed() && !cameras.is_changed() {
return;
}
// The currently-selected option is drawn with inverted colors.
for (mut background, mut border_color, widget) in buttons.iter_mut() {
if *currently_selected_option == widget.option {
*background = Color::WHITE.into();
*border_color = Color::BLACK.into();
} else {
*background = Color::BLACK.into();
*border_color = Color::WHITE.into();
}
}
let value_label = format!("{:.3}", currently_selected_option.get(cameras.as_ref()));
// Update the buttons.
for (entity, widget) in button_text.iter() {
// Set the text color.
let color = if *currently_selected_option == widget.option {
Color::BLACK
} else {
Color::WHITE
};
writer.for_each_color(entity, |mut text_color| {
text_color.0 = color;
});
// Update the displayed value, if this is the currently-selected option.
if widget.widget_type == ColorGradingOptionWidgetType::Value
&& *currently_selected_option == widget.option
{
writer.for_each_text(entity, |mut text| {
text.clone_from(&value_label);
});
}
}
// Update the help text.
*writer.text(*help_text, 0) = create_help_text(&currently_selected_option);
}
/// Creates the help text at the top left of the window.
fn create_help_text(currently_selected_option: &SelectedColorGradingOption) -> String {
format!("Press Left/Right to adjust {currently_selected_option}")
}
/// Processes keyboard input to change the value of the currently-selected color
/// grading option.
fn adjust_color_grading_option(
mut color_grading: Single<&mut ColorGrading>,
input: Res<ButtonInput<KeyCode>>,
currently_selected_option: Res<SelectedColorGradingOption>,
) {
let mut delta = 0.0;
if input.pressed(KeyCode::ArrowLeft) {
delta -= OPTION_ADJUSTMENT_SPEED;
}
if input.pressed(KeyCode::ArrowRight) {
delta += OPTION_ADJUSTMENT_SPEED;
}
if delta != 0.0 {
let new_value = currently_selected_option.get(color_grading.as_ref()) + delta;
currently_selected_option.set(&mut color_grading, new_value);
}
}

96
vendor/bevy/examples/3d/decal.rs vendored Normal file
View File

@@ -0,0 +1,96 @@
//! Decal rendering.
#[path = "../helpers/camera_controller.rs"]
mod camera_controller;
use bevy::{
core_pipeline::prepass::DepthPrepass,
pbr::decal::{ForwardDecal, ForwardDecalMaterial, ForwardDecalMaterialExt},
prelude::*,
};
use camera_controller::{CameraController, CameraControllerPlugin};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
fn main() {
App::new()
.add_plugins((DefaultPlugins, CameraControllerPlugin))
.add_systems(Startup, setup)
.run();
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut standard_materials: ResMut<Assets<StandardMaterial>>,
mut decal_standard_materials: ResMut<Assets<ForwardDecalMaterial<StandardMaterial>>>,
asset_server: Res<AssetServer>,
) {
// Spawn the forward decal
commands.spawn((
Name::new("Decal"),
ForwardDecal,
MeshMaterial3d(decal_standard_materials.add(ForwardDecalMaterial {
base: StandardMaterial {
base_color_texture: Some(asset_server.load("textures/uv_checker_bw.png")),
..default()
},
extension: ForwardDecalMaterialExt {
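                // Roughly: how quickly the decal's alpha fades based on the
                // depth difference between the decal and the geometry beneath
                // it (sampled from the depth prepass).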
depth_fade_factor: 1.0,
},
})),
Transform::from_scale(Vec3::splat(4.0)),
));
commands.spawn((
Name::new("Camera"),
Camera3d::default(),
CameraController::default(),
DepthPrepass, // Must enable the depth prepass to render forward decals
Transform::from_xyz(2.0, 9.5, 2.5).looking_at(Vec3::ZERO, Vec3::Y),
));
let white_material = standard_materials.add(Color::WHITE);
commands.spawn((
Name::new("Floor"),
Mesh3d(meshes.add(Rectangle::from_length(10.0))),
MeshMaterial3d(white_material.clone()),
Transform::from_rotation(Quat::from_rotation_x(-std::f32::consts::FRAC_PI_2)),
));
    // Spawn a few cubes with random rotations to showcase how the decals behave with non-flat geometry
let num_obs = 10;
let mut rng = ChaCha8Rng::seed_from_u64(19878367467713);
for i in 0..num_obs {
for j in 0..num_obs {
let rotation_axis: [f32; 3] = rng.r#gen();
let rotation_vec: Vec3 = rotation_axis.into();
let rotation: u32 = rng.gen_range(0..360);
let transform = Transform::from_xyz(
(-num_obs + 1) as f32 / 2.0 + i as f32,
-0.2,
(-num_obs + 1) as f32 / 2.0 + j as f32,
)
.with_rotation(Quat::from_axis_angle(
rotation_vec.normalize_or_zero(),
(rotation as f32).to_radians(),
));
commands.spawn((
Mesh3d(meshes.add(Cuboid::from_length(0.6))),
MeshMaterial3d(white_material.clone()),
transform,
));
}
}
commands.spawn((
Name::new("Light"),
PointLight {
shadows_enabled: true,
..default()
},
Transform::from_xyz(4.0, 8.0, 4.0),
));
}

View File

@@ -0,0 +1,371 @@
//! This example compares Forward, Forward + Prepass, and Deferred rendering.
use std::f32::consts::*;
use bevy::{
core_pipeline::{
fxaa::Fxaa,
prepass::{DeferredPrepass, DepthPrepass, MotionVectorPrepass, NormalPrepass},
},
image::ImageLoaderSettings,
math::ops,
pbr::{
CascadeShadowConfigBuilder, DefaultOpaqueRendererMethod, DirectionalLightShadowMap,
NotShadowCaster, NotShadowReceiver, OpaqueRendererMethod,
},
prelude::*,
};
fn main() {
App::new()
.insert_resource(DefaultOpaqueRendererMethod::deferred())
.insert_resource(DirectionalLightShadowMap { size: 4096 })
.add_plugins(DefaultPlugins)
.insert_resource(Pause(true))
.add_systems(Startup, (setup, setup_parallax))
.add_systems(Update, (animate_light_direction, switch_mode, spin))
.run();
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut meshes: ResMut<Assets<Mesh>>,
) {
commands.spawn((
Camera3d::default(),
Camera {
            // Deferred rendering supports both hdr: true and hdr: false
hdr: false,
..default()
},
Transform::from_xyz(0.7, 0.7, 1.0).looking_at(Vec3::new(0.0, 0.3, 0.0), Vec3::Y),
// MSAA needs to be off for Deferred rendering
Msaa::Off,
DistanceFog {
color: Color::srgb_u8(43, 44, 47),
falloff: FogFalloff::Linear {
start: 1.0,
end: 8.0,
},
..default()
},
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 2000.0,
..default()
},
DepthPrepass,
MotionVectorPrepass,
DeferredPrepass,
Fxaa::default(),
));
commands.spawn((
DirectionalLight {
illuminance: 15_000.,
shadows_enabled: true,
..default()
},
CascadeShadowConfigBuilder {
num_cascades: 3,
maximum_distance: 10.0,
..default()
}
.build(),
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, 0.0, -FRAC_PI_4)),
));
// FlightHelmet
let helmet_scene = asset_server
.load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf"));
commands.spawn(SceneRoot(helmet_scene.clone()));
commands.spawn((
SceneRoot(helmet_scene),
Transform::from_xyz(-4.0, 0.0, -3.0),
));
let mut forward_mat: StandardMaterial = Color::srgb(0.1, 0.2, 0.1).into();
forward_mat.opaque_render_method = OpaqueRendererMethod::Forward;
let forward_mat_h = materials.add(forward_mat);
// Plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(50.0, 50.0))),
MeshMaterial3d(forward_mat_h.clone()),
));
let cube_h = meshes.add(Cuboid::new(0.1, 0.1, 0.1));
let sphere_h = meshes.add(Sphere::new(0.125).mesh().uv(32, 18));
// Cubes
commands.spawn((
Mesh3d(cube_h.clone()),
MeshMaterial3d(forward_mat_h.clone()),
Transform::from_xyz(-0.3, 0.5, -0.2),
));
commands.spawn((
Mesh3d(cube_h),
MeshMaterial3d(forward_mat_h),
Transform::from_xyz(0.2, 0.5, 0.2),
));
let sphere_color = Color::srgb(10.0, 4.0, 1.0);
let sphere_pos = Transform::from_xyz(0.4, 0.5, -0.8);
// Emissive sphere
let mut unlit_mat: StandardMaterial = sphere_color.into();
unlit_mat.unlit = true;
commands.spawn((
Mesh3d(sphere_h.clone()),
MeshMaterial3d(materials.add(unlit_mat)),
sphere_pos,
NotShadowCaster,
));
// Light
commands.spawn((
PointLight {
intensity: 800.0,
radius: 0.125,
shadows_enabled: true,
color: sphere_color,
..default()
},
sphere_pos,
));
// Spheres
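    // Two sets of three spheres: one fully saturated (`s_val = 0.0`), one
    // slightly desaturated (`s_val = 0.2`), tinted blue, green, and red.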
for i in 0..6 {
let j = i % 3;
let s_val = if i < 3 { 0.0 } else { 0.2 };
let material = if j == 0 {
materials.add(StandardMaterial {
base_color: Color::srgb(s_val, s_val, 1.0),
perceptual_roughness: 0.089,
metallic: 0.0,
..default()
})
} else if j == 1 {
materials.add(StandardMaterial {
base_color: Color::srgb(s_val, 1.0, s_val),
perceptual_roughness: 0.089,
metallic: 0.0,
..default()
})
} else {
materials.add(StandardMaterial {
base_color: Color::srgb(1.0, s_val, s_val),
perceptual_roughness: 0.089,
metallic: 0.0,
..default()
})
};
commands.spawn((
Mesh3d(sphere_h.clone()),
MeshMaterial3d(material),
Transform::from_xyz(
j as f32 * 0.25 + if i < 3 { -0.15 } else { 0.15 } - 0.4,
0.125,
-j as f32 * 0.25 + if i < 3 { -0.15 } else { 0.15 } + 0.4,
),
));
}
// sky
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(2.0, 1.0, 1.0))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Srgba::hex("888888").unwrap().into(),
unlit: true,
cull_mode: None,
..default()
})),
Transform::from_scale(Vec3::splat(1_000_000.0)),
NotShadowCaster,
NotShadowReceiver,
));
// Example instructions
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
#[derive(Resource)]
struct Pause(bool);
fn animate_light_direction(
time: Res<Time>,
mut query: Query<&mut Transform, With<DirectionalLight>>,
pause: Res<Pause>,
) {
if pause.0 {
return;
}
for mut transform in &mut query {
transform.rotate_y(time.delta_secs() * PI / 5.0);
}
}
fn setup_parallax(
mut commands: Commands,
mut materials: ResMut<Assets<StandardMaterial>>,
mut meshes: ResMut<Assets<Mesh>>,
asset_server: Res<AssetServer>,
) {
// The normal map. Note that to generate it in the GIMP image editor, you should
    // open the depth map, and do Filters → Generic → Normal Map.
// You should enable the "flip X" checkbox.
let normal_handle = asset_server.load_with_settings(
"textures/parallax_example/cube_normal.png",
// The normal map texture is in linear color space. Lighting won't look correct
// if `is_srgb` is `true`, which is the default.
|settings: &mut ImageLoaderSettings| settings.is_srgb = false,
);
let mut cube = Mesh::from(Cuboid::new(0.15, 0.15, 0.15));
// NOTE: for normal maps and depth maps to work, the mesh
// needs tangents generated.
cube.generate_tangents().unwrap();
let parallax_material = materials.add(StandardMaterial {
perceptual_roughness: 0.4,
base_color_texture: Some(asset_server.load("textures/parallax_example/cube_color.png")),
normal_map_texture: Some(normal_handle),
// The depth map is a grayscale texture where black is the highest level and
// white the lowest.
depth_map: Some(asset_server.load("textures/parallax_example/cube_depth.png")),
parallax_depth_scale: 0.09,
parallax_mapping_method: ParallaxMappingMethod::Relief { max_steps: 4 },
max_parallax_layer_count: ops::exp2(5.0f32),
..default()
});
commands.spawn((
Mesh3d(meshes.add(cube)),
MeshMaterial3d(parallax_material),
Transform::from_xyz(0.4, 0.2, -0.8),
Spin { speed: 0.3 },
));
}
#[derive(Component)]
struct Spin {
speed: f32,
}
fn spin(time: Res<Time>, mut query: Query<(&mut Transform, &Spin)>, pause: Res<Pause>) {
if pause.0 {
return;
}
for (mut transform, spin) in query.iter_mut() {
transform.rotate_local_y(spin.speed * time.delta_secs());
transform.rotate_local_x(spin.speed * time.delta_secs());
transform.rotate_local_z(-spin.speed * time.delta_secs());
}
}
#[derive(Resource, Default)]
enum DefaultRenderMode {
#[default]
Deferred,
Forward,
ForwardPrepass,
}
fn switch_mode(
mut text: Single<&mut Text>,
mut commands: Commands,
keys: Res<ButtonInput<KeyCode>>,
mut default_opaque_renderer_method: ResMut<DefaultOpaqueRendererMethod>,
mut materials: ResMut<Assets<StandardMaterial>>,
cameras: Query<Entity, With<Camera>>,
mut pause: ResMut<Pause>,
mut hide_ui: Local<bool>,
mut mode: Local<DefaultRenderMode>,
) {
text.clear();
if keys.just_pressed(KeyCode::Space) {
pause.0 = !pause.0;
}
if keys.just_pressed(KeyCode::Digit1) {
*mode = DefaultRenderMode::Deferred;
default_opaque_renderer_method.set_to_deferred();
println!("DefaultOpaqueRendererMethod: Deferred");
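        // Mutably iterating the materials marks every one of them as
        // modified, which forces their pipelines to be re-specialized for the
        // newly selected renderer method; the branches below do the same.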
for _ in materials.iter_mut() {}
for camera in &cameras {
commands.entity(camera).remove::<NormalPrepass>();
commands.entity(camera).insert(DepthPrepass);
commands.entity(camera).insert(MotionVectorPrepass);
commands.entity(camera).insert(DeferredPrepass);
}
}
if keys.just_pressed(KeyCode::Digit2) {
*mode = DefaultRenderMode::Forward;
default_opaque_renderer_method.set_to_forward();
println!("DefaultOpaqueRendererMethod: Forward");
for _ in materials.iter_mut() {}
for camera in &cameras {
commands.entity(camera).remove::<NormalPrepass>();
commands.entity(camera).remove::<DepthPrepass>();
commands.entity(camera).remove::<MotionVectorPrepass>();
commands.entity(camera).remove::<DeferredPrepass>();
}
}
if keys.just_pressed(KeyCode::Digit3) {
*mode = DefaultRenderMode::ForwardPrepass;
default_opaque_renderer_method.set_to_forward();
println!("DefaultOpaqueRendererMethod: Forward + Prepass");
for _ in materials.iter_mut() {}
for camera in &cameras {
commands.entity(camera).insert(NormalPrepass);
commands.entity(camera).insert(DepthPrepass);
commands.entity(camera).insert(MotionVectorPrepass);
commands.entity(camera).remove::<DeferredPrepass>();
}
}
if keys.just_pressed(KeyCode::KeyH) {
*hide_ui = !*hide_ui;
}
if !*hide_ui {
text.push_str("(H) Hide UI\n");
text.push_str("(Space) Play/Pause\n\n");
text.push_str("Rendering Method:\n");
text.push_str(&format!(
"(1) {} Deferred\n",
if let DefaultRenderMode::Deferred = *mode {
">"
} else {
""
}
));
text.push_str(&format!(
"(2) {} Forward\n",
if let DefaultRenderMode::Forward = *mode {
">"
} else {
""
}
));
text.push_str(&format!(
"(3) {} Forward + Prepass\n",
if let DefaultRenderMode::ForwardPrepass = *mode {
">"
} else {
""
}
));
}
}

View File

@@ -0,0 +1,263 @@
//! Demonstrates depth of field (DOF).
//!
//! The depth of field effect simulates the blur that a real camera produces on
//! objects that are out of focus.
//!
//! The test scene is inspired by [a blog post on depth of field in Unity].
//! However, the technique used in Bevy has little to do with that blog post,
//! and all the assets are original.
//!
//! [a blog post on depth of field in Unity]: https://catlikecoding.com/unity/tutorials/advanced-rendering/depth-of-field/
use bevy::{
core_pipeline::{
bloom::Bloom,
dof::{self, DepthOfField, DepthOfFieldMode},
tonemapping::Tonemapping,
},
pbr::Lightmap,
prelude::*,
render::camera::PhysicalCameraParameters,
};
/// The increments in which the user can adjust the focal distance, in meters
/// per frame.
const FOCAL_DISTANCE_SPEED: f32 = 0.05;
/// The increments in which the user can adjust the f-number, in units per frame.
const APERTURE_F_STOP_SPEED: f32 = 0.01;
/// The minimum distance that we allow the user to focus on.
const MIN_FOCAL_DISTANCE: f32 = 0.01;
/// The minimum f-number that we allow the user to set.
const MIN_APERTURE_F_STOPS: f32 = 0.05;
/// A resource that stores the settings that the user can change.
#[derive(Clone, Copy, Resource)]
struct AppSettings {
    /// The distance from the camera to the area in sharpest focus.
focal_distance: f32,
/// The [f-number]. Lower numbers cause objects outside the focal distance
/// to be blurred more.
///
/// [f-number]: https://en.wikipedia.org/wiki/F-number
aperture_f_stops: f32,
/// Whether depth of field is on, and, if so, whether we're in Gaussian or
/// bokeh mode.
mode: Option<DepthOfFieldMode>,
}
fn main() {
App::new()
.init_resource::<AppSettings>()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Depth of Field Example".to_string(),
..default()
}),
..default()
}))
.add_systems(Startup, setup)
.add_systems(Update, tweak_scene)
.add_systems(
Update,
(adjust_focus, change_mode, update_dof_settings, update_text).chain(),
)
.run();
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>, app_settings: Res<AppSettings>) {
// Spawn the camera. Enable HDR and bloom, as that highlights the depth of
// field effect.
let mut camera = commands.spawn((
Camera3d::default(),
Transform::from_xyz(0.0, 4.5, 8.25).looking_at(Vec3::ZERO, Vec3::Y),
Camera {
hdr: true,
..default()
},
Tonemapping::TonyMcMapface,
Bloom::NATURAL,
));
// Insert the depth of field settings.
if let Some(depth_of_field) = Option::<DepthOfField>::from(*app_settings) {
camera.insert(depth_of_field);
}
// Spawn the scene.
commands.spawn(SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/DepthOfFieldExample/DepthOfFieldExample.glb"),
)));
// Spawn the help text.
commands.spawn((
create_text(&app_settings),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
/// Adjusts the focal distance and f-number per user inputs.
fn adjust_focus(input: Res<ButtonInput<KeyCode>>, mut app_settings: ResMut<AppSettings>) {
// Change the focal distance if the user requested.
let distance_delta = if input.pressed(KeyCode::ArrowDown) {
-FOCAL_DISTANCE_SPEED
} else if input.pressed(KeyCode::ArrowUp) {
FOCAL_DISTANCE_SPEED
} else {
0.0
};
// Change the f-number if the user requested.
let f_stop_delta = if input.pressed(KeyCode::ArrowLeft) {
-APERTURE_F_STOP_SPEED
} else if input.pressed(KeyCode::ArrowRight) {
APERTURE_F_STOP_SPEED
} else {
0.0
};
app_settings.focal_distance =
(app_settings.focal_distance + distance_delta).max(MIN_FOCAL_DISTANCE);
app_settings.aperture_f_stops =
(app_settings.aperture_f_stops + f_stop_delta).max(MIN_APERTURE_F_STOPS);
}
/// Changes the depth of field mode (Gaussian, bokeh, off) per user inputs.
fn change_mode(input: Res<ButtonInput<KeyCode>>, mut app_settings: ResMut<AppSettings>) {
if !input.just_pressed(KeyCode::Space) {
return;
}
app_settings.mode = match app_settings.mode {
Some(DepthOfFieldMode::Bokeh) => Some(DepthOfFieldMode::Gaussian),
Some(DepthOfFieldMode::Gaussian) => None,
None => Some(DepthOfFieldMode::Bokeh),
}
}
impl Default for AppSettings {
fn default() -> Self {
Self {
// Objects 7 meters away will be in full focus.
focal_distance: 7.0,
// Set a nice blur level.
//
// This is a really low F-number, but we want to demonstrate the
// effect, even if it's kind of unrealistic.
aperture_f_stops: 1.0 / 8.0,
// Turn on bokeh by default, as it's the nicest-looking technique.
mode: Some(DepthOfFieldMode::Bokeh),
}
}
}
/// Writes the depth of field settings into the camera.
fn update_dof_settings(
mut commands: Commands,
view_targets: Query<Entity, With<Camera>>,
app_settings: Res<AppSettings>,
) {
let depth_of_field: Option<DepthOfField> = (*app_settings).into();
for view in view_targets.iter() {
match depth_of_field {
None => {
commands.entity(view).remove::<DepthOfField>();
}
Some(depth_of_field) => {
commands.entity(view).insert(depth_of_field);
}
}
}
}
/// Makes one-time adjustments to the scene that can't be encoded in glTF.
fn tweak_scene(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut lights: Query<&mut DirectionalLight, Changed<DirectionalLight>>,
mut named_entities: Query<
(Entity, &Name, &MeshMaterial3d<StandardMaterial>),
(With<Mesh3d>, Without<Lightmap>),
>,
) {
// Turn on shadows.
for mut light in lights.iter_mut() {
light.shadows_enabled = true;
}
// Add a nice lightmap to the circuit board.
for (entity, name, material) in named_entities.iter_mut() {
if &**name == "CircuitBoard" {
materials.get_mut(material).unwrap().lightmap_exposure = 10000.0;
commands.entity(entity).insert(Lightmap {
image: asset_server.load("models/DepthOfFieldExample/CircuitBoardLightmap.hdr"),
..default()
});
}
}
}
/// Updates the help text entity per the current app settings.
fn update_text(mut texts: Query<&mut Text>, app_settings: Res<AppSettings>) {
for mut text in texts.iter_mut() {
*text = create_text(&app_settings);
}
}
/// Regenerates the app text component per the current app settings.
fn create_text(app_settings: &AppSettings) -> Text {
app_settings.help_text().into()
}
impl From<AppSettings> for Option<DepthOfField> {
fn from(app_settings: AppSettings) -> Self {
app_settings.mode.map(|mode| DepthOfField {
mode,
focal_distance: app_settings.focal_distance,
aperture_f_stops: app_settings.aperture_f_stops,
max_depth: 14.0,
..default()
})
}
}
impl AppSettings {
/// Builds the help text.
fn help_text(&self) -> String {
let Some(mode) = self.mode else {
return "Mode: Off (Press Space to change)".to_owned();
};
// We leave these as their defaults, so we don't need to store them in
// the app settings and can just fetch them from the default camera
// parameters.
let sensor_height = PhysicalCameraParameters::default().sensor_height;
let fov = PerspectiveProjection::default().fov;
format!(
"Focal distance: {} m (Press Up/Down to change)
Aperture F-stops: f/{} (Press Left/Right to change)
Sensor height: {}mm
Focal length: {}mm
Mode: {} (Press Space to change)",
self.focal_distance,
self.aperture_f_stops,
sensor_height * 1000.0,
dof::calculate_focal_length(sensor_height, fov) * 1000.0,
match mode {
DepthOfFieldMode::Bokeh => "Bokeh",
DepthOfFieldMode::Gaussian => "Gaussian",
}
)
}
}

View File

@@ -0,0 +1,93 @@
//! Showcases how to change the material of a `Scene` spawned from a Gltf
use bevy::{
app::{App, PluginGroup, Startup},
asset::{AssetServer, Assets},
audio::AudioPlugin,
color::{palettes, Color},
gltf::GltfAssetLabel,
math::{Dir3, Vec3},
pbr::{DirectionalLight, MeshMaterial3d, StandardMaterial},
prelude::{Camera3d, Children, Commands, Component, Query, Res, ResMut, Transform, Trigger},
scene::{SceneInstanceReady, SceneRoot},
DefaultPlugins,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins.build().disable::<AudioPlugin>())
.add_systems(Startup, setup_scene)
.add_observer(change_material)
.run();
}
/// This is added to a [`SceneRoot`] and will cause the [`StandardMaterial::base_color`]
/// of all materials to be overwritten.
#[derive(Component)]
struct ColorOverride(Color);
fn setup_scene(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn((
Camera3d::default(),
Transform::from_xyz(0., 1., 2.5).looking_at(Vec3::new(0., 0.25, 0.), Dir3::Y),
));
commands.spawn((
DirectionalLight::default(),
Transform::from_xyz(0., 1., 0.25).looking_at(Vec3::ZERO, Dir3::Y),
));
// FlightHelmet handle
let flight_helmet = asset_server
.load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf"));
// This model will keep its original materials
commands.spawn(SceneRoot(flight_helmet.clone()));
// This model will be tinted red
commands.spawn((
SceneRoot(flight_helmet.clone()),
Transform::from_xyz(-1.25, 0., 0.),
ColorOverride(palettes::tailwind::RED_300.into()),
));
// This model will be tinted green
commands.spawn((
SceneRoot(flight_helmet),
Transform::from_xyz(1.25, 0., 0.),
ColorOverride(palettes::tailwind::GREEN_300.into()),
));
}
fn change_material(
trigger: Trigger<SceneInstanceReady>,
mut commands: Commands,
children: Query<&Children>,
color_override: Query<&ColorOverride>,
mesh_materials: Query<&MeshMaterial3d<StandardMaterial>>,
mut asset_materials: ResMut<Assets<StandardMaterial>>,
) {
    // Get the `ColorOverride` of the entity; if it does not have one, skip it.
let Ok(color_override) = color_override.get(trigger.target()) else {
return;
};
// Iterate over all children recursively
for descendants in children.iter_descendants(trigger.target()) {
// Get the material of the descendant
if let Some(material) = mesh_materials
.get(descendants)
.ok()
.and_then(|id| asset_materials.get_mut(id.id()))
{
// Create a copy of the material and override base color
// If you intend on creating multiple models with the same tint, it
// is best to cache the handle somewhere, as having multiple materials
// that are identical is expensive
let mut new_material = material.clone();
new_material.base_color = color_override.0;
// Override `MeshMaterial3d` with new material
commands
.entity(descendants)
.insert(MeshMaterial3d(asset_materials.add(new_material)));
}
}
}

275
vendor/bevy/examples/3d/fog.rs vendored Normal file
View File

@@ -0,0 +1,275 @@
//! This interactive example shows how to use distance fog,
//! and allows playing around with different fog settings.
//!
//! ## Controls
//!
//! | Key Binding | Action |
//! |:-------------------|:------------------------------------|
//! | `1` / `2` / `3` | Fog Falloff Mode |
//! | `A` / `S` | Move Start Distance (Linear Fog) |
//! | | Change Density (Exponential Fogs) |
//! | `Z` / `X` | Move End Distance (Linear Fog) |
//! | `-` / `=` | Adjust Fog Red Channel |
//! | `[` / `]` | Adjust Fog Green Channel |
//! | `;` / `'` | Adjust Fog Blue Channel |
//! | `.` / `?` | Adjust Fog Alpha Channel |
use bevy::{
math::ops,
pbr::{NotShadowCaster, NotShadowReceiver},
prelude::*,
};
fn main() {
App::new()
.insert_resource(AmbientLight::NONE)
.add_plugins(DefaultPlugins)
.add_systems(
Startup,
(setup_camera_fog, setup_pyramid_scene, setup_instructions),
)
.add_systems(Update, update_system)
.run();
}
fn setup_camera_fog(mut commands: Commands) {
commands.spawn((
Camera3d::default(),
DistanceFog {
color: Color::srgb(0.25, 0.25, 0.25),
falloff: FogFalloff::Linear {
start: 5.0,
end: 20.0,
},
..default()
},
));
}
fn setup_pyramid_scene(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
let stone = materials.add(StandardMaterial {
base_color: Srgba::hex("28221B").unwrap().into(),
perceptual_roughness: 1.0,
..default()
});
// pillars
for (x, z) in &[(-1.5, -1.5), (1.5, -1.5), (1.5, 1.5), (-1.5, 1.5)] {
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(1.0, 3.0, 1.0))),
MeshMaterial3d(stone.clone()),
Transform::from_xyz(*x, 1.5, *z),
));
}
// orb
commands.spawn((
Mesh3d(meshes.add(Sphere::default())),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Srgba::hex("126212CC").unwrap().into(),
reflectance: 1.0,
perceptual_roughness: 0.0,
metallic: 0.5,
alpha_mode: AlphaMode::Blend,
..default()
})),
Transform::from_scale(Vec3::splat(1.75)).with_translation(Vec3::new(0.0, 4.0, 0.0)),
NotShadowCaster,
NotShadowReceiver,
));
// steps
for i in 0..50 {
let half_size = i as f32 / 2.0 + 3.0;
let y = -i as f32 / 2.0;
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(2.0 * half_size, 0.5, 2.0 * half_size))),
MeshMaterial3d(stone.clone()),
Transform::from_xyz(0.0, y + 0.25, 0.0),
));
}
// sky
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(2.0, 1.0, 1.0))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Srgba::hex("888888").unwrap().into(),
unlit: true,
cull_mode: None,
..default()
})),
Transform::from_scale(Vec3::splat(1_000_000.0)),
));
// light
commands.spawn((
PointLight {
shadows_enabled: true,
..default()
},
Transform::from_xyz(0.0, 1.0, 0.0),
));
}
fn setup_instructions(mut commands: Commands) {
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
fn update_system(
camera: Single<(&mut DistanceFog, &mut Transform)>,
mut text: Single<&mut Text>,
time: Res<Time>,
keycode: Res<ButtonInput<KeyCode>>,
) {
let now = time.elapsed_secs();
let delta = time.delta_secs();
let (mut fog, mut transform) = camera.into_inner();
// Orbit camera around pyramid
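    // The orbit radius oscillates between 1.0 and 15.0 over time, sweeping
    // the camera closer to and farther from the pyramid through the fog.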
let orbit_scale = 8.0 + ops::sin(now / 10.0) * 7.0;
*transform = Transform::from_xyz(
ops::cos(now / 5.0) * orbit_scale,
12.0 - orbit_scale / 2.0,
ops::sin(now / 5.0) * orbit_scale,
)
.looking_at(Vec3::ZERO, Vec3::Y);
// Fog Information
text.0 = format!("Fog Falloff: {:?}\nFog Color: {:?}", fog.falloff, fog.color);
// Fog Falloff Mode Switching
text.push_str("\n\n1 / 2 / 3 - Fog Falloff Mode");
if keycode.pressed(KeyCode::Digit1) {
if let FogFalloff::Linear { .. } = fog.falloff {
// No change
} else {
fog.falloff = FogFalloff::Linear {
start: 5.0,
end: 20.0,
};
};
}
if keycode.pressed(KeyCode::Digit2) {
if let FogFalloff::Exponential { .. } = fog.falloff {
// No change
} else if let FogFalloff::ExponentialSquared { density } = fog.falloff {
fog.falloff = FogFalloff::Exponential { density };
} else {
fog.falloff = FogFalloff::Exponential { density: 0.07 };
};
}
if keycode.pressed(KeyCode::Digit3) {
if let FogFalloff::Exponential { density } = fog.falloff {
fog.falloff = FogFalloff::ExponentialSquared { density };
} else if let FogFalloff::ExponentialSquared { .. } = fog.falloff {
// No change
} else {
fog.falloff = FogFalloff::Exponential { density: 0.07 };
};
}
// Linear Fog Controls
if let FogFalloff::Linear { start, end } = &mut fog.falloff {
text.push_str("\nA / S - Move Start Distance\nZ / X - Move End Distance");
if keycode.pressed(KeyCode::KeyA) {
*start -= delta * 3.0;
}
if keycode.pressed(KeyCode::KeyS) {
*start += delta * 3.0;
}
if keycode.pressed(KeyCode::KeyZ) {
*end -= delta * 3.0;
}
if keycode.pressed(KeyCode::KeyX) {
*end += delta * 3.0;
}
}
// Exponential Fog Controls
if let FogFalloff::Exponential { density } = &mut fog.falloff {
text.push_str("\nA / S - Change Density");
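        // Adjust the density multiplicatively so changes feel consistent
        // across magnitudes, and never let it go negative.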
if keycode.pressed(KeyCode::KeyA) {
*density -= delta * 0.5 * *density;
if *density < 0.0 {
*density = 0.0;
}
}
if keycode.pressed(KeyCode::KeyS) {
*density += delta * 0.5 * *density;
}
}
// ExponentialSquared Fog Controls
if let FogFalloff::ExponentialSquared { density } = &mut fog.falloff {
text.push_str("\nA / S - Change Density");
if keycode.pressed(KeyCode::KeyA) {
*density -= delta * 0.5 * *density;
if *density < 0.0 {
*density = 0.0;
}
}
if keycode.pressed(KeyCode::KeyS) {
*density += delta * 0.5 * *density;
}
}
// RGBA Controls
text.push_str("\n\n- / = - Red\n[ / ] - Green\n; / ' - Blue\n. / ? - Alpha");
// We're performing various operations in the sRGB color space,
// so we convert the fog color to sRGB here, then modify it,
// and finally when we're done we can convert it back and set it.
let mut fog_color = Srgba::from(fog.color);
if keycode.pressed(KeyCode::Minus) {
fog_color.red = (fog_color.red - 0.1 * delta).max(0.0);
}
if keycode.any_pressed([KeyCode::Equal, KeyCode::NumpadEqual]) {
fog_color.red = (fog_color.red + 0.1 * delta).min(1.0);
}
if keycode.pressed(KeyCode::BracketLeft) {
fog_color.green = (fog_color.green - 0.1 * delta).max(0.0);
}
if keycode.pressed(KeyCode::BracketRight) {
fog_color.green = (fog_color.green + 0.1 * delta).min(1.0);
}
if keycode.pressed(KeyCode::Semicolon) {
fog_color.blue = (fog_color.blue - 0.1 * delta).max(0.0);
}
if keycode.pressed(KeyCode::Quote) {
fog_color.blue = (fog_color.blue + 0.1 * delta).min(1.0);
}
if keycode.pressed(KeyCode::Period) {
fog_color.alpha = (fog_color.alpha - 0.1 * delta).max(0.0);
}
if keycode.pressed(KeyCode::Slash) {
fog_color.alpha = (fog_color.alpha + 0.1 * delta).min(1.0);
}
fog.color = Color::from(fog_color);
}

82
vendor/bevy/examples/3d/fog_volumes.rs vendored Normal file
View File

@@ -0,0 +1,82 @@
//! Demonstrates fog volumes with voxel density textures.
//!
//! We render the Stanford bunny as a fog volume. Parts of the bunny become
//! lighter and darker as the camera rotates. This is physically-accurate
//! behavior that results from the scattering and absorption of the directional
//! light.
use bevy::{
math::vec3,
pbr::{FogVolume, VolumetricFog, VolumetricLight},
prelude::*,
};
/// Entry point.
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Fog Volumes Example".into(),
..default()
}),
..default()
}))
.insert_resource(AmbientLight::NONE)
.add_systems(Startup, setup)
.add_systems(Update, rotate_camera)
.run();
}
/// Spawns all the objects in the scene.
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
// Spawn a fog volume with a voxelized version of the Stanford bunny.
commands.spawn((
Transform::from_xyz(0.0, 0.5, 0.0),
FogVolume {
density_texture: Some(asset_server.load("volumes/bunny.ktx2")),
density_factor: 1.0,
// Scatter as much of the light as possible, to brighten the bunny
// up.
scattering: 1.0,
..default()
},
));
// Spawn a bright directional light that illuminates the fog well.
commands.spawn((
Transform::from_xyz(1.0, 1.0, -0.3).looking_at(vec3(0.0, 0.5, 0.0), Vec3::Y),
DirectionalLight {
shadows_enabled: true,
illuminance: 32000.0,
..default()
},
// Make sure to add this for the light to interact with the fog.
VolumetricLight,
));
// Spawn a camera.
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-0.75, 1.0, 2.0).looking_at(vec3(0.0, 0.0, 0.0), Vec3::Y),
Camera {
hdr: true,
..default()
},
VolumetricFog {
// Make this relatively high in order to increase the fog quality.
step_count: 64,
// Disable ambient light.
ambient_intensity: 0.0,
..default()
},
));
}
/// Rotates the camera a bit every frame.
fn rotate_camera(mut cameras: Query<&mut Transform, With<Camera3d>>) {
for mut camera_transform in cameras.iter_mut() {
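// Rotate the camera's position by a small yaw step (0.01 rad) around the world Y axis
// each frame, then re-aim it at the fog volume so the camera orbits the bunny.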
*camera_transform =
Transform::from_translation(Quat::from_rotation_y(0.01) * camera_transform.translation)
.looking_at(vec3(0.0, 0.5, 0.0), Vec3::Y);
}
}

View File

@@ -0,0 +1,275 @@
//! This example demonstrates how to create a custom mesh,
//! assign a custom UV mapping for a custom texture,
//! and how to change the UV mapping at run-time.
use bevy::{
prelude::*,
render::{
mesh::{Indices, VertexAttributeValues},
render_asset::RenderAssetUsages,
render_resource::PrimitiveTopology,
},
};
// Define a "marker" component to mark the custom mesh. Marker components are often used in Bevy for
// filtering entities in queries with `With`, they're usually not queried directly since they don't
// contain information within them.
#[derive(Component)]
struct CustomUV;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, input_handler)
.run();
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut meshes: ResMut<Assets<Mesh>>,
) {
// Import the custom texture.
let custom_texture_handle: Handle<Image> = asset_server.load("textures/array_texture.png");
// Create and save a handle to the mesh.
let cube_mesh_handle: Handle<Mesh> = meshes.add(create_cube_mesh());
// Render the mesh with the custom texture, and add the marker.
commands.spawn((
Mesh3d(cube_mesh_handle),
MeshMaterial3d(materials.add(StandardMaterial {
base_color_texture: Some(custom_texture_handle),
..default()
})),
CustomUV,
));
// Transform for the camera and lighting, looking at (0,0,0) (the position of the mesh).
let camera_and_light_transform =
Transform::from_xyz(1.8, 1.8, 1.8).looking_at(Vec3::ZERO, Vec3::Y);
// Camera in 3D space.
commands.spawn((Camera3d::default(), camera_and_light_transform));
// Light up the scene.
commands.spawn((PointLight::default(), camera_and_light_transform));
// Text to describe the controls.
commands.spawn((
Text::new("Controls:\nSpace: Change UVs\nX/Y/Z: Rotate\nR: Reset orientation"),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
// System to receive input from the user;
// check out examples/input/ for more examples of handling user input.
fn input_handler(
keyboard_input: Res<ButtonInput<KeyCode>>,
mesh_query: Query<&Mesh3d, With<CustomUV>>,
mut meshes: ResMut<Assets<Mesh>>,
mut query: Query<&mut Transform, With<CustomUV>>,
time: Res<Time>,
) {
if keyboard_input.just_pressed(KeyCode::Space) {
let mesh_handle = mesh_query.single().expect("Query not successful");
let mesh = meshes.get_mut(mesh_handle).unwrap();
toggle_texture(mesh);
}
if keyboard_input.pressed(KeyCode::KeyX) {
for mut transform in &mut query {
transform.rotate_x(time.delta_secs() / 1.2);
}
}
if keyboard_input.pressed(KeyCode::KeyY) {
for mut transform in &mut query {
transform.rotate_y(time.delta_secs() / 1.2);
}
}
if keyboard_input.pressed(KeyCode::KeyZ) {
for mut transform in &mut query {
transform.rotate_z(time.delta_secs() / 1.2);
}
}
if keyboard_input.pressed(KeyCode::KeyR) {
for mut transform in &mut query {
transform.look_to(Vec3::NEG_Z, Vec3::Y);
}
}
}
#[rustfmt::skip]
fn create_cube_mesh() -> Mesh {
// Keep the mesh data accessible in future frames to be able to mutate it in toggle_texture.
Mesh::new(PrimitiveTopology::TriangleList, RenderAssetUsages::MAIN_WORLD | RenderAssetUsages::RENDER_WORLD)
.with_inserted_attribute(
Mesh::ATTRIBUTE_POSITION,
// Each array is an [x, y, z] coordinate in local space.
// The camera coordinate space is right-handed x-right, y-up, z-back. This means "forward" is -Z.
// Meshes always rotate around their local [0, 0, 0] when a rotation is applied to their Transform.
// By centering our mesh around the origin, rotating the mesh preserves its center of mass.
vec![
// top (facing towards +y)
[-0.5, 0.5, -0.5], // vertex with index 0
[0.5, 0.5, -0.5], // vertex with index 1
[0.5, 0.5, 0.5], // etc. until 23
[-0.5, 0.5, 0.5],
// bottom (-y)
[-0.5, -0.5, -0.5],
[0.5, -0.5, -0.5],
[0.5, -0.5, 0.5],
[-0.5, -0.5, 0.5],
// right (+x)
[0.5, -0.5, -0.5],
[0.5, -0.5, 0.5],
[0.5, 0.5, 0.5], // This vertex is at the same position as vertex with index 2, but they'll have different UV and normal
[0.5, 0.5, -0.5],
// left (-x)
[-0.5, -0.5, -0.5],
[-0.5, -0.5, 0.5],
[-0.5, 0.5, 0.5],
[-0.5, 0.5, -0.5],
// back (+z)
[-0.5, -0.5, 0.5],
[-0.5, 0.5, 0.5],
[0.5, 0.5, 0.5],
[0.5, -0.5, 0.5],
// forward (-z)
[-0.5, -0.5, -0.5],
[-0.5, 0.5, -0.5],
[0.5, 0.5, -0.5],
[0.5, -0.5, -0.5],
],
)
// Set up UV coordinates to point to the upper (V < 0.5), "dirt+grass" part of the texture.
// Take a look at the custom image (assets/textures/array_texture.png)
// so the UV coords will make more sense
// Note: (0.0, 0.0) = Top-Left in UV mapping, (1.0, 1.0) = Bottom-Right in UV mapping
.with_inserted_attribute(
Mesh::ATTRIBUTE_UV_0,
vec![
// Assigning the UV coords for the top side.
[0.0, 0.2], [0.0, 0.0], [1.0, 0.0], [1.0, 0.2],
// Assigning the UV coords for the bottom side.
[0.0, 0.45], [0.0, 0.25], [1.0, 0.25], [1.0, 0.45],
// Assigning the UV coords for the right side.
[1.0, 0.45], [0.0, 0.45], [0.0, 0.2], [1.0, 0.2],
// Assigning the UV coords for the left side.
[1.0, 0.45], [0.0, 0.45], [0.0, 0.2], [1.0, 0.2],
// Assigning the UV coords for the back side.
[0.0, 0.45], [0.0, 0.2], [1.0, 0.2], [1.0, 0.45],
// Assigning the UV coords for the forward side.
[0.0, 0.45], [0.0, 0.2], [1.0, 0.2], [1.0, 0.45],
],
)
// For meshes with flat shading, normals are orthogonal (pointing out) from the direction of
// the surface.
// Normals are required for correct lighting calculations.
// Each array represents a normalized vector whose length is equal to 1.0.
.with_inserted_attribute(
Mesh::ATTRIBUTE_NORMAL,
vec![
// Normals for the top side (towards +y)
[0.0, 1.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 1.0, 0.0],
// Normals for the bottom side (towards -y)
[0.0, -1.0, 0.0],
[0.0, -1.0, 0.0],
[0.0, -1.0, 0.0],
[0.0, -1.0, 0.0],
// Normals for the right side (towards +x)
[1.0, 0.0, 0.0],
[1.0, 0.0, 0.0],
[1.0, 0.0, 0.0],
[1.0, 0.0, 0.0],
// Normals for the left side (towards -x)
[-1.0, 0.0, 0.0],
[-1.0, 0.0, 0.0],
[-1.0, 0.0, 0.0],
[-1.0, 0.0, 0.0],
// Normals for the back side (towards +z)
[0.0, 0.0, 1.0],
[0.0, 0.0, 1.0],
[0.0, 0.0, 1.0],
[0.0, 0.0, 1.0],
// Normals for the forward side (towards -z)
[0.0, 0.0, -1.0],
[0.0, 0.0, -1.0],
[0.0, 0.0, -1.0],
[0.0, 0.0, -1.0],
],
)
// Create the triangles out of the 24 vertices we created.
// Each square face needs 2 triangles, so the cube needs 12 triangles in total.
// To construct a triangle, we list the indices of its 3 vertices one by one, in
// counter-clockwise order as seen by the viewer (the order should appear counter-clockwise
// from the front of the triangle, in this case from outside the cube).
// Read more about how to correctly build a mesh manually in the Bevy documentation of a Mesh,
// further examples and the implementation of the built-in shapes.
//
// The first two defined triangles look like this (marked with the vertex indices,
// and the axis), when looking down at the top (+y) of the cube:
// -Z
// ^
// 0---1
// | /|
// | / | -> +X
// |/ |
// 3---2
//
// The right face's (+x) triangles look like this, seen from the outside of the cube.
// +Y
// ^
// 10--11
// | /|
// | / | -> -Z
// |/ |
// 9---8
//
// The back face's (+z) triangles look like this, seen from the outside of the cube.
// +Y
// ^
// 17--18
// |\ |
// | \ | -> +X
// | \|
// 16--19
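// For example, the first triple (0, 3, 1) lists the top face's vertices so that they
// appear counter-clockwise when viewed from above (+Y), i.e. from outside the cube.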
.with_inserted_indices(Indices::U32(vec![
0,3,1 , 1,3,2, // triangles making up the top (+y) facing side.
4,5,7 , 5,6,7, // bottom (-y)
8,11,9 , 9,11,10, // right (+x)
12,13,15 , 13,14,15, // left (-x)
16,19,17 , 17,19,18, // back (+z)
20,21,23 , 21,22,23, // forward (-z)
]))
}
// Function that changes the UV mapping of the mesh, to apply the other texture.
fn toggle_texture(mesh_to_change: &mut Mesh) {
// Get a mutable reference to the values of the UV attribute, so we can iterate over it.
let uv_attribute = mesh_to_change.attribute_mut(Mesh::ATTRIBUTE_UV_0).unwrap();
// The format of the UV coordinates should be Float32x2.
let VertexAttributeValues::Float32x2(uv_attribute) = uv_attribute else {
panic!("Unexpected vertex format, expected Float32x2.");
};
// Iterate over the UV coordinates, and change them as we want.
for uv_coord in uv_attribute.iter_mut() {
// If the UV coordinate points to the upper, "dirt+grass" part of the texture...
if (uv_coord[1] + 0.5) < 1.0 {
// ... point to the equivalent lower, "sand+water" part instead,
uv_coord[1] += 0.5;
} else {
// else, point back to the upper, "dirt+grass" part.
uv_coord[1] -= 0.5;
}
}
}

View File

@@ -0,0 +1,635 @@
//! This example shows how irradiance volumes affect the indirect lighting of
//! objects in a scene.
//!
//! The controls are as follows:
//!
//! * Space toggles the irradiance volume on and off.
//!
//! * Enter toggles the camera rotation on and off.
//!
//! * Tab switches the object between a plain sphere and a running fox.
//!
//! * Backspace shows and hides the voxel cubes.
//!
//! * Clicking anywhere moves the object.
use bevy::{
color::palettes::css::*,
core_pipeline::Skybox,
math::{uvec3, vec3},
pbr::{
irradiance_volume::IrradianceVolume, ExtendedMaterial, MaterialExtension, NotShadowCaster,
},
prelude::*,
render::render_resource::{AsBindGroup, ShaderRef, ShaderType},
window::PrimaryWindow,
};
/// This example uses a shader source file from the assets subdirectory
const SHADER_ASSET_PATH: &str = "shaders/irradiance_volume_voxel_visualization.wgsl";
// Rotation speed in radians per frame.
const ROTATION_SPEED: f32 = 0.2;
const FOX_SCALE: f32 = 0.05;
const SPHERE_SCALE: f32 = 2.0;
const IRRADIANCE_VOLUME_INTENSITY: f32 = 1800.0;
const AMBIENT_LIGHT_BRIGHTNESS: f32 = 0.06;
const VOXEL_CUBE_SCALE: f32 = 0.4;
static DISABLE_IRRADIANCE_VOLUME_HELP_TEXT: &str = "Space: Disable the irradiance volume";
static ENABLE_IRRADIANCE_VOLUME_HELP_TEXT: &str = "Space: Enable the irradiance volume";
static HIDE_VOXELS_HELP_TEXT: &str = "Backspace: Hide the voxels";
static SHOW_VOXELS_HELP_TEXT: &str = "Backspace: Show the voxels";
static STOP_ROTATION_HELP_TEXT: &str = "Enter: Stop rotation";
static START_ROTATION_HELP_TEXT: &str = "Enter: Start rotation";
static SWITCH_TO_FOX_HELP_TEXT: &str = "Tab: Switch to a skinned mesh";
static SWITCH_TO_SPHERE_HELP_TEXT: &str = "Tab: Switch to a plain sphere mesh";
static CLICK_TO_MOVE_HELP_TEXT: &str = "Left click: Move the object";
static GIZMO_COLOR: Color = Color::Srgba(YELLOW);
static VOXEL_FROM_WORLD: Mat4 = Mat4::from_cols_array_2d(&[
[-42.317566, 0.0, 0.0, 0.0],
[0.0, 0.0, 44.601563, 0.0],
[0.0, 16.73776, 0.0, 0.0],
[0.0, 6.544792, 0.0, 1.0],
]);
// The mode the application is in.
#[derive(Resource)]
struct AppStatus {
// Whether the user wants the irradiance volume to be applied.
irradiance_volume_present: bool,
// Whether the user wants the unskinned sphere mesh or the skinned fox mesh.
model: ExampleModel,
// Whether the user has requested the scene to rotate.
rotating: bool,
// Whether the user has requested the voxels to be displayed.
voxels_visible: bool,
}
// Which model the user wants to display.
#[derive(Clone, Copy, PartialEq)]
enum ExampleModel {
// The plain sphere.
Sphere,
// The fox, which is skinned.
Fox,
}
// Handles to all the assets used in this example.
#[derive(Resource)]
struct ExampleAssets {
// The glTF scene containing the colored floor.
main_scene: Handle<Scene>,
// The 3D texture containing the irradiance volume.
irradiance_volume: Handle<Image>,
// The plain sphere mesh.
main_sphere: Handle<Mesh>,
// The material used for the sphere.
main_sphere_material: Handle<StandardMaterial>,
// The glTF scene containing the animated fox.
fox: Handle<Scene>,
// The graph containing the animation that the fox will play.
fox_animation_graph: Handle<AnimationGraph>,
// The node within the animation graph containing the animation.
fox_animation_node: AnimationNodeIndex,
// The voxel cube mesh.
voxel_cube: Handle<Mesh>,
// The skybox.
skybox: Handle<Image>,
}
// The sphere and fox both have this component.
#[derive(Component)]
struct MainObject;
// Marks each of the voxel cubes.
#[derive(Component)]
struct VoxelCube;
// Marks the voxel cube parent object.
#[derive(Component)]
struct VoxelCubeParent;
type VoxelVisualizationMaterial = ExtendedMaterial<StandardMaterial, VoxelVisualizationExtension>;
#[derive(Asset, TypePath, AsBindGroup, Debug, Clone)]
struct VoxelVisualizationExtension {
#[uniform(100)]
irradiance_volume_info: VoxelVisualizationIrradianceVolumeInfo,
}
#[derive(ShaderType, Debug, Clone)]
struct VoxelVisualizationIrradianceVolumeInfo {
world_from_voxel: Mat4,
voxel_from_world: Mat4,
resolution: UVec3,
intensity: f32,
}
fn main() {
// Create the example app.
App::new()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Irradiance Volumes Example".into(),
..default()
}),
..default()
}))
.add_plugins(MaterialPlugin::<VoxelVisualizationMaterial>::default())
.init_resource::<AppStatus>()
.init_resource::<ExampleAssets>()
.insert_resource(AmbientLight {
color: Color::WHITE,
brightness: 0.0,
..default()
})
.add_systems(Startup, setup)
.add_systems(PreUpdate, create_cubes)
.add_systems(Update, rotate_camera)
.add_systems(Update, play_animations)
.add_systems(
Update,
handle_mouse_clicks
.after(rotate_camera)
.after(play_animations),
)
.add_systems(
Update,
change_main_object
.after(rotate_camera)
.after(play_animations),
)
.add_systems(
Update,
toggle_irradiance_volumes
.after(rotate_camera)
.after(play_animations),
)
.add_systems(
Update,
toggle_voxel_visibility
.after(rotate_camera)
.after(play_animations),
)
.add_systems(
Update,
toggle_rotation.after(rotate_camera).after(play_animations),
)
.add_systems(
Update,
draw_gizmo
.after(handle_mouse_clicks)
.after(change_main_object)
.after(toggle_irradiance_volumes)
.after(toggle_voxel_visibility)
.after(toggle_rotation),
)
.add_systems(
Update,
update_text
.after(handle_mouse_clicks)
.after(change_main_object)
.after(toggle_irradiance_volumes)
.after(toggle_voxel_visibility)
.after(toggle_rotation),
)
.run();
}
// Spawns all the scene objects.
fn setup(mut commands: Commands, assets: Res<ExampleAssets>, app_status: Res<AppStatus>) {
spawn_main_scene(&mut commands, &assets);
spawn_camera(&mut commands, &assets);
spawn_irradiance_volume(&mut commands, &assets);
spawn_light(&mut commands);
spawn_sphere(&mut commands, &assets);
spawn_voxel_cube_parent(&mut commands);
spawn_fox(&mut commands, &assets);
spawn_text(&mut commands, &app_status);
}
fn spawn_main_scene(commands: &mut Commands, assets: &ExampleAssets) {
commands.spawn(SceneRoot(assets.main_scene.clone()));
}
fn spawn_camera(commands: &mut Commands, assets: &ExampleAssets) {
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-10.012, 4.8605, 13.281).looking_at(Vec3::ZERO, Vec3::Y),
Skybox {
image: assets.skybox.clone(),
brightness: 150.0,
..default()
},
));
}
fn spawn_irradiance_volume(commands: &mut Commands, assets: &ExampleAssets) {
commands.spawn((
Transform::from_matrix(VOXEL_FROM_WORLD),
IrradianceVolume {
voxels: assets.irradiance_volume.clone(),
intensity: IRRADIANCE_VOLUME_INTENSITY,
..default()
},
LightProbe,
));
}
fn spawn_light(commands: &mut Commands) {
commands.spawn((
PointLight {
intensity: 250000.0,
shadows_enabled: true,
..default()
},
Transform::from_xyz(4.0762, 5.9039, 1.0055),
));
}
fn spawn_sphere(commands: &mut Commands, assets: &ExampleAssets) {
commands
.spawn((
Mesh3d(assets.main_sphere.clone()),
MeshMaterial3d(assets.main_sphere_material.clone()),
Transform::from_xyz(0.0, SPHERE_SCALE, 0.0).with_scale(Vec3::splat(SPHERE_SCALE)),
))
.insert(MainObject);
}
fn spawn_voxel_cube_parent(commands: &mut Commands) {
commands.spawn((Visibility::Hidden, Transform::default(), VoxelCubeParent));
}
fn spawn_fox(commands: &mut Commands, assets: &ExampleAssets) {
commands.spawn((
SceneRoot(assets.fox.clone()),
Visibility::Hidden,
Transform::from_scale(Vec3::splat(FOX_SCALE)),
MainObject,
));
}
fn spawn_text(commands: &mut Commands, app_status: &AppStatus) {
commands.spawn((
app_status.create_text(),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
// A system that updates the help text.
fn update_text(mut text_query: Query<&mut Text>, app_status: Res<AppStatus>) {
for mut text in text_query.iter_mut() {
*text = app_status.create_text();
}
}
impl AppStatus {
// Constructs the help text at the bottom of the screen based on the
// application status.
fn create_text(&self) -> Text {
let irradiance_volume_help_text = if self.irradiance_volume_present {
DISABLE_IRRADIANCE_VOLUME_HELP_TEXT
} else {
ENABLE_IRRADIANCE_VOLUME_HELP_TEXT
};
let voxels_help_text = if self.voxels_visible {
HIDE_VOXELS_HELP_TEXT
} else {
SHOW_VOXELS_HELP_TEXT
};
let rotation_help_text = if self.rotating {
STOP_ROTATION_HELP_TEXT
} else {
START_ROTATION_HELP_TEXT
};
let switch_mesh_help_text = match self.model {
ExampleModel::Sphere => SWITCH_TO_FOX_HELP_TEXT,
ExampleModel::Fox => SWITCH_TO_SPHERE_HELP_TEXT,
};
format!(
"{CLICK_TO_MOVE_HELP_TEXT}\n\
{voxels_help_text}\n\
{irradiance_volume_help_text}\n\
{rotation_help_text}\n\
{switch_mesh_help_text}"
)
.into()
}
}
// Rotates the camera a bit every frame.
fn rotate_camera(
mut camera_query: Query<&mut Transform, With<Camera3d>>,
time: Res<Time>,
app_status: Res<AppStatus>,
) {
if !app_status.rotating {
return;
}
for mut transform in camera_query.iter_mut() {
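// Rotate the camera's XZ position around the world Y axis; `extend(y).xzy()`
// reassembles the 3D position with the original height before re-aiming at the origin.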
transform.translation = Vec2::from_angle(ROTATION_SPEED * time.delta_secs())
.rotate(transform.translation.xz())
.extend(transform.translation.y)
.xzy();
transform.look_at(Vec3::ZERO, Vec3::Y);
}
}
// Toggles between the unskinned sphere model and the skinned fox model if the
// user requests it.
fn change_main_object(
keyboard: Res<ButtonInput<KeyCode>>,
mut app_status: ResMut<AppStatus>,
mut sphere_query: Query<&mut Visibility, (With<MainObject>, With<Mesh3d>, Without<SceneRoot>)>,
mut fox_query: Query<&mut Visibility, (With<MainObject>, With<SceneRoot>)>,
) {
if !keyboard.just_pressed(KeyCode::Tab) {
return;
}
let Some(mut sphere_visibility) = sphere_query.iter_mut().next() else {
return;
};
let Some(mut fox_visibility) = fox_query.iter_mut().next() else {
return;
};
match app_status.model {
ExampleModel::Sphere => {
*sphere_visibility = Visibility::Hidden;
*fox_visibility = Visibility::Visible;
app_status.model = ExampleModel::Fox;
}
ExampleModel::Fox => {
*sphere_visibility = Visibility::Visible;
*fox_visibility = Visibility::Hidden;
app_status.model = ExampleModel::Sphere;
}
}
}
impl Default for AppStatus {
fn default() -> Self {
Self {
irradiance_volume_present: true,
rotating: true,
model: ExampleModel::Sphere,
voxels_visible: false,
}
}
}
// Turns on and off the irradiance volume as requested by the user.
fn toggle_irradiance_volumes(
mut commands: Commands,
keyboard: Res<ButtonInput<KeyCode>>,
light_probe_query: Query<Entity, With<LightProbe>>,
mut app_status: ResMut<AppStatus>,
assets: Res<ExampleAssets>,
mut ambient_light: ResMut<AmbientLight>,
) {
if !keyboard.just_pressed(KeyCode::Space) {
return;
};
let Some(light_probe) = light_probe_query.iter().next() else {
return;
};
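// When the irradiance volume is removed, substitute a roughly equivalent ambient light so the
// scene doesn't go completely dark; when it's re-added, turn the ambient light back off.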
if app_status.irradiance_volume_present {
commands.entity(light_probe).remove::<IrradianceVolume>();
ambient_light.brightness = AMBIENT_LIGHT_BRIGHTNESS * IRRADIANCE_VOLUME_INTENSITY;
app_status.irradiance_volume_present = false;
} else {
commands.entity(light_probe).insert(IrradianceVolume {
voxels: assets.irradiance_volume.clone(),
intensity: IRRADIANCE_VOLUME_INTENSITY,
..default()
});
ambient_light.brightness = 0.0;
app_status.irradiance_volume_present = true;
}
}
fn toggle_rotation(keyboard: Res<ButtonInput<KeyCode>>, mut app_status: ResMut<AppStatus>) {
if keyboard.just_pressed(KeyCode::Enter) {
app_status.rotating = !app_status.rotating;
}
}
// Handles clicks on the plane that reposition the object.
fn handle_mouse_clicks(
buttons: Res<ButtonInput<MouseButton>>,
windows: Query<&Window, With<PrimaryWindow>>,
cameras: Query<(&Camera, &GlobalTransform)>,
mut main_objects: Query<&mut Transform, With<MainObject>>,
) {
if !buttons.pressed(MouseButton::Left) {
return;
}
let Some(mouse_position) = windows.iter().next().and_then(Window::cursor_position) else {
return;
};
let Some((camera, camera_transform)) = cameras.iter().next() else {
return;
};
// Figure out where the user clicked on the plane.
let Ok(ray) = camera.viewport_to_world(camera_transform, mouse_position) else {
return;
};
let Some(ray_distance) = ray.intersect_plane(Vec3::ZERO, InfinitePlane3d::new(Vec3::Y)) else {
return;
};
let plane_intersection = ray.origin + ray.direction.normalize() * ray_distance;
// Move all the main objects.
for mut transform in main_objects.iter_mut() {
transform.translation = vec3(
plane_intersection.x,
transform.translation.y,
plane_intersection.z,
);
}
}
impl FromWorld for ExampleAssets {
fn from_world(world: &mut World) -> Self {
let fox_animation =
world.load_asset(GltfAssetLabel::Animation(1).from_asset("models/animated/Fox.glb"));
let (fox_animation_graph, fox_animation_node) =
AnimationGraph::from_clip(fox_animation.clone());
ExampleAssets {
main_sphere: world.add_asset(Sphere::default().mesh().uv(32, 18)),
fox: world.load_asset(GltfAssetLabel::Scene(0).from_asset("models/animated/Fox.glb")),
main_sphere_material: world.add_asset(Color::from(SILVER)),
main_scene: world.load_asset(
GltfAssetLabel::Scene(0)
.from_asset("models/IrradianceVolumeExample/IrradianceVolumeExample.glb"),
),
irradiance_volume: world.load_asset("irradiance_volumes/Example.vxgi.ktx2"),
fox_animation_graph: world.add_asset(fox_animation_graph),
fox_animation_node,
voxel_cube: world.add_asset(Cuboid::default()),
// Just use a specular map for the skybox since it's not too blurry.
// In reality you wouldn't do this--you'd use a real skybox texture--but
// reusing the textures like this saves space in the Bevy repository.
skybox: world.load_asset("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
}
}
}
// Plays the animation on the fox.
fn play_animations(
mut commands: Commands,
assets: Res<ExampleAssets>,
mut players: Query<(Entity, &mut AnimationPlayer), Without<AnimationGraphHandle>>,
) {
for (entity, mut player) in players.iter_mut() {
commands
.entity(entity)
.insert(AnimationGraphHandle(assets.fox_animation_graph.clone()));
player.play(assets.fox_animation_node).repeat();
}
}
fn create_cubes(
image_assets: Res<Assets<Image>>,
mut commands: Commands,
irradiance_volumes: Query<(&IrradianceVolume, &GlobalTransform)>,
voxel_cube_parents: Query<Entity, With<VoxelCubeParent>>,
voxel_cubes: Query<Entity, With<VoxelCube>>,
example_assets: Res<ExampleAssets>,
mut voxel_visualization_material_assets: ResMut<Assets<VoxelVisualizationMaterial>>,
) {
// If voxel cubes have already been spawned, don't do anything.
if !voxel_cubes.is_empty() {
return;
}
let Some(voxel_cube_parent) = voxel_cube_parents.iter().next() else {
return;
};
for (irradiance_volume, global_transform) in irradiance_volumes.iter() {
let Some(image) = image_assets.get(&irradiance_volume.voxels) else {
continue;
};
let resolution = image.texture_descriptor.size;
let voxel_cube_material = voxel_visualization_material_assets.add(ExtendedMaterial {
base: StandardMaterial::from(Color::from(RED)),
extension: VoxelVisualizationExtension {
irradiance_volume_info: VoxelVisualizationIrradianceVolumeInfo {
world_from_voxel: VOXEL_FROM_WORLD.inverse(),
voxel_from_world: VOXEL_FROM_WORLD,
resolution: uvec3(
resolution.width,
resolution.height,
resolution.depth_or_array_layers,
),
intensity: IRRADIANCE_VOLUME_INTENSITY,
},
},
});
let scale = vec3(
1.0 / resolution.width as f32,
1.0 / resolution.height as f32,
1.0 / resolution.depth_or_array_layers as f32,
);
// Spawn a cube for each voxel.
for z in 0..resolution.depth_or_array_layers {
for y in 0..resolution.height {
for x in 0..resolution.width {
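// Map the voxel index to the center of its cell in the volume's local
// [-0.5, 0.5] cube (the +0.5 samples the cell center), then transform that
// point into world space to position the cube.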
let uvw = (uvec3(x, y, z).as_vec3() + 0.5) * scale - 0.5;
let pos = global_transform.transform_point(uvw);
let voxel_cube = commands
.spawn((
Mesh3d(example_assets.voxel_cube.clone()),
MeshMaterial3d(voxel_cube_material.clone()),
Transform::from_scale(Vec3::splat(VOXEL_CUBE_SCALE))
.with_translation(pos),
))
.insert(VoxelCube)
.insert(NotShadowCaster)
.id();
commands.entity(voxel_cube_parent).add_child(voxel_cube);
}
}
}
}
}
// Draws a gizmo showing the bounds of the irradiance volume.
fn draw_gizmo(
mut gizmos: Gizmos,
irradiance_volume_query: Query<&GlobalTransform, With<IrradianceVolume>>,
app_status: Res<AppStatus>,
) {
if app_status.voxels_visible {
for transform in irradiance_volume_query.iter() {
gizmos.cuboid(*transform, GIZMO_COLOR);
}
}
}
// Handles a request from the user to toggle the voxel visibility on and off.
fn toggle_voxel_visibility(
keyboard: Res<ButtonInput<KeyCode>>,
mut app_status: ResMut<AppStatus>,
mut voxel_cube_parent_query: Query<&mut Visibility, With<VoxelCubeParent>>,
) {
if !keyboard.just_pressed(KeyCode::Backspace) {
return;
}
app_status.voxels_visible = !app_status.voxels_visible;
for mut visibility in voxel_cube_parent_query.iter_mut() {
*visibility = if app_status.voxels_visible {
Visibility::Visible
} else {
Visibility::Hidden
};
}
}
impl MaterialExtension for VoxelVisualizationExtension {
fn fragment_shader() -> ShaderRef {
SHADER_ASSET_PATH.into()
}
}

311
vendor/bevy/examples/3d/lighting.rs vendored Normal file
View File

@@ -0,0 +1,311 @@
//! Illustrates different lights of various types and colors, some static, some moving over
//! a simple scene.
use std::f32::consts::PI;
use bevy::{
color::palettes::css::*,
pbr::CascadeShadowConfigBuilder,
prelude::*,
render::camera::{Exposure, PhysicalCameraParameters},
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.insert_resource(Parameters(PhysicalCameraParameters {
aperture_f_stops: 1.0,
shutter_speed_s: 1.0 / 125.0,
sensitivity_iso: 100.0,
sensor_height: 0.01866,
}))
.add_systems(Startup, setup)
.add_systems(Update, (update_exposure, movement, animate_light_direction))
.run();
}
#[derive(Resource, Default, Deref, DerefMut)]
struct Parameters(PhysicalCameraParameters);
#[derive(Component)]
struct Movable;
/// set up a simple 3D scene
fn setup(
parameters: Res<Parameters>,
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
asset_server: Res<AssetServer>,
) {
// ground plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(10.0, 10.0))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::WHITE,
perceptual_roughness: 1.0,
..default()
})),
));
// left wall
let mut transform = Transform::from_xyz(2.5, 2.5, 0.0);
transform.rotate_z(PI / 2.);
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(5.0, 0.15, 5.0))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: INDIGO.into(),
perceptual_roughness: 1.0,
..default()
})),
transform,
));
// back (right) wall
let mut transform = Transform::from_xyz(0.0, 2.5, -2.5);
transform.rotate_x(PI / 2.);
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(5.0, 0.15, 5.0))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: INDIGO.into(),
perceptual_roughness: 1.0,
..default()
})),
transform,
));
// Bevy logo to demonstrate alpha mask shadows
let mut transform = Transform::from_xyz(-2.2, 0.5, 1.0);
transform.rotate_y(PI / 8.);
commands.spawn((
Mesh3d(meshes.add(Rectangle::new(2.0, 0.5))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color_texture: Some(asset_server.load("branding/bevy_logo_light.png")),
perceptual_roughness: 1.0,
alpha_mode: AlphaMode::Mask(0.5),
cull_mode: None,
..default()
})),
transform,
Movable,
));
// cube
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: DEEP_PINK.into(),
..default()
})),
Transform::from_xyz(0.0, 0.5, 0.0),
Movable,
));
// sphere
commands.spawn((
Mesh3d(meshes.add(Sphere::new(0.5).mesh().uv(32, 18))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: LIMEGREEN.into(),
..default()
})),
Transform::from_xyz(1.5, 1.0, 1.5),
Movable,
));
// ambient light
commands.insert_resource(AmbientLight {
color: ORANGE_RED.into(),
brightness: 0.02,
..default()
});
// red point light
commands.spawn((
PointLight {
intensity: 100_000.0,
color: RED.into(),
shadows_enabled: true,
..default()
},
Transform::from_xyz(1.0, 2.0, 0.0),
children![(
Mesh3d(meshes.add(Sphere::new(0.1).mesh().uv(32, 18))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: RED.into(),
emissive: LinearRgba::new(4.0, 0.0, 0.0, 0.0),
..default()
})),
)],
));
// green spot light
commands.spawn((
SpotLight {
intensity: 100_000.0,
color: LIME.into(),
shadows_enabled: true,
inner_angle: 0.6,
outer_angle: 0.8,
..default()
},
Transform::from_xyz(-1.0, 2.0, 0.0).looking_at(Vec3::new(-1.0, 0.0, 0.0), Vec3::Z),
children![(
Mesh3d(meshes.add(Capsule3d::new(0.1, 0.125))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: LIME.into(),
emissive: LinearRgba::new(0.0, 4.0, 0.0, 0.0),
..default()
})),
Transform::from_rotation(Quat::from_rotation_x(PI / 2.0)),
)],
));
// blue point light
commands.spawn((
PointLight {
intensity: 100_000.0,
color: BLUE.into(),
shadows_enabled: true,
..default()
},
Transform::from_xyz(0.0, 4.0, 0.0),
children![(
Mesh3d(meshes.add(Sphere::new(0.1).mesh().uv(32, 18))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: BLUE.into(),
emissive: LinearRgba::new(0.0, 0.0, 713.0, 0.0),
..default()
})),
)],
));
// directional 'sun' light
commands.spawn((
DirectionalLight {
illuminance: light_consts::lux::OVERCAST_DAY,
shadows_enabled: true,
..default()
},
Transform {
translation: Vec3::new(0.0, 2.0, 0.0),
rotation: Quat::from_rotation_x(-PI / 4.),
..default()
},
// The default cascade config is designed to handle large scenes.
// As this example has a much smaller world, we can tighten the shadow
// bounds for better visual quality.
CascadeShadowConfigBuilder {
first_cascade_far_bound: 4.0,
maximum_distance: 10.0,
..default()
}
.build(),
));
// example instructions
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
children![
TextSpan(format!("Aperture: f/{:.0}\n", parameters.aperture_f_stops,)),
TextSpan(format!(
"Shutter speed: 1/{:.0}s\n",
1.0 / parameters.shutter_speed_s
)),
TextSpan(format!(
"Sensitivity: ISO {:.0}\n",
parameters.sensitivity_iso
)),
TextSpan::new("\n\n"),
TextSpan::new("Controls\n"),
TextSpan::new("---------------\n"),
TextSpan::new("Arrow keys - Move objects\n"),
TextSpan::new("1/2 - Decrease/Increase aperture\n"),
TextSpan::new("3/4 - Decrease/Increase shutter speed\n"),
TextSpan::new("5/6 - Decrease/Increase sensitivity\n"),
TextSpan::new("R - Reset exposure"),
],
));
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
Exposure::from_physical_camera(**parameters),
));
}
fn update_exposure(
key_input: Res<ButtonInput<KeyCode>>,
mut parameters: ResMut<Parameters>,
mut exposure: Single<&mut Exposure>,
text: Single<Entity, With<Text>>,
mut writer: TextUiWriter,
) {
// TODO: Clamp values to a reasonable range
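// One option (bounds here are only illustrative) would be to clamp after each change, e.g.
// `parameters.aperture_f_stops = parameters.aperture_f_stops.clamp(0.5, 64.0);`,
// with similar limits for shutter speed and sensitivity.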
let entity = *text;
if key_input.just_pressed(KeyCode::Digit2) {
parameters.aperture_f_stops *= 2.0;
} else if key_input.just_pressed(KeyCode::Digit1) {
parameters.aperture_f_stops *= 0.5;
}
if key_input.just_pressed(KeyCode::Digit4) {
parameters.shutter_speed_s *= 2.0;
} else if key_input.just_pressed(KeyCode::Digit3) {
parameters.shutter_speed_s *= 0.5;
}
if key_input.just_pressed(KeyCode::Digit6) {
parameters.sensitivity_iso += 100.0;
} else if key_input.just_pressed(KeyCode::Digit5) {
parameters.sensitivity_iso -= 100.0;
}
if key_input.just_pressed(KeyCode::KeyR) {
*parameters = Parameters::default();
}
*writer.text(entity, 1) = format!("Aperture: f/{:.0}\n", parameters.aperture_f_stops);
*writer.text(entity, 2) = format!(
"Shutter speed: 1/{:.0}s\n",
1.0 / parameters.shutter_speed_s
);
*writer.text(entity, 3) = format!("Sensitivity: ISO {:.0}\n", parameters.sensitivity_iso);
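// Recompute the exposure from the physical camera parameters (conceptually,
// EV100 = log2(aperture^2 / shutter_time * 100 / sensitivity)).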
**exposure = Exposure::from_physical_camera(**parameters);
}
fn animate_light_direction(
time: Res<Time>,
mut query: Query<&mut Transform, With<DirectionalLight>>,
) {
for mut transform in &mut query {
transform.rotate_y(time.delta_secs() * 0.5);
}
}
fn movement(
input: Res<ButtonInput<KeyCode>>,
time: Res<Time>,
mut query: Query<&mut Transform, With<Movable>>,
) {
for mut transform in &mut query {
let mut direction = Vec3::ZERO;
if input.pressed(KeyCode::ArrowUp) {
direction.y += 1.0;
}
if input.pressed(KeyCode::ArrowDown) {
direction.y -= 1.0;
}
if input.pressed(KeyCode::ArrowLeft) {
direction.x -= 1.0;
}
if input.pressed(KeyCode::ArrowRight) {
direction.x += 1.0;
}
transform.translation += time.delta_secs() * 2.0 * direction;
}
}

103
vendor/bevy/examples/3d/lightmaps.rs vendored Normal file
View File

@@ -0,0 +1,103 @@
//! Rendering a scene with baked lightmaps.
use argh::FromArgs;
use bevy::{
core_pipeline::prepass::{DeferredPrepass, DepthPrepass, MotionVectorPrepass},
pbr::{DefaultOpaqueRendererMethod, Lightmap},
prelude::*,
};
/// Demonstrates lightmaps
#[derive(FromArgs, Resource)]
struct Args {
/// enables deferred shading
#[argh(switch)]
deferred: bool,
/// enables bicubic filtering
#[argh(switch)]
bicubic: bool,
}
fn main() {
#[cfg(not(target_arch = "wasm32"))]
let args: Args = argh::from_env();
#[cfg(target_arch = "wasm32")]
let args: Args = Args::from_args(&[], &[]).unwrap();
let mut app = App::new();
app.add_plugins(DefaultPlugins)
.insert_resource(AmbientLight::NONE);
if args.deferred {
app.insert_resource(DefaultOpaqueRendererMethod::deferred());
}
app.insert_resource(args)
.add_systems(Startup, setup)
.add_systems(Update, add_lightmaps_to_meshes)
.run();
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>, args: Res<Args>) {
commands.spawn(SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/CornellBox/CornellBox.glb"),
)));
let mut camera = commands.spawn((
Camera3d::default(),
Transform::from_xyz(-278.0, 273.0, 800.0),
));
if args.deferred {
camera.insert((
DepthPrepass,
MotionVectorPrepass,
DeferredPrepass,
Msaa::Off,
));
}
}
fn add_lightmaps_to_meshes(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut materials: ResMut<Assets<StandardMaterial>>,
meshes: Query<
(Entity, &Name, &MeshMaterial3d<StandardMaterial>),
(With<Mesh3d>, Without<Lightmap>),
>,
args: Res<Args>,
) {
let exposure = 250.0;
for (entity, name, material) in meshes.iter() {
if &**name == "large_box" {
materials.get_mut(material).unwrap().lightmap_exposure = exposure;
commands.entity(entity).insert(Lightmap {
image: asset_server.load("lightmaps/CornellBox-Large.zstd.ktx2"),
bicubic_sampling: args.bicubic,
..default()
});
continue;
}
if &**name == "small_box" {
materials.get_mut(material).unwrap().lightmap_exposure = exposure;
commands.entity(entity).insert(Lightmap {
image: asset_server.load("lightmaps/CornellBox-Small.zstd.ktx2"),
bicubic_sampling: args.bicubic,
..default()
});
continue;
}
if name.starts_with("cornell_box") {
materials.get_mut(material).unwrap().lightmap_exposure = exposure;
commands.entity(entity).insert(Lightmap {
image: asset_server.load("lightmaps/CornellBox-Box.zstd.ktx2"),
bicubic_sampling: args.bicubic,
..default()
});
continue;
}
}
}

129
vendor/bevy/examples/3d/lines.rs vendored Normal file
View File

@@ -0,0 +1,129 @@
//! Create a custom material to draw basic lines in 3D
use bevy::{
pbr::{MaterialPipeline, MaterialPipelineKey},
prelude::*,
reflect::TypePath,
render::{
mesh::{MeshVertexBufferLayoutRef, PrimitiveTopology},
render_asset::RenderAssetUsages,
render_resource::{
AsBindGroup, PolygonMode, RenderPipelineDescriptor, ShaderRef,
SpecializedMeshPipelineError,
},
},
};
/// This example uses a shader source file from the assets subdirectory
const SHADER_ASSET_PATH: &str = "shaders/line_material.wgsl";
fn main() {
App::new()
.add_plugins((DefaultPlugins, MaterialPlugin::<LineMaterial>::default()))
.add_systems(Startup, setup)
.run();
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<LineMaterial>>,
) {
// Spawn a list of lines with start and end points for each line
commands.spawn((
Mesh3d(meshes.add(LineList {
lines: vec![
(Vec3::ZERO, Vec3::new(1.0, 1.0, 0.0)),
(Vec3::new(1.0, 1.0, 0.0), Vec3::new(1.0, 0.0, 0.0)),
],
})),
MeshMaterial3d(materials.add(LineMaterial {
color: LinearRgba::GREEN,
})),
Transform::from_xyz(-1.5, 0.0, 0.0),
));
// Spawn a line strip that goes from point to point
commands.spawn((
Mesh3d(meshes.add(LineStrip {
points: vec![
Vec3::ZERO,
Vec3::new(1.0, 1.0, 0.0),
Vec3::new(1.0, 0.0, 0.0),
],
})),
MeshMaterial3d(materials.add(LineMaterial {
color: LinearRgba::BLUE,
})),
Transform::from_xyz(0.5, 0.0, 0.0),
));
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
));
}
#[derive(Asset, TypePath, Default, AsBindGroup, Debug, Clone)]
struct LineMaterial {
#[uniform(0)]
color: LinearRgba,
}
impl Material for LineMaterial {
fn fragment_shader() -> ShaderRef {
SHADER_ASSET_PATH.into()
}
fn specialize(
_pipeline: &MaterialPipeline<Self>,
descriptor: &mut RenderPipelineDescriptor,
_layout: &MeshVertexBufferLayoutRef,
_key: MaterialPipelineKey<Self>,
) -> Result<(), SpecializedMeshPipelineError> {
// This is the important part to tell bevy to render this material as a line between vertices
descriptor.primitive.polygon_mode = PolygonMode::Line;
Ok(())
}
}
/// A list of lines with a start and end position
#[derive(Debug, Clone)]
struct LineList {
lines: Vec<(Vec3, Vec3)>,
}
impl From<LineList> for Mesh {
fn from(line: LineList) -> Self {
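// Flatten each (start, end) pair into two consecutive vertices; with a `LineList`
// topology, every pair of vertices is drawn as an independent segment.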
let vertices: Vec<_> = line.lines.into_iter().flat_map(|(a, b)| [a, b]).collect();
Mesh::new(
// This tells wgpu that the positions are a list of lines,
// where every pair of points is the start and end of a segment
PrimitiveTopology::LineList,
RenderAssetUsages::RENDER_WORLD,
)
// Add the vertices positions as an attribute
.with_inserted_attribute(Mesh::ATTRIBUTE_POSITION, vertices)
}
}
/// A list of points that will have a line drawn between each pair of consecutive points
#[derive(Debug, Clone)]
struct LineStrip {
points: Vec<Vec3>,
}
impl From<LineStrip> for Mesh {
fn from(line: LineStrip) -> Self {
Mesh::new(
// This tells wgpu that the positions are a list of points
// where a line will be drawn between each consecutive point
PrimitiveTopology::LineStrip,
RenderAssetUsages::RENDER_WORLD,
)
// Add the point positions as an attribute
.with_inserted_attribute(Mesh::ATTRIBUTE_POSITION, line.points)
}
}

63
vendor/bevy/examples/3d/load_gltf.rs vendored Normal file
View File

@@ -0,0 +1,63 @@
//! Loads and renders a glTF file as a scene.
use bevy::{
pbr::{CascadeShadowConfigBuilder, DirectionalLightShadowMap},
prelude::*,
};
use std::f32::consts::*;
fn main() {
App::new()
.insert_resource(DirectionalLightShadowMap { size: 4096 })
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, animate_light_direction)
.run();
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn((
Camera3d::default(),
Transform::from_xyz(0.7, 0.7, 1.0).looking_at(Vec3::new(0.0, 0.3, 0.0), Vec3::Y),
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 250.0,
..default()
},
));
commands.spawn((
DirectionalLight {
shadows_enabled: true,
..default()
},
// This is a relatively small scene, so use tighter shadow
// cascade bounds than the default for better quality.
// We also adjusted the shadow map to be larger since we're
// only using a single cascade.
CascadeShadowConfigBuilder {
num_cascades: 1,
maximum_distance: 1.6,
..default()
}
.build(),
));
commands.spawn(SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf"),
)));
}
fn animate_light_direction(
time: Res<Time>,
mut query: Query<&mut Transform, With<DirectionalLight>>,
) {
for mut transform in &mut query {
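// Keep a constant downward pitch (-FRAC_PI_4) while yawing around the Y axis over time.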
transform.rotation = Quat::from_euler(
EulerRot::ZYX,
0.0,
time.elapsed_secs() * PI / 5.0,
-FRAC_PI_4,
);
}
}

View File

@@ -0,0 +1,91 @@
//! Loads and renders a glTF file as a scene, and list all the different `gltf_extras`.
use bevy::{
gltf::{GltfExtras, GltfMaterialExtras, GltfMeshExtras, GltfSceneExtras},
prelude::*,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, check_for_gltf_extras)
.run();
}
#[derive(Component)]
struct ExampleDisplay;
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn((
Camera3d::default(),
Transform::from_xyz(2.0, 2.0, 2.0).looking_at(Vec3::ZERO, Vec3::Y),
));
commands.spawn(DirectionalLight {
shadows_enabled: true,
..default()
});
// a barebones scene containing one of each gltf_extra type
commands.spawn(SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/extras/gltf_extras.glb"),
)));
// a place to display the extras on screen
commands.spawn((
Text::default(),
TextFont {
font_size: 15.,
..default()
},
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
ExampleDisplay,
));
}
fn check_for_gltf_extras(
gltf_extras_per_entity: Query<(
Entity,
Option<&Name>,
Option<&GltfSceneExtras>,
Option<&GltfExtras>,
Option<&GltfMeshExtras>,
Option<&GltfMaterialExtras>,
)>,
mut display: Single<&mut Text, With<ExampleDisplay>>,
) {
let mut gltf_extra_infos_lines: Vec<String> = vec![];
for (id, name, scene_extras, extras, mesh_extras, material_extras) in
gltf_extras_per_entity.iter()
{
if scene_extras.is_some()
|| extras.is_some()
|| mesh_extras.is_some()
|| material_extras.is_some()
{
let formatted_extras = format!(
"Extras per entity {} ('Name: {}'):
- scene extras: {:?}
- primitive extras: {:?}
- mesh extras: {:?}
- material extras: {:?}
",
id,
name.unwrap_or(&Name::default()),
scene_extras,
extras,
mesh_extras,
material_extras
);
gltf_extra_infos_lines.push(formatted_extras);
}
display.0 = gltf_extra_infos_lines.join("\n");
}
}

115
vendor/bevy/examples/3d/mesh_ray_cast.rs vendored Normal file
View File

@@ -0,0 +1,115 @@
//! Demonstrates how to use the [`MeshRayCast`] system parameter to chain multiple ray casts
//! and bounce off of surfaces.
use std::f32::consts::{FRAC_PI_2, PI};
use bevy::{
color::palettes::css,
core_pipeline::{bloom::Bloom, tonemapping::Tonemapping},
math::vec3,
picking::backend::ray::RayMap,
prelude::*,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, bouncing_raycast)
.insert_resource(ClearColor(Color::BLACK))
.run();
}
const MAX_BOUNCES: usize = 64;
const LASER_SPEED: f32 = 0.03;
fn bouncing_raycast(
mut ray_cast: MeshRayCast,
mut gizmos: Gizmos,
time: Res<Time>,
// The ray map stores rays cast by the cursor
ray_map: Res<RayMap>,
) {
// Cast an automatically moving ray and bounce it off of surfaces
let t = ops::cos((time.elapsed_secs() - 4.0).max(0.0) * LASER_SPEED) * PI;
let ray_pos = Vec3::new(ops::sin(t), ops::cos(3.0 * t) * 0.5, ops::cos(t)) * 0.5;
let ray_dir = Dir3::new(-ray_pos).unwrap();
let ray = Ray3d::new(ray_pos, ray_dir);
gizmos.sphere(ray_pos, 0.1, Color::WHITE);
bounce_ray(ray, &mut ray_cast, &mut gizmos, Color::from(css::RED));
// Cast a ray from the cursor and bounce it off of surfaces
for (_, ray) in ray_map.iter() {
bounce_ray(*ray, &mut ray_cast, &mut gizmos, Color::from(css::GREEN));
}
}
// Bounces a ray off of surfaces `MAX_BOUNCES` times.
fn bounce_ray(mut ray: Ray3d, ray_cast: &mut MeshRayCast, gizmos: &mut Gizmos, color: Color) {
let mut intersections = Vec::with_capacity(MAX_BOUNCES + 1);
intersections.push((ray.origin, Color::srgb(30.0, 0.0, 0.0)));
for i in 0..MAX_BOUNCES {
// Cast the ray and get the first hit
let Some((_, hit)) = ray_cast
.cast_ray(ray, &MeshRayCastSettings::default())
.first()
else {
break;
};
// Draw the point of intersection and add it to the list
let brightness = 1.0 + 10.0 * (1.0 - i as f32 / MAX_BOUNCES as f32);
intersections.push((hit.point, Color::BLACK.mix(&color, brightness)));
gizmos.sphere(hit.point, 0.005, Color::BLACK.mix(&color, brightness * 2.0));
// Reflect the ray off of the surface
ray.direction = Dir3::new(ray.direction.reflect(hit.normal)).unwrap();
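// Offset the new origin slightly along the reflected direction so the next cast
// doesn't immediately re-hit the surface we just bounced off.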
ray.origin = hit.point + ray.direction * 1e-6;
}
gizmos.linestrip_gradient(intersections);
}
// Set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// Make a box of planes facing inward so the laser gets trapped inside
let plane_mesh = meshes.add(Plane3d::default());
let plane_material = materials.add(Color::from(css::GRAY).with_alpha(0.01));
let create_plane = move |translation, rotation| {
(
Transform::from_translation(translation)
.with_rotation(Quat::from_scaled_axis(rotation)),
Mesh3d(plane_mesh.clone()),
MeshMaterial3d(plane_material.clone()),
)
};
commands.spawn(create_plane(vec3(0.0, 0.5, 0.0), Vec3::X * PI));
commands.spawn(create_plane(vec3(0.0, -0.5, 0.0), Vec3::ZERO));
commands.spawn(create_plane(vec3(0.5, 0.0, 0.0), Vec3::Z * FRAC_PI_2));
commands.spawn(create_plane(vec3(-0.5, 0.0, 0.0), Vec3::Z * -FRAC_PI_2));
commands.spawn(create_plane(vec3(0.0, 0.0, 0.5), Vec3::X * -FRAC_PI_2));
commands.spawn(create_plane(vec3(0.0, 0.0, -0.5), Vec3::X * FRAC_PI_2));
// Light
commands.spawn((
DirectionalLight::default(),
Transform::from_rotation(Quat::from_euler(EulerRot::XYZ, -0.1, 0.2, 0.0)),
));
// Camera
commands.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Transform::from_xyz(1.5, 1.5, 1.5).looking_at(Vec3::ZERO, Vec3::Y),
Tonemapping::TonyMcMapface,
Bloom::default(),
));
}

131
vendor/bevy/examples/3d/meshlet.rs vendored Normal file
View File

@@ -0,0 +1,131 @@
//! Meshlet rendering for dense high-poly scenes (experimental).
// Note: This example showcases the meshlet API, but is not the type of scene that would benefit from using meshlets.
#[path = "../helpers/camera_controller.rs"]
mod camera_controller;
use bevy::{
pbr::{
experimental::meshlet::{MeshletMesh3d, MeshletPlugin},
CascadeShadowConfigBuilder, DirectionalLightShadowMap,
},
prelude::*,
render::render_resource::AsBindGroup,
};
use camera_controller::{CameraController, CameraControllerPlugin};
use std::{f32::consts::PI, path::Path, process::ExitCode};
const ASSET_URL: &str =
"https://raw.githubusercontent.com/JMS55/bevy_meshlet_asset/7a7c14138021f63904b584d5f7b73b695c7f4bbf/bunny.meshlet_mesh";
fn main() -> ExitCode {
if !Path::new("./assets/external/models/bunny.meshlet_mesh").exists() {
eprintln!("ERROR: Asset at path <bevy>/assets/external/models/bunny.meshlet_mesh is missing. Please download it from {ASSET_URL}");
return ExitCode::FAILURE;
}
App::new()
.insert_resource(DirectionalLightShadowMap { size: 4096 })
.add_plugins((
DefaultPlugins,
MeshletPlugin {
cluster_buffer_slots: 8192,
},
MaterialPlugin::<MeshletDebugMaterial>::default(),
CameraControllerPlugin,
))
.add_systems(Startup, setup)
.run();
ExitCode::SUCCESS
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut standard_materials: ResMut<Assets<StandardMaterial>>,
mut debug_materials: ResMut<Assets<MeshletDebugMaterial>>,
mut meshes: ResMut<Assets<Mesh>>,
) {
commands.spawn((
Camera3d::default(),
Transform::from_translation(Vec3::new(1.8, 0.4, -0.1)).looking_at(Vec3::ZERO, Vec3::Y),
Msaa::Off,
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 150.0,
..default()
},
CameraController::default(),
));
commands.spawn((
DirectionalLight {
illuminance: light_consts::lux::FULL_DAYLIGHT,
shadows_enabled: true,
..default()
},
CascadeShadowConfigBuilder {
num_cascades: 1,
maximum_distance: 15.0,
..default()
}
.build(),
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, PI * -0.15, PI * -0.15)),
));
// A custom file format storing a [`bevy_render::mesh::Mesh`]
// that has been converted to a [`bevy_pbr::meshlet::MeshletMesh`]
// using [`bevy_pbr::meshlet::MeshletMesh::from_mesh`], which is
// a function only available when the `meshlet_processor` cargo feature is enabled.
let meshlet_mesh_handle = asset_server.load("external/models/bunny.meshlet_mesh");
let debug_material = debug_materials.add(MeshletDebugMaterial::default());
for x in -2..=2 {
commands.spawn((
MeshletMesh3d(meshlet_mesh_handle.clone()),
MeshMaterial3d(standard_materials.add(StandardMaterial {
base_color: match x {
-2 => Srgba::hex("#dc2626").unwrap().into(),
-1 => Srgba::hex("#ea580c").unwrap().into(),
0 => Srgba::hex("#facc15").unwrap().into(),
1 => Srgba::hex("#16a34a").unwrap().into(),
2 => Srgba::hex("#0284c7").unwrap().into(),
_ => unreachable!(),
},
perceptual_roughness: (x + 2) as f32 / 4.0,
..default()
})),
Transform::default()
.with_scale(Vec3::splat(0.2))
.with_translation(Vec3::new(x as f32 / 2.0, 0.0, -0.3)),
));
}
for x in -2..=2 {
commands.spawn((
MeshletMesh3d(meshlet_mesh_handle.clone()),
MeshMaterial3d(debug_material.clone()),
Transform::default()
.with_scale(Vec3::splat(0.2))
.with_rotation(Quat::from_rotation_y(PI))
.with_translation(Vec3::new(x as f32 / 2.0, 0.0, 0.3)),
));
}
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))),
MeshMaterial3d(standard_materials.add(StandardMaterial {
base_color: Color::WHITE,
perceptual_roughness: 1.0,
..default()
})),
));
}
#[derive(Asset, TypePath, AsBindGroup, Clone, Default)]
struct MeshletDebugMaterial {
_dummy: (),
}
impl Material for MeshletDebugMaterial {}

View File

@@ -0,0 +1,525 @@
//! Demonstrates how to combine baked and dynamic lighting.
use bevy::{
pbr::Lightmap,
picking::{backend::HitData, pointer::PointerInteraction},
prelude::*,
scene::SceneInstanceReady,
};
use crate::widgets::{RadioButton, RadioButtonText, WidgetClickEvent, WidgetClickSender};
#[path = "../helpers/widgets.rs"]
mod widgets;
/// How bright the lightmaps are.
const LIGHTMAP_EXPOSURE: f32 = 600.0;
/// How far above the ground the sphere's origin is when moved, in scene units.
const SPHERE_OFFSET: f32 = 0.2;
/// The settings that the user has currently chosen for the app.
#[derive(Clone, Default, Resource)]
struct AppStatus {
/// The lighting mode that the user currently has set: baked, mixed, or
/// real-time.
lighting_mode: LightingMode,
}
/// The type of lighting to use in the scene.
#[derive(Clone, Copy, PartialEq, Default)]
enum LightingMode {
/// All light is computed ahead of time; no lighting takes place at runtime.
///
/// In this mode, the sphere can't be moved, as the light shining on it was
/// precomputed. On the plus side, the sphere has indirect lighting in this
/// mode, as the red hue on the bottom of the sphere demonstrates.
Baked,
/// All light for the static objects is computed ahead of time, but the
/// light for the dynamic sphere is computed at runtime.
///
/// In this mode, the sphere can be moved, and the light will be computed
/// for it as you do so. The sphere loses indirect illumination; notice the
/// lack of a red hue at the base of the sphere. However, the rest of the
/// scene has indirect illumination. Note also that the sphere doesn't cast
/// a shadow on the static objects in this mode, because shadows are part of
/// the lighting computation.
MixedDirect,
/// Indirect light for the static objects is computed ahead of time, and
/// direct light for all objects is computed at runtime.
///
/// In this mode, the sphere can be moved, and the light will be computed
/// for it as you do so. The sphere loses indirect illumination; notice the
/// lack of a red hue at the base of the sphere. However, the rest of the
/// scene has indirect illumination. The sphere does cast a shadow on
/// objects in this mode, because the direct light for all objects is being
/// computed dynamically.
#[default]
MixedIndirect,
/// Light is computed at runtime for all objects.
///
/// In this mode, no lightmaps are used at all. All objects are dynamically
/// lit, which provides maximum flexibility. However, the downside is that
/// global illumination is lost; note that the base of the sphere isn't red
/// as it is in baked mode.
RealTime,
}
/// An event that's fired whenever the user changes the lighting mode.
///
/// This is also fired when the scene loads for the first time.
#[derive(Clone, Copy, Default, Event)]
struct LightingModeChanged;
#[derive(Clone, Copy, Component, Debug)]
struct HelpText;
/// The name of every static object in the scene that has a lightmap, as well as
/// the UV rect of its lightmap.
///
/// Storing this as an array and doing a linear search through it is rather
/// inefficient, but we do it anyway for clarity's sake.
static LIGHTMAPS: [(&str, Rect); 5] = [
(
"Plane",
uv_rect_opengl(Vec2::splat(0.026), Vec2::splat(0.710)),
),
(
"SheenChair_fabric",
uv_rect_opengl(vec2(0.7864, 0.02377), vec2(0.1910, 0.1912)),
),
(
"SheenChair_label",
uv_rect_opengl(vec2(0.275, -0.016), vec2(0.858, 0.486)),
),
(
"SheenChair_metal",
uv_rect_opengl(vec2(0.998, 0.506), vec2(-0.029, -0.067)),
),
(
"SheenChair_wood",
uv_rect_opengl(vec2(0.787, 0.257), vec2(0.179, 0.177)),
),
];
static SPHERE_UV_RECT: Rect = uv_rect_opengl(vec2(0.788, 0.484), Vec2::splat(0.062));
/// The initial position of the sphere.
///
/// When the user sets the light mode to [`LightingMode::Baked`], we reset the
/// position to this point.
const INITIAL_SPHERE_POSITION: Vec3 = vec3(0.0, 0.5233223, 0.0);
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Mixed Lighting Example".into(),
..default()
}),
..default()
}))
.add_plugins(MeshPickingPlugin)
.insert_resource(AmbientLight {
color: ClearColor::default().0,
brightness: 10000.0,
affects_lightmapped_meshes: true,
})
.init_resource::<AppStatus>()
.add_event::<WidgetClickEvent<LightingMode>>()
.add_event::<LightingModeChanged>()
.add_systems(Startup, setup)
.add_systems(Update, update_lightmaps)
.add_systems(Update, update_directional_light)
.add_systems(Update, make_sphere_nonpickable)
.add_systems(Update, update_radio_buttons)
.add_systems(Update, handle_lighting_mode_change)
.add_systems(Update, widgets::handle_ui_interactions::<LightingMode>)
.add_systems(Update, reset_sphere_position)
.add_systems(Update, move_sphere)
.add_systems(Update, adjust_help_text)
.run();
}
/// Creates the scene.
fn setup(mut commands: Commands, asset_server: Res<AssetServer>, app_status: Res<AppStatus>) {
spawn_camera(&mut commands);
spawn_scene(&mut commands, &asset_server);
spawn_buttons(&mut commands);
spawn_help_text(&mut commands, &app_status);
}
/// Spawns the 3D camera.
fn spawn_camera(commands: &mut Commands) {
commands
.spawn(Camera3d::default())
.insert(Transform::from_xyz(-0.7, 0.7, 1.0).looking_at(vec3(0.0, 0.3, 0.0), Vec3::Y));
}
/// Spawns the scene.
///
/// The scene is loaded from a glTF file.
fn spawn_scene(commands: &mut Commands, asset_server: &AssetServer) {
commands
.spawn(SceneRoot(
asset_server.load(
GltfAssetLabel::Scene(0)
.from_asset("models/MixedLightingExample/MixedLightingExample.gltf"),
),
))
.observe(
|_: Trigger<SceneInstanceReady>,
mut lighting_mode_change_event_writer: EventWriter<LightingModeChanged>| {
// When the scene loads, send a `LightingModeChanged` event so
// that we set up the lightmaps.
lighting_mode_change_event_writer.write(LightingModeChanged);
},
);
}
/// Spawns the buttons that allow the user to change the lighting mode.
fn spawn_buttons(commands: &mut Commands) {
commands
.spawn(widgets::main_ui_node())
.with_children(|parent| {
widgets::spawn_option_buttons(
parent,
"Lighting",
&[
(LightingMode::Baked, "Baked"),
(LightingMode::MixedDirect, "Mixed (Direct)"),
(LightingMode::MixedIndirect, "Mixed (Indirect)"),
(LightingMode::RealTime, "Real-Time"),
],
);
});
}
/// Spawns the help text at the top of the window.
fn spawn_help_text(commands: &mut Commands, app_status: &AppStatus) {
commands.spawn((
create_help_text(app_status),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
HelpText,
));
}
/// Adds lightmaps to and/or removes lightmaps from objects in the scene when
/// the lighting mode changes.
///
/// This is also called right after the scene loads in order to set up the
/// lightmaps.
fn update_lightmaps(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut materials: ResMut<Assets<StandardMaterial>>,
meshes: Query<(Entity, &Name, &MeshMaterial3d<StandardMaterial>), With<Mesh3d>>,
mut lighting_mode_change_event_reader: EventReader<LightingModeChanged>,
app_status: Res<AppStatus>,
) {
// Only run if the lighting mode changed. (Note that a change event is fired
// when the scene first loads.)
if lighting_mode_change_event_reader.read().next().is_none() {
return;
}
// Select the lightmap to use, based on the lighting mode.
let lightmap: Option<Handle<Image>> = match app_status.lighting_mode {
LightingMode::Baked => {
Some(asset_server.load("lightmaps/MixedLightingExample-Baked.zstd.ktx2"))
}
LightingMode::MixedDirect => {
Some(asset_server.load("lightmaps/MixedLightingExample-MixedDirect.zstd.ktx2"))
}
LightingMode::MixedIndirect => {
Some(asset_server.load("lightmaps/MixedLightingExample-MixedIndirect.zstd.ktx2"))
}
LightingMode::RealTime => None,
};
'outer: for (entity, name, material) in &meshes {
// Add lightmaps to or remove lightmaps from the scenery objects in the
// scene (all objects but the sphere).
//
// Note that doing a linear search through the `LIGHTMAPS` array is
// inefficient, but we do it anyway in this example to improve clarity.
for (lightmap_name, uv_rect) in LIGHTMAPS {
if &**name != lightmap_name {
continue;
}
// Lightmap exposure defaults to zero, so we need to set it.
if let Some(ref mut material) = materials.get_mut(material) {
material.lightmap_exposure = LIGHTMAP_EXPOSURE;
}
// Add or remove the lightmap.
match lightmap {
Some(ref lightmap) => {
commands.entity(entity).insert(Lightmap {
image: (*lightmap).clone(),
uv_rect,
bicubic_sampling: false,
});
}
None => {
commands.entity(entity).remove::<Lightmap>();
}
}
continue 'outer;
}
// Add lightmaps to or remove lightmaps from the sphere.
if &**name == "Sphere" {
// Lightmap exposure defaults to zero, so we need to set it.
if let Some(ref mut material) = materials.get_mut(material) {
material.lightmap_exposure = LIGHTMAP_EXPOSURE;
}
// Add or remove the lightmap from the sphere. We only apply the
// lightmap in fully-baked mode.
match (&lightmap, app_status.lighting_mode) {
(Some(lightmap), LightingMode::Baked) => {
commands.entity(entity).insert(Lightmap {
image: (*lightmap).clone(),
uv_rect: SPHERE_UV_RECT,
bicubic_sampling: false,
});
}
_ => {
commands.entity(entity).remove::<Lightmap>();
}
}
}
}
}
/// Converts a uv rectangle from the OpenGL coordinate system (origin in the
/// lower left) to the Vulkan coordinate system (origin in the upper left) that
/// Bevy uses.
///
/// For this particular example, the baking tool happened to use the OpenGL
/// coordinate system, so it was more convenient to do the conversion at compile
/// time than to pre-calculate and hard-code the values.
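///
/// For example, the plane's lightmap region in [`LIGHTMAPS`] has a GL-space
/// min of (0.026, 0.026) and a size of (0.710, 0.710), so the converted rect
/// runs from (0.026, 1.0 - 0.026 - 0.710) = (0.026, 0.264) to (0.736, 0.974)
/// in Bevy's top-left-origin UV space.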
const fn uv_rect_opengl(gl_min: Vec2, size: Vec2) -> Rect {
let min = vec2(gl_min.x, 1.0 - gl_min.y - size.y);
Rect {
min,
max: vec2(min.x + size.x, min.y + size.y),
}
}
/// Ensures that clicking on the scene to move the sphere doesn't result in a
/// hit on the sphere itself.
fn make_sphere_nonpickable(
mut commands: Commands,
mut query: Query<(Entity, &Name), (With<Mesh3d>, Without<Pickable>)>,
) {
for (sphere, name) in &mut query {
if &**name == "Sphere" {
commands.entity(sphere).insert(Pickable::IGNORE);
}
}
}
/// Updates the directional light settings as necessary when the lighting mode
/// changes.
fn update_directional_light(
mut lights: Query<&mut DirectionalLight>,
mut lighting_mode_change_event_reader: EventReader<LightingModeChanged>,
app_status: Res<AppStatus>,
) {
// Only run if the lighting mode changed. (Note that a change event is fired
// when the scene first loads.)
if lighting_mode_change_event_reader.read().next().is_none() {
return;
}
// Real-time direct light is used on the scenery if we're using mixed
// indirect or real-time mode.
let scenery_is_lit_in_real_time = matches!(
app_status.lighting_mode,
LightingMode::MixedIndirect | LightingMode::RealTime
);
for mut light in &mut lights {
light.affects_lightmapped_mesh_diffuse = scenery_is_lit_in_real_time;
// Don't bother enabling shadows if they won't show up on the scenery.
light.shadows_enabled = scenery_is_lit_in_real_time;
}
}
/// Updates the state of the selection widgets at the bottom of the window when
/// the lighting mode changes.
fn update_radio_buttons(
mut widgets: Query<
(
Entity,
Option<&mut BackgroundColor>,
Has<Text>,
&WidgetClickSender<LightingMode>,
),
Or<(With<RadioButton>, With<RadioButtonText>)>,
>,
app_status: Res<AppStatus>,
mut writer: TextUiWriter,
) {
for (entity, image, has_text, sender) in &mut widgets {
let selected = **sender == app_status.lighting_mode;
if let Some(mut bg_color) = image {
widgets::update_ui_radio_button(&mut bg_color, selected);
}
if has_text {
widgets::update_ui_radio_button_text(entity, &mut writer, selected);
}
}
}
/// Handles clicks on the widgets at the bottom of the screen and fires
/// [`LightingModeChanged`] events.
fn handle_lighting_mode_change(
mut widget_click_event_reader: EventReader<WidgetClickEvent<LightingMode>>,
mut lighting_mode_change_event_writer: EventWriter<LightingModeChanged>,
mut app_status: ResMut<AppStatus>,
) {
for event in widget_click_event_reader.read() {
app_status.lighting_mode = **event;
lighting_mode_change_event_writer.write(LightingModeChanged);
}
}
/// Moves the sphere to its original position when the user selects the baked
/// lighting mode.
///
/// As the light from the sphere is precomputed and depends on the sphere's
/// original position, the sphere must be placed there in order for the lighting
/// to be correct.
fn reset_sphere_position(
mut objects: Query<(&Name, &mut Transform)>,
mut lighting_mode_change_event_reader: EventReader<LightingModeChanged>,
app_status: Res<AppStatus>,
) {
// Only run if the lighting mode changed and if the lighting mode is
// `LightingMode::Baked`. (Note that a change event is fired when the scene
// first loads.)
if lighting_mode_change_event_reader.read().next().is_none()
|| app_status.lighting_mode != LightingMode::Baked
{
return;
}
for (name, mut transform) in &mut objects {
if &**name == "Sphere" {
transform.translation = INITIAL_SPHERE_POSITION;
break;
}
}
}
/// Updates the position of the sphere when the user clicks on a spot in the
/// scene.
///
/// Note that the position of the sphere is locked in baked lighting mode.
fn move_sphere(
mouse_button_input: Res<ButtonInput<MouseButton>>,
pointers: Query<&PointerInteraction>,
mut meshes: Query<(&Name, &ChildOf), With<Mesh3d>>,
mut transforms: Query<&mut Transform>,
app_status: Res<AppStatus>,
) {
    // Only run while the left mouse button is held down and we're not in
    // baked lighting mode.
if app_status.lighting_mode == LightingMode::Baked
|| !mouse_button_input.pressed(MouseButton::Left)
{
return;
}
// Find the sphere.
let Some(child_of) = meshes
.iter_mut()
.filter_map(|(name, child_of)| {
if &**name == "Sphere" {
Some(child_of)
} else {
None
}
})
.next()
else {
return;
};
// Grab its transform.
let Ok(mut transform) = transforms.get_mut(child_of.parent()) else {
return;
};
// Set its transform to the appropriate position, as determined by the
// picking subsystem.
for interaction in pointers.iter() {
if let Some(&(
_,
HitData {
position: Some(position),
..
},
)) = interaction.get_nearest_hit()
{
transform.translation = position + vec3(0.0, SPHERE_OFFSET, 0.0);
}
}
}
/// Changes the help text at the top of the screen when the lighting mode
/// changes.
fn adjust_help_text(
mut commands: Commands,
help_texts: Query<Entity, With<HelpText>>,
app_status: Res<AppStatus>,
mut lighting_mode_change_event_reader: EventReader<LightingModeChanged>,
) {
if lighting_mode_change_event_reader.read().next().is_none() {
return;
}
for help_text in &help_texts {
commands
.entity(help_text)
.insert(create_help_text(&app_status));
}
}
/// Returns appropriate text to display at the top of the screen.
fn create_help_text(app_status: &AppStatus) -> Text {
match app_status.lighting_mode {
LightingMode::Baked => Text::new(
"Scenery: Static, baked direct light, baked indirect light
Sphere: Static, baked direct light, baked indirect light",
),
LightingMode::MixedDirect => Text::new(
"Scenery: Static, baked direct light, baked indirect light
Sphere: Dynamic, real-time direct light, no indirect light
Click in the scene to move the sphere",
),
LightingMode::MixedIndirect => Text::new(
"Scenery: Static, real-time direct light, baked indirect light
Sphere: Dynamic, real-time direct light, no indirect light
Click in the scene to move the sphere",
),
LightingMode::RealTime => Text::new(
"Scenery: Dynamic, real-time direct light, no indirect light
Sphere: Dynamic, real-time direct light, no indirect light
Click in the scene to move the sphere",
),
}
}

389
vendor/bevy/examples/3d/motion_blur.rs vendored Normal file
View File

@@ -0,0 +1,389 @@
//! Demonstrates how to enable per-object motion blur. This rendering feature can be configured per
//! camera using the [`MotionBlur`] component.
use bevy::{
core_pipeline::motion_blur::MotionBlur,
image::{ImageAddressMode, ImageFilterMode, ImageSampler, ImageSamplerDescriptor},
math::ops,
prelude::*,
};
fn main() {
let mut app = App::new();
app.add_plugins(DefaultPlugins)
.add_systems(Startup, (setup_camera, setup_scene, setup_ui))
.add_systems(Update, (keyboard_inputs, move_cars, move_camera).chain())
.run();
}
fn setup_camera(mut commands: Commands) {
commands.spawn((
Camera3d::default(),
// Add the `MotionBlur` component to a camera to enable motion blur.
// Motion blur requires the depth and motion vector prepass, which this bundle adds.
// Configure the amount and quality of motion blur per-camera using this component.
MotionBlur {
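            // A shutter angle of 1.0 blurs across roughly the whole frame
            // interval; smaller values shorten the trails. More samples give
            // smoother blur at a higher per-pixel cost.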
shutter_angle: 1.0,
samples: 2,
},
// MSAA and Motion Blur together are not compatible on WebGL
#[cfg(all(feature = "webgl2", target_arch = "wasm32", not(feature = "webgpu")))]
Msaa::Off,
));
}
// Everything past this point is used to build the example, but isn't required to use motion blur.
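/// Which camera behavior is active: `Track` watches the lead car from a fixed
/// vantage point with a narrow field of view, while `Chase` follows just
/// behind it.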
#[derive(Resource)]
enum CameraMode {
Track,
Chase,
}
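/// A phase offset along the race track, used to space the cars apart.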
#[derive(Component)]
struct Moves(f32);
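/// Marks the car that the camera tracks or chases.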
#[derive(Component)]
struct CameraTracked;
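/// Marks the wheels, which spin as their car moves.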
#[derive(Component)]
struct Rotates;
fn setup_scene(
asset_server: Res<AssetServer>,
mut images: ResMut<Assets<Image>>,
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
commands.insert_resource(AmbientLight {
color: Color::WHITE,
brightness: 300.0,
..default()
});
commands.insert_resource(CameraMode::Chase);
commands.spawn((
DirectionalLight {
illuminance: 3_000.0,
shadows_enabled: true,
..default()
},
Transform::default().looking_to(Vec3::new(-1.0, -0.7, -1.0), Vec3::X),
));
// Sky
commands.spawn((
Mesh3d(meshes.add(Sphere::default())),
MeshMaterial3d(materials.add(StandardMaterial {
unlit: true,
base_color: Color::linear_rgb(0.1, 0.6, 1.0),
..default()
})),
Transform::default().with_scale(Vec3::splat(-4000.0)),
));
// Ground
let mut plane: Mesh = Plane3d::default().into();
let uv_size = 4000.0;
let uvs = vec![[uv_size, 0.0], [0.0, 0.0], [0.0, uv_size], [uv_size; 2]];
plane.insert_attribute(Mesh::ATTRIBUTE_UV_0, uvs);
commands.spawn((
Mesh3d(meshes.add(plane)),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::WHITE,
perceptual_roughness: 1.0,
base_color_texture: Some(images.add(uv_debug_texture())),
..default()
})),
Transform::from_xyz(0.0, -0.65, 0.0).with_scale(Vec3::splat(80.)),
));
spawn_cars(&asset_server, &mut meshes, &mut materials, &mut commands);
spawn_trees(&mut meshes, &mut materials, &mut commands);
spawn_barriers(&mut meshes, &mut materials, &mut commands);
}
fn spawn_cars(
asset_server: &AssetServer,
meshes: &mut Assets<Mesh>,
materials: &mut Assets<StandardMaterial>,
commands: &mut Commands,
) {
const N_CARS: usize = 20;
let box_mesh = meshes.add(Cuboid::new(0.3, 0.15, 0.55));
let cylinder = meshes.add(Cylinder::default());
let logo = asset_server.load("branding/icon.png");
let wheel_matl = materials.add(StandardMaterial {
base_color: Color::WHITE,
base_color_texture: Some(logo.clone()),
..default()
});
let mut matl = |color| {
materials.add(StandardMaterial {
base_color: color,
..default()
})
};
let colors = [
matl(Color::linear_rgb(1.0, 0.0, 0.0)),
matl(Color::linear_rgb(1.0, 1.0, 0.0)),
matl(Color::BLACK),
matl(Color::linear_rgb(0.0, 0.0, 1.0)),
matl(Color::linear_rgb(0.0, 1.0, 0.0)),
matl(Color::linear_rgb(1.0, 0.0, 1.0)),
matl(Color::linear_rgb(0.5, 0.5, 0.0)),
matl(Color::linear_rgb(1.0, 0.5, 0.0)),
];
for i in 0..N_CARS {
let color = colors[i % colors.len()].clone();
commands
.spawn((
Mesh3d(box_mesh.clone()),
MeshMaterial3d(color.clone()),
Transform::from_scale(Vec3::splat(0.5)),
Moves(i as f32 * 2.0),
))
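            // Only the first car is tagged `CameraTracked`, so the camera
            // follows it in both Track and Chase modes.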
.insert_if(CameraTracked, || i == 0)
.with_children(|parent| {
parent.spawn((
Mesh3d(box_mesh.clone()),
MeshMaterial3d(color),
Transform::from_xyz(0.0, 0.08, 0.03).with_scale(Vec3::new(1.0, 1.0, 0.5)),
));
let mut spawn_wheel = |x: f32, z: f32| {
parent.spawn((
Mesh3d(cylinder.clone()),
MeshMaterial3d(wheel_matl.clone()),
Transform::from_xyz(0.14 * x, -0.045, 0.15 * z)
.with_scale(Vec3::new(0.15, 0.04, 0.15))
.with_rotation(Quat::from_rotation_z(std::f32::consts::FRAC_PI_2)),
Rotates,
));
};
spawn_wheel(1.0, 1.0);
spawn_wheel(1.0, -1.0);
spawn_wheel(-1.0, 1.0);
spawn_wheel(-1.0, -1.0);
});
}
}
fn spawn_barriers(
meshes: &mut Assets<Mesh>,
materials: &mut Assets<StandardMaterial>,
commands: &mut Commands,
) {
const N_CONES: usize = 100;
let capsule = meshes.add(Capsule3d::default());
let matl = materials.add(StandardMaterial {
base_color: Color::srgb_u8(255, 87, 51),
reflectance: 1.0,
..default()
});
let mut spawn_with_offset = |offset: f32| {
for i in 0..N_CONES {
let pos = race_track_pos(
offset,
(i as f32) / (N_CONES as f32) * std::f32::consts::PI * 2.0,
);
commands.spawn((
Mesh3d(capsule.clone()),
MeshMaterial3d(matl.clone()),
Transform::from_xyz(pos.x, -0.65, pos.y).with_scale(Vec3::splat(0.07)),
));
}
};
spawn_with_offset(0.04);
spawn_with_offset(-0.04);
}
fn spawn_trees(
meshes: &mut Assets<Mesh>,
materials: &mut Assets<StandardMaterial>,
commands: &mut Commands,
) {
const N_TREES: usize = 30;
let capsule = meshes.add(Capsule3d::default());
let sphere = meshes.add(Sphere::default());
let leaves = materials.add(Color::linear_rgb(0.0, 1.0, 0.0));
let trunk = materials.add(Color::linear_rgb(0.4, 0.2, 0.2));
let mut spawn_with_offset = |offset: f32| {
for i in 0..N_TREES {
let pos = race_track_pos(
offset,
(i as f32) / (N_TREES as f32) * std::f32::consts::PI * 2.0,
);
let [x, z] = pos.into();
commands.spawn((
Mesh3d(sphere.clone()),
MeshMaterial3d(leaves.clone()),
Transform::from_xyz(x, -0.3, z).with_scale(Vec3::splat(0.3)),
));
commands.spawn((
Mesh3d(capsule.clone()),
MeshMaterial3d(trunk.clone()),
Transform::from_xyz(x, -0.5, z).with_scale(Vec3::new(0.05, 0.3, 0.05)),
));
}
};
spawn_with_offset(0.07);
spawn_with_offset(-0.07);
}
fn setup_ui(mut commands: Commands) {
commands
.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
))
.with_children(|p| {
p.spawn(TextSpan::default());
p.spawn(TextSpan::default());
p.spawn(TextSpan::new("1/2: -/+ shutter angle (blur amount)\n"));
p.spawn(TextSpan::new("3/4: -/+ sample count (blur quality)\n"));
p.spawn(TextSpan::new("Spacebar: cycle camera\n"));
});
}
fn keyboard_inputs(
mut motion_blur: Single<&mut MotionBlur>,
presses: Res<ButtonInput<KeyCode>>,
text: Single<Entity, With<Text>>,
mut writer: TextUiWriter,
mut camera: ResMut<CameraMode>,
) {
if presses.just_pressed(KeyCode::Digit1) {
motion_blur.shutter_angle -= 0.25;
} else if presses.just_pressed(KeyCode::Digit2) {
motion_blur.shutter_angle += 0.25;
} else if presses.just_pressed(KeyCode::Digit3) {
motion_blur.samples = motion_blur.samples.saturating_sub(1);
} else if presses.just_pressed(KeyCode::Digit4) {
motion_blur.samples += 1;
} else if presses.just_pressed(KeyCode::Space) {
*camera = match *camera {
CameraMode::Track => CameraMode::Chase,
CameraMode::Chase => CameraMode::Track,
};
}
motion_blur.shutter_angle = motion_blur.shutter_angle.clamp(0.0, 1.0);
motion_blur.samples = motion_blur.samples.clamp(0, 64);
let entity = *text;
*writer.text(entity, 1) = format!("Shutter angle: {:.2}\n", motion_blur.shutter_angle);
    *writer.text(entity, 2) = format!("Samples: {}\n", motion_blur.samples);
}
/// Parametric function for a looping race track. The returned point is offset
/// perpendicular to the track centerline by `offset`.
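///
/// The centerline is the Lissajous-like curve (sin(2t), cos(3t)) scaled by 8,
/// and the offset is applied along the unit normal (dy, -dx) / |(dx, dy)|.
/// For example, at t = 0 the tangent is (2, 0), so `race_track_pos(offset, 0.0)`
/// returns `Vec2::new(0.0, (1.0 - offset) * 8.0)`.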
fn race_track_pos(offset: f32, t: f32) -> Vec2 {
let x_tweak = 2.0;
let y_tweak = 3.0;
let scale = 8.0;
let x0 = ops::sin(x_tweak * t);
let y0 = ops::cos(y_tweak * t);
let dx = x_tweak * ops::cos(x_tweak * t);
let dy = y_tweak * -ops::sin(y_tweak * t);
let dl = ops::hypot(dx, dy);
let x = x0 + offset * dy / dl;
let y = y0 - offset * dx / dl;
Vec2::new(x, y) * scale
}
fn move_cars(
time: Res<Time>,
mut movables: Query<(&mut Transform, &Moves, &Children)>,
mut spins: Query<&mut Transform, (Without<Moves>, With<Rotates>)>,
) {
for (mut transform, moves, children) in &mut movables {
let time = time.elapsed_secs() * 0.25;
let t = time + 0.5 * moves.0;
let dx = ops::cos(t);
let dz = -ops::sin(3.0 * t);
let speed_variation = (dx * dx + dz * dz).sqrt() * 0.15;
let t = t + speed_variation;
let prev = transform.translation;
transform.translation.x = race_track_pos(0.0, t).x;
transform.translation.z = race_track_pos(0.0, t).y;
transform.translation.y = -0.59;
let delta = transform.translation - prev;
transform.look_to(delta, Vec3::Y);
for child in children.iter() {
let Ok(mut wheel) = spins.get_mut(child) else {
continue;
};
let radius = wheel.scale.x;
let circumference = 2.0 * std::f32::consts::PI * radius;
let angle = delta.length() / circumference * std::f32::consts::PI * 2.0;
wheel.rotate_local_y(angle);
}
}
}
fn move_camera(
camera: Single<(&mut Transform, &mut Projection), Without<CameraTracked>>,
tracked: Single<&Transform, With<CameraTracked>>,
mode: Res<CameraMode>,
) {
let (mut transform, mut projection) = camera.into_inner();
match *mode {
CameraMode::Track => {
transform.look_at(tracked.translation, Vec3::Y);
transform.translation = Vec3::new(15.0, -0.5, 0.0);
if let Projection::Perspective(perspective) = &mut *projection {
perspective.fov = 0.05;
}
}
CameraMode::Chase => {
transform.translation =
tracked.translation + Vec3::new(0.0, 0.15, 0.0) + tracked.back() * 0.6;
transform.look_to(tracked.forward(), Vec3::Y);
if let Projection::Perspective(perspective) = &mut *projection {
perspective.fov = 1.0;
}
}
}
}
fn uv_debug_texture() -> Image {
use bevy::render::{render_asset::RenderAssetUsages, render_resource::*};
const TEXTURE_SIZE: usize = 7;
let mut palette = [
164, 164, 164, 255, 168, 168, 168, 255, 153, 153, 153, 255, 139, 139, 139, 255, 153, 153,
153, 255, 177, 177, 177, 255, 159, 159, 159, 255,
];
let mut texture_data = [0; TEXTURE_SIZE * TEXTURE_SIZE * 4];
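    // Fill the texture one row at a time, rotating the palette by 12 bytes
    // (3 RGBA pixels) per row so the grayscale stripes form a diagonal pattern.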
for y in 0..TEXTURE_SIZE {
let offset = TEXTURE_SIZE * y * 4;
texture_data[offset..(offset + TEXTURE_SIZE * 4)].copy_from_slice(&palette);
palette.rotate_right(12);
}
let mut img = Image::new_fill(
Extent3d {
width: TEXTURE_SIZE as u32,
height: TEXTURE_SIZE as u32,
depth_or_array_layers: 1,
},
TextureDimension::D2,
&texture_data,
TextureFormat::Rgba8UnormSrgb,
RenderAssetUsages::RENDER_WORLD,
);
img.sampler = ImageSampler::Descriptor(ImageSamplerDescriptor {
address_mode_u: ImageAddressMode::Repeat,
address_mode_v: ImageAddressMode::MirrorRepeat,
mag_filter: ImageFilterMode::Nearest,
..ImageSamplerDescriptor::linear()
});
img
}

View File

@@ -0,0 +1,698 @@
//! Demonstrates occlusion culling.
//!
//! This demo rotates many small cubes around a rotating large cube at the
//! origin. At all times, the large cube will be occluding several of the small
//! cubes. The demo displays the number of cubes that were actually rendered, so
//! the effects of occlusion culling can be seen.
use std::{
any::TypeId,
f32::consts::PI,
fmt::Write as _,
result::Result,
sync::{Arc, Mutex},
};
use bevy::{
color::palettes::css::{SILVER, WHITE},
core_pipeline::{
core_3d::{
graph::{Core3d, Node3d},
Opaque3d,
},
prepass::DepthPrepass,
},
pbr::PbrPlugin,
prelude::*,
render::{
batching::gpu_preprocessing::{
GpuPreprocessingMode, GpuPreprocessingSupport, IndirectParametersBuffers,
IndirectParametersIndexed,
},
experimental::occlusion_culling::OcclusionCulling,
render_graph::{self, NodeRunError, RenderGraphApp, RenderGraphContext, RenderLabel},
render_resource::{Buffer, BufferDescriptor, BufferUsages, MapMode},
renderer::{RenderContext, RenderDevice},
settings::WgpuFeatures,
Render, RenderApp, RenderDebugFlags, RenderPlugin, RenderSet,
},
};
use bytemuck::Pod;
/// The radius of the spinning sphere of cubes.
const OUTER_RADIUS: f32 = 3.0;
/// The density of cubes in the outer sphere.
const OUTER_SUBDIVISION_COUNT: u32 = 5;
/// The speed at which the outer sphere and large cube rotate in radians per
/// frame.
const ROTATION_SPEED: f32 = 0.01;
/// The length of each side of the small cubes, in meters.
const SMALL_CUBE_SIZE: f32 = 0.1;
/// The length of each side of the large cube, in meters.
const LARGE_CUBE_SIZE: f32 = 2.0;
/// A marker component for the immediate parent of the large sphere of cubes.
#[derive(Default, Component)]
struct SphereParent;
/// A marker component for the large spinning cube at the origin.
#[derive(Default, Component)]
struct LargeCube;
/// A plugin for the render app that reads the number of culled meshes from the
/// GPU back to the CPU.
struct ReadbackIndirectParametersPlugin;
/// The node that we insert into the render graph in order to read the number of
/// culled meshes from the GPU back to the CPU.
#[derive(Default)]
struct ReadbackIndirectParametersNode;
/// The [`RenderLabel`] that we use to identify the
/// [`ReadbackIndirectParametersNode`].
#[derive(Clone, PartialEq, Eq, Hash, Debug, RenderLabel)]
struct ReadbackIndirectParameters;
/// The intermediate staging buffers that we use to read back the indirect
/// parameters from the GPU to the CPU.
///
/// We read back the GPU indirect parameters so that we can determine the number
/// of meshes that were culled.
///
/// `wgpu` doesn't allow us to read indirect buffers back from the GPU to the
/// CPU directly. Instead, we have to copy them to a temporary staging buffer
/// first, and then read *those* buffers back from the GPU to the CPU. This
/// resource holds those temporary buffers.
#[derive(Resource, Default)]
struct IndirectParametersStagingBuffers {
/// The buffer that stores the indirect draw commands.
///
/// See [`IndirectParametersIndexed`] for more information about the memory
/// layout of this buffer.
data: Option<Buffer>,
/// The buffer that stores the *number* of indirect draw commands.
///
/// We only care about the first `u32` in this buffer.
batch_sets: Option<Buffer>,
}
/// A resource, shared between the main world and the render world, that saves a
/// CPU-side copy of the GPU buffer that stores the indirect draw parameters.
///
/// This is needed so that we can display the number of meshes that were culled.
/// It's reference counted, and protected by a lock, because we don't precisely
/// know when the GPU will be ready to present the CPU with the buffer copy.
/// Even though the rendering runs at least a frame ahead of the main app logic,
/// we don't require more precise synchronization than the lock because we don't
/// really care how up-to-date the counter of culled meshes is. If it's off by a
/// few frames, that's no big deal.
#[derive(Clone, Resource, Deref, DerefMut)]
struct SavedIndirectParameters(Arc<Mutex<SavedIndirectParametersData>>);
/// A CPU-side copy of the GPU buffer that stores the indirect draw parameters.
///
/// This is needed so that we can display the number of meshes that were culled.
struct SavedIndirectParametersData {
/// The CPU-side copy of the GPU buffer that stores the indirect draw
/// parameters.
data: Vec<IndirectParametersIndexed>,
/// The CPU-side copy of the GPU buffer that stores the *number* of indirect
/// draw parameters that we have.
///
/// All we care about is the number of indirect draw parameters for a single
/// view, so this is only one word in size.
count: u32,
/// True if occlusion culling is supported at all; false if it's not.
occlusion_culling_supported: bool,
/// True if we support inspecting the number of meshes that were culled on
/// this platform; false if we don't.
///
/// If `multi_draw_indirect_count` isn't supported, then we would have to
/// employ a more complicated approach in order to determine the number of
/// meshes that are occluded, and that would be out of scope for this
/// example.
occlusion_culling_introspection_supported: bool,
}
impl FromWorld for SavedIndirectParameters {
fn from_world(world: &mut World) -> SavedIndirectParameters {
let render_device = world.resource::<RenderDevice>();
SavedIndirectParameters(Arc::new(Mutex::new(SavedIndirectParametersData {
data: vec![],
count: 0,
            // This gets set to false in `readback_indirect_parameters` if we
            // don't support GPU preprocessing.
occlusion_culling_supported: true,
// In order to determine how many meshes were culled, we look at the
// indirect count buffer that Bevy only populates if the platform
// supports `multi_draw_indirect_count`. So, if we don't have that
// feature, then we don't bother to display how many meshes were
// culled.
occlusion_culling_introspection_supported: render_device
.features()
.contains(WgpuFeatures::MULTI_DRAW_INDIRECT_COUNT),
})))
}
}
/// The demo's current settings.
#[derive(Resource)]
struct AppStatus {
/// Whether occlusion culling is presently enabled.
///
/// By default, this is set to true.
occlusion_culling: bool,
}
impl Default for AppStatus {
fn default() -> Self {
AppStatus {
occlusion_culling: true,
}
}
}
fn main() {
let render_debug_flags = RenderDebugFlags::ALLOW_COPIES_FROM_INDIRECT_PARAMETERS;
App::new()
.add_plugins(
DefaultPlugins
.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Occlusion Culling Example".into(),
..default()
}),
..default()
})
.set(RenderPlugin {
debug_flags: render_debug_flags,
..default()
})
.set(PbrPlugin {
debug_flags: render_debug_flags,
..default()
}),
)
.add_plugins(ReadbackIndirectParametersPlugin)
.init_resource::<AppStatus>()
.add_systems(Startup, setup)
.add_systems(Update, spin_small_cubes)
.add_systems(Update, spin_large_cube)
.add_systems(Update, update_status_text)
.add_systems(Update, toggle_occlusion_culling_on_request)
.run();
}
impl Plugin for ReadbackIndirectParametersPlugin {
fn build(&self, app: &mut App) {
// Fetch the render app.
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
return;
};
render_app
.init_resource::<IndirectParametersStagingBuffers>()
.add_systems(ExtractSchedule, readback_indirect_parameters)
.add_systems(
Render,
create_indirect_parameters_staging_buffers.in_set(RenderSet::PrepareResourcesFlush),
)
// Add the node that allows us to read the indirect parameters back
// from the GPU to the CPU, which allows us to determine how many
// meshes were culled.
.add_render_graph_node::<ReadbackIndirectParametersNode>(
Core3d,
ReadbackIndirectParameters,
)
// We read back the indirect parameters any time after
// `EndMainPass`. Readback doesn't particularly need to execute
// before `EndMainPassPostProcessing`, but we specify that anyway
// because we want to make the indirect parameters run before
            // *something* in the graph, and `EndMainPassPostProcessing` is as
            // good a node as any other.
.add_render_graph_edges(
Core3d,
(
Node3d::EndMainPass,
ReadbackIndirectParameters,
Node3d::EndMainPassPostProcessing,
),
);
}
fn finish(&self, app: &mut App) {
// Create the `SavedIndirectParameters` resource that we're going to use
// to communicate between the thread that the GPU-to-CPU readback
// callback runs on and the main application threads. This resource is
// atomically reference counted. We store one reference to the
// `SavedIndirectParameters` in the main app and another reference in
// the render app.
let saved_indirect_parameters = SavedIndirectParameters::from_world(app.world_mut());
app.insert_resource(saved_indirect_parameters.clone());
// Fetch the render app.
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
return;
};
render_app
// Insert another reference to the `SavedIndirectParameters`.
.insert_resource(saved_indirect_parameters);
}
}
/// Spawns all the objects in the scene.
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
spawn_small_cubes(&mut commands, &mut meshes, &mut materials);
spawn_large_cube(&mut commands, &asset_server, &mut meshes, &mut materials);
spawn_light(&mut commands);
spawn_camera(&mut commands);
spawn_help_text(&mut commands);
}
/// Spawns the rotating sphere of small cubes.
fn spawn_small_cubes(
commands: &mut Commands,
meshes: &mut Assets<Mesh>,
materials: &mut Assets<StandardMaterial>,
) {
// Add the cube mesh.
let small_cube = meshes.add(Cuboid::new(
SMALL_CUBE_SIZE,
SMALL_CUBE_SIZE,
SMALL_CUBE_SIZE,
));
// Add the cube material.
let small_cube_material = materials.add(StandardMaterial {
base_color: SILVER.into(),
..default()
});
// Create the entity that the small cubes will be parented to. This is the
// entity that we rotate.
let sphere_parent = commands
.spawn(Transform::from_translation(Vec3::ZERO))
.insert(Visibility::default())
.insert(SphereParent)
.id();
// Now we have to figure out where to place the cubes. To do that, we create
// a sphere mesh, but we don't add it to the scene. Instead, we inspect the
// sphere mesh to find the positions of its vertices, and spawn a small cube
// at each one. That way, we end up with a bunch of cubes arranged in a
// spherical shape.
// Create the sphere mesh, and extract the positions of its vertices.
let sphere = Sphere::new(OUTER_RADIUS)
.mesh()
.ico(OUTER_SUBDIVISION_COUNT)
.unwrap();
let sphere_positions = sphere.attribute(Mesh::ATTRIBUTE_POSITION).unwrap();
// At each vertex, create a small cube.
for sphere_position in sphere_positions.as_float3().unwrap() {
let sphere_position = Vec3::from_slice(sphere_position);
let small_cube = commands
.spawn(Mesh3d(small_cube.clone()))
.insert(MeshMaterial3d(small_cube_material.clone()))
.insert(Transform::from_translation(sphere_position))
.id();
commands.entity(sphere_parent).add_child(small_cube);
}
}
/// Spawns the large cube at the center of the screen.
///
/// This cube rotates chaotically and occludes small cubes behind it.
fn spawn_large_cube(
commands: &mut Commands,
asset_server: &AssetServer,
meshes: &mut Assets<Mesh>,
materials: &mut Assets<StandardMaterial>,
) {
commands
.spawn(Mesh3d(meshes.add(Cuboid::new(
LARGE_CUBE_SIZE,
LARGE_CUBE_SIZE,
LARGE_CUBE_SIZE,
))))
.insert(MeshMaterial3d(materials.add(StandardMaterial {
base_color: WHITE.into(),
base_color_texture: Some(asset_server.load("branding/icon.png")),
..default()
})))
.insert(Transform::IDENTITY)
.insert(LargeCube);
}
/// Spins the outer sphere a bit every frame.
///
/// This ensures that the set of cubes that are hidden and shown varies over
/// time.
fn spin_small_cubes(mut sphere_parents: Query<&mut Transform, With<SphereParent>>) {
for mut sphere_parent_transform in &mut sphere_parents {
sphere_parent_transform.rotate_y(ROTATION_SPEED);
}
}
/// Spins the large cube a bit every frame.
///
/// The chaotic rotation adds a bit of randomness to the scene to better
/// demonstrate the dynamic nature of the occlusion culling.
fn spin_large_cube(mut large_cubes: Query<&mut Transform, With<LargeCube>>) {
for mut transform in &mut large_cubes {
transform.rotate(Quat::from_euler(
EulerRot::XYZ,
0.13 * ROTATION_SPEED,
0.29 * ROTATION_SPEED,
0.35 * ROTATION_SPEED,
));
}
}
/// Spawns a directional light to illuminate the scene.
fn spawn_light(commands: &mut Commands) {
commands
.spawn(DirectionalLight::default())
.insert(Transform::from_rotation(Quat::from_euler(
EulerRot::ZYX,
0.0,
PI * -0.15,
PI * -0.15,
)));
}
/// Spawns a camera that includes the depth prepass and occlusion culling.
fn spawn_camera(commands: &mut Commands) {
commands
.spawn(Camera3d::default())
.insert(Transform::from_xyz(0.0, 0.0, 9.0).looking_at(Vec3::ZERO, Vec3::Y))
.insert(DepthPrepass)
.insert(OcclusionCulling);
}
/// Spawns the help text at the upper left of the screen.
fn spawn_help_text(commands: &mut Commands) {
commands.spawn((
Text::new(""),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
impl render_graph::Node for ReadbackIndirectParametersNode {
fn run<'w>(
&self,
_: &mut RenderGraphContext,
render_context: &mut RenderContext<'w>,
world: &'w World,
) -> Result<(), NodeRunError> {
// Extract the buffers that hold the GPU indirect draw parameters from
// the world resources. We're going to read those buffers to determine
// how many meshes were actually drawn.
let (Some(indirect_parameters_buffers), Some(indirect_parameters_mapping_buffers)) = (
world.get_resource::<IndirectParametersBuffers>(),
world.get_resource::<IndirectParametersStagingBuffers>(),
) else {
return Ok(());
};
// Get the indirect parameters buffers corresponding to the opaque 3D
// phase, since all our meshes are in that phase.
let Some(phase_indirect_parameters_buffers) =
indirect_parameters_buffers.get(&TypeId::of::<Opaque3d>())
else {
return Ok(());
};
// Grab both the buffers we're copying from and the staging buffers
// we're copying to. Remember that we can't map the indirect parameters
// buffers directly, so we have to copy their contents to a staging
// buffer.
let (
Some(indexed_data_buffer),
Some(indexed_batch_sets_buffer),
Some(indirect_parameters_staging_data_buffer),
Some(indirect_parameters_staging_batch_sets_buffer),
) = (
phase_indirect_parameters_buffers.indexed.data_buffer(),
phase_indirect_parameters_buffers
.indexed
.batch_sets_buffer(),
indirect_parameters_mapping_buffers.data.as_ref(),
indirect_parameters_mapping_buffers.batch_sets.as_ref(),
)
else {
return Ok(());
};
// Copy from the indirect parameters buffers to the staging buffers.
render_context.command_encoder().copy_buffer_to_buffer(
indexed_data_buffer,
0,
indirect_parameters_staging_data_buffer,
0,
indexed_data_buffer.size(),
);
render_context.command_encoder().copy_buffer_to_buffer(
indexed_batch_sets_buffer,
0,
indirect_parameters_staging_batch_sets_buffer,
0,
indexed_batch_sets_buffer.size(),
);
Ok(())
}
}
/// Creates the staging buffers that we use to read back the indirect parameters
/// from the GPU to the CPU.
///
/// We read the indirect parameters from the GPU to the CPU in order to display
/// the number of meshes that were culled each frame.
///
/// We need these staging buffers because `wgpu` doesn't allow us to read the
/// contents of the indirect parameters buffers directly. We must first copy
/// them from the GPU to a staging buffer, and then read the staging buffer.
fn create_indirect_parameters_staging_buffers(
mut indirect_parameters_staging_buffers: ResMut<IndirectParametersStagingBuffers>,
indirect_parameters_buffers: Res<IndirectParametersBuffers>,
render_device: Res<RenderDevice>,
) {
let Some(phase_indirect_parameters_buffers) =
indirect_parameters_buffers.get(&TypeId::of::<Opaque3d>())
else {
return;
};
// Fetch the indirect parameters buffers that we're going to copy from.
let (Some(indexed_data_buffer), Some(indexed_batch_set_buffer)) = (
phase_indirect_parameters_buffers.indexed.data_buffer(),
phase_indirect_parameters_buffers
.indexed
.batch_sets_buffer(),
) else {
return;
};
// Build the staging buffers. Make sure they have the same sizes as the
// buffers we're copying from.
indirect_parameters_staging_buffers.data =
Some(render_device.create_buffer(&BufferDescriptor {
label: Some("indexed data staging buffer"),
size: indexed_data_buffer.size(),
usage: BufferUsages::MAP_READ | BufferUsages::COPY_DST,
mapped_at_creation: false,
}));
indirect_parameters_staging_buffers.batch_sets =
Some(render_device.create_buffer(&BufferDescriptor {
label: Some("indexed batch set staging buffer"),
size: indexed_batch_set_buffer.size(),
usage: BufferUsages::MAP_READ | BufferUsages::COPY_DST,
mapped_at_creation: false,
}));
}
/// Updates the app status text at the top of the screen.
fn update_status_text(
saved_indirect_parameters: Res<SavedIndirectParameters>,
mut texts: Query<&mut Text>,
meshes: Query<Entity, With<Mesh3d>>,
app_status: Res<AppStatus>,
) {
// How many meshes are in the scene?
let total_mesh_count = meshes.iter().count();
// Sample the rendered object count. Note that we don't synchronize beyond
    // locking the data, and therefore this value will generally be at least
    // one frame behind. This is fine; this app is just a demonstration after
// all.
let (
rendered_object_count,
occlusion_culling_supported,
occlusion_culling_introspection_supported,
): (u32, bool, bool) = {
let saved_indirect_parameters = saved_indirect_parameters.lock().unwrap();
(
saved_indirect_parameters
.data
.iter()
.take(saved_indirect_parameters.count as usize)
.map(|indirect_parameters| indirect_parameters.instance_count)
.sum(),
saved_indirect_parameters.occlusion_culling_supported,
saved_indirect_parameters.occlusion_culling_introspection_supported,
)
};
// Change the text.
for mut text in &mut texts {
text.0 = String::new();
if !occlusion_culling_supported {
text.0
.push_str("Occlusion culling not supported on this platform");
continue;
}
let _ = writeln!(
&mut text.0,
"Occlusion culling {} (Press Space to toggle)",
if app_status.occlusion_culling {
"ON"
} else {
"OFF"
},
);
if !occlusion_culling_introspection_supported {
continue;
}
let _ = write!(
&mut text.0,
"{}/{} meshes rendered",
rendered_object_count, total_mesh_count
);
}
}
/// A system that reads the indirect parameters back from the GPU so that we can
/// report how many meshes were culled.
fn readback_indirect_parameters(
mut indirect_parameters_staging_buffers: ResMut<IndirectParametersStagingBuffers>,
saved_indirect_parameters: Res<SavedIndirectParameters>,
gpu_preprocessing_support: Res<GpuPreprocessingSupport>,
) {
// If culling isn't supported on this platform, note that, and bail.
if gpu_preprocessing_support.max_supported_mode != GpuPreprocessingMode::Culling {
saved_indirect_parameters
.lock()
.unwrap()
.occlusion_culling_supported = false;
return;
}
// Grab the staging buffers.
let (Some(data_buffer), Some(batch_sets_buffer)) = (
indirect_parameters_staging_buffers.data.take(),
indirect_parameters_staging_buffers.batch_sets.take(),
) else {
return;
};
// Read the GPU buffers back.
let saved_indirect_parameters_0 = (**saved_indirect_parameters).clone();
let saved_indirect_parameters_1 = (**saved_indirect_parameters).clone();
readback_buffer::<IndirectParametersIndexed>(data_buffer, move |indirect_parameters| {
saved_indirect_parameters_0.lock().unwrap().data = indirect_parameters.to_vec();
});
readback_buffer::<u32>(batch_sets_buffer, move |indirect_parameters_count| {
saved_indirect_parameters_1.lock().unwrap().count = indirect_parameters_count[0];
});
}
/// A helper function to asynchronously read an array of [`Pod`] values back from
/// the GPU to the CPU.
///
/// The given callback is invoked when the data is ready. The buffer will
/// automatically be unmapped after the callback executes.
fn readback_buffer<T>(buffer: Buffer, callback: impl FnOnce(&[T]) + Send + 'static)
where
T: Pod,
{
// We need to make another reference to the buffer so that we can move the
// original reference into the closure below.
let original_buffer = buffer.clone();
original_buffer
.slice(..)
.map_async(MapMode::Read, move |result| {
// Make sure we succeeded.
if result.is_err() {
return;
}
{
// Cast the raw bytes in the GPU buffer to the appropriate type.
let buffer_view = buffer.slice(..).get_mapped_range();
let indirect_parameters: &[T] = bytemuck::cast_slice(
&buffer_view[0..(buffer_view.len() / size_of::<T>() * size_of::<T>())],
);
// Invoke the callback.
callback(indirect_parameters);
}
// Unmap the buffer. We have to do this before submitting any more
// GPU command buffers, or `wgpu` will assert.
buffer.unmap();
});
}
/// Adds or removes the [`OcclusionCulling`] and [`DepthPrepass`] components
/// when the user presses the spacebar.
fn toggle_occlusion_culling_on_request(
mut commands: Commands,
input: Res<ButtonInput<KeyCode>>,
mut app_status: ResMut<AppStatus>,
cameras: Query<Entity, With<Camera3d>>,
) {
// Only run when the user presses the spacebar.
if !input.just_pressed(KeyCode::Space) {
return;
}
// Toggle the occlusion culling flag in `AppStatus`.
app_status.occlusion_culling = !app_status.occlusion_culling;
// Add or remove the `OcclusionCulling` and `DepthPrepass` components as
// requested.
for camera in &cameras {
if app_status.occlusion_culling {
commands
.entity(camera)
.insert(DepthPrepass)
.insert(OcclusionCulling);
} else {
commands
.entity(camera)
.remove::<DepthPrepass>()
.remove::<OcclusionCulling>();
}
}
}

View File

@@ -0,0 +1,241 @@
//! A simple 3D scene showing how alpha blending can break and how order independent transparency (OIT) can fix it.
//!
//! See [`OrderIndependentTransparencyPlugin`] for the trade-offs of using OIT.
//!
//! [`OrderIndependentTransparencyPlugin`]: bevy::core_pipeline::oit::OrderIndependentTransparencyPlugin
use bevy::{
color::palettes::css::{BLUE, GREEN, RED},
core_pipeline::oit::OrderIndependentTransparencySettings,
prelude::*,
render::view::RenderLayers,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, (toggle_oit, cycle_scenes))
.run();
}
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(0.0, 0.0, 10.0).looking_at(Vec3::ZERO, Vec3::Y),
// Add this component to this camera to render transparent meshes using OIT
OrderIndependentTransparencySettings::default(),
RenderLayers::layer(1),
// Msaa currently doesn't work with OIT
Msaa::Off,
));
// light
commands.spawn((
PointLight {
shadows_enabled: false,
..default()
},
Transform::from_xyz(4.0, 8.0, 4.0),
RenderLayers::layer(1),
));
// spawn help text
commands
.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
RenderLayers::layer(1),
))
.with_children(|p| {
p.spawn(TextSpan::new("Press T to toggle OIT\n"));
p.spawn(TextSpan::new("OIT Enabled"));
p.spawn(TextSpan::new("\nPress C to cycle test scenes"));
});
// spawn default scene
spawn_spheres(&mut commands, &mut meshes, &mut materials);
}
fn toggle_oit(
mut commands: Commands,
text: Single<Entity, With<Text>>,
keyboard_input: Res<ButtonInput<KeyCode>>,
q: Single<(Entity, Has<OrderIndependentTransparencySettings>), With<Camera3d>>,
mut text_writer: TextUiWriter,
) {
if keyboard_input.just_pressed(KeyCode::KeyT) {
let (e, has_oit) = *q;
*text_writer.text(*text, 2) = if has_oit {
// Removing the component will completely disable OIT for this camera
commands
.entity(e)
.remove::<OrderIndependentTransparencySettings>();
"OIT disabled".to_string()
} else {
// Adding the component to the camera will render any transparent meshes
// with OIT instead of alpha blending
commands
.entity(e)
.insert(OrderIndependentTransparencySettings::default());
"OIT enabled".to_string()
};
}
}
fn cycle_scenes(
mut commands: Commands,
keyboard_input: Res<ButtonInput<KeyCode>>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
q: Query<Entity, With<Mesh3d>>,
mut scene_id: Local<usize>,
) {
if keyboard_input.just_pressed(KeyCode::KeyC) {
// despawn current scene
for e in &q {
commands.entity(e).despawn();
}
// increment scene_id
*scene_id = (*scene_id + 1) % 2;
// spawn next scene
match *scene_id {
0 => spawn_spheres(&mut commands, &mut meshes, &mut materials),
1 => spawn_occlusion_test(&mut commands, &mut meshes, &mut materials),
_ => unreachable!(),
}
}
}
/// Spawns 3 overlapping transparent spheres.
///
/// Technically, this particular example wouldn't break when using `alpha_to_coverage`
/// with MSAA, but it does break with MSAA disabled, which is enough to show the
/// difference between OIT enabled and disabled.
fn spawn_spheres(
commands: &mut Commands,
meshes: &mut Assets<Mesh>,
materials: &mut Assets<StandardMaterial>,
) {
let pos_a = Vec3::new(-1.0, 0.75, 0.0);
let pos_b = Vec3::new(0.0, -0.75, 0.0);
let pos_c = Vec3::new(1.0, 0.75, 0.0);
let offset = Vec3::new(0.0, 0.0, 0.0);
let sphere_handle = meshes.add(Sphere::new(2.0).mesh());
let alpha = 0.25;
let render_layers = RenderLayers::layer(1);
commands.spawn((
Mesh3d(sphere_handle.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: RED.with_alpha(alpha).into(),
alpha_mode: AlphaMode::Blend,
..default()
})),
Transform::from_translation(pos_a + offset),
render_layers.clone(),
));
commands.spawn((
Mesh3d(sphere_handle.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: GREEN.with_alpha(alpha).into(),
alpha_mode: AlphaMode::Blend,
..default()
})),
Transform::from_translation(pos_b + offset),
render_layers.clone(),
));
commands.spawn((
Mesh3d(sphere_handle.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: BLUE.with_alpha(alpha).into(),
alpha_mode: AlphaMode::Blend,
..default()
})),
Transform::from_translation(pos_c + offset),
render_layers.clone(),
));
}
/// Spawn a combination of opaque cubes and transparent spheres.
/// This is useful to make sure transparent meshes drawn with OIT
/// are properly occluded by opaque meshes.
fn spawn_occlusion_test(
commands: &mut Commands,
meshes: &mut Assets<Mesh>,
materials: &mut Assets<StandardMaterial>,
) {
let sphere_handle = meshes.add(Sphere::new(1.0).mesh());
let cube_handle = meshes.add(Cuboid::from_size(Vec3::ONE).mesh());
let cube_material = materials.add(Color::srgb(0.8, 0.7, 0.6));
let render_layers = RenderLayers::layer(1);
// front
let x = -2.5;
commands.spawn((
Mesh3d(cube_handle.clone()),
MeshMaterial3d(cube_material.clone()),
Transform::from_xyz(x, 0.0, 2.0),
render_layers.clone(),
));
commands.spawn((
Mesh3d(sphere_handle.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: RED.with_alpha(0.5).into(),
alpha_mode: AlphaMode::Blend,
..default()
})),
Transform::from_xyz(x, 0., 0.),
render_layers.clone(),
));
// intersection
commands.spawn((
Mesh3d(cube_handle.clone()),
MeshMaterial3d(cube_material.clone()),
Transform::from_xyz(x, 0.0, 1.0),
render_layers.clone(),
));
commands.spawn((
Mesh3d(sphere_handle.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: RED.with_alpha(0.5).into(),
alpha_mode: AlphaMode::Blend,
..default()
})),
Transform::from_xyz(0., 0., 0.),
render_layers.clone(),
));
// back
let x = 2.5;
commands.spawn((
Mesh3d(cube_handle.clone()),
MeshMaterial3d(cube_material.clone()),
Transform::from_xyz(x, 0.0, -2.0),
render_layers.clone(),
));
commands.spawn((
Mesh3d(sphere_handle.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: RED.with_alpha(0.5).into(),
alpha_mode: AlphaMode::Blend,
..default()
})),
Transform::from_xyz(x, 0., 0.),
render_layers.clone(),
));
}

59
vendor/bevy/examples/3d/orthographic.rs vendored Normal file
View File

@@ -0,0 +1,59 @@
//! Shows how to create a 3D orthographic view (for isometric-look games or CAD applications).
use bevy::{prelude::*, render::camera::ScalingMode};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// camera
commands.spawn((
Camera3d::default(),
Projection::from(OrthographicProjection {
            // The projection always shows 6 world units vertically; the width
            // adjusts to the window's aspect ratio.
scaling_mode: ScalingMode::FixedVertical {
viewport_height: 6.0,
},
..OrthographicProjection::default_3d()
}),
Transform::from_xyz(5.0, 5.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
));
// plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
));
// cubes
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),
Transform::from_xyz(1.5, 0.5, 1.5),
));
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),
Transform::from_xyz(1.5, 0.5, -1.5),
));
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),
Transform::from_xyz(-1.5, 0.5, 1.5),
));
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),
Transform::from_xyz(-1.5, 0.5, -1.5),
));
// light
commands.spawn((PointLight::default(), Transform::from_xyz(3.0, 8.0, 5.0)));
}

View File

@@ -0,0 +1,323 @@
//! A simple 3D scene with a spinning cube that uses a normal map and a depth map to demonstrate parallax mapping.
//! Press left mouse button to cycle through different views.
use std::fmt;
use bevy::{image::ImageLoaderSettings, math::ops, prelude::*};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(
Update,
(
spin,
move_camera,
update_parallax_depth_scale,
update_parallax_layers,
switch_method,
),
)
.run();
}
#[derive(Component)]
struct Spin {
speed: f32,
}
/// Marks the camera so it can be moved when the user clicks.
#[derive(Component)]
struct CameraController;
const DEPTH_CHANGE_RATE: f32 = 0.1;
const DEPTH_UPDATE_STEP: f32 = 0.03;
const MAX_DEPTH: f32 = 0.3;
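/// The parallax depth scale that the materials are animated toward.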
struct TargetDepth(f32);
impl Default for TargetDepth {
fn default() -> Self {
TargetDepth(0.09)
}
}
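/// The target layer count, stored as an exponent: the materials use 2^n layers.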
struct TargetLayers(f32);
impl Default for TargetLayers {
fn default() -> Self {
TargetLayers(5.0)
}
}
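/// The parallax mapping method currently applied to the materials.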
struct CurrentMethod(ParallaxMappingMethod);
impl Default for CurrentMethod {
fn default() -> Self {
CurrentMethod(ParallaxMappingMethod::Relief { max_steps: 4 })
}
}
impl fmt::Display for CurrentMethod {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.0 {
ParallaxMappingMethod::Occlusion => write!(f, "Parallax Occlusion Mapping"),
ParallaxMappingMethod::Relief { max_steps } => {
write!(f, "Relief Mapping with {max_steps} steps")
}
}
}
}
impl CurrentMethod {
fn next_method(&mut self) {
use ParallaxMappingMethod::*;
self.0 = match self.0 {
Occlusion => Relief { max_steps: 2 },
Relief { max_steps } if max_steps < 3 => Relief { max_steps: 4 },
Relief { max_steps } if max_steps < 5 => Relief { max_steps: 8 },
Relief { .. } => Occlusion,
}
}
}
fn update_parallax_depth_scale(
input: Res<ButtonInput<KeyCode>>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut target_depth: Local<TargetDepth>,
mut depth_update: Local<bool>,
mut writer: TextUiWriter,
text: Single<Entity, With<Text>>,
) {
if input.just_pressed(KeyCode::Digit1) {
target_depth.0 -= DEPTH_UPDATE_STEP;
target_depth.0 = target_depth.0.max(0.0);
*depth_update = true;
}
if input.just_pressed(KeyCode::Digit2) {
target_depth.0 += DEPTH_UPDATE_STEP;
target_depth.0 = target_depth.0.min(MAX_DEPTH);
*depth_update = true;
}
if *depth_update {
for (_, mat) in materials.iter_mut() {
let current_depth = mat.parallax_depth_scale;
let new_depth = current_depth.lerp(target_depth.0, DEPTH_CHANGE_RATE);
mat.parallax_depth_scale = new_depth;
*writer.text(*text, 1) = format!("Parallax depth scale: {new_depth:.5}\n");
if (new_depth - current_depth).abs() <= 0.000000001 {
*depth_update = false;
}
}
}
}
fn switch_method(
input: Res<ButtonInput<KeyCode>>,
mut materials: ResMut<Assets<StandardMaterial>>,
text: Single<Entity, With<Text>>,
mut writer: TextUiWriter,
mut current: Local<CurrentMethod>,
) {
if input.just_pressed(KeyCode::Space) {
current.next_method();
} else {
return;
}
let text_entity = *text;
*writer.text(text_entity, 3) = format!("Method: {}\n", *current);
for (_, mat) in materials.iter_mut() {
mat.parallax_mapping_method = current.0;
}
}
fn update_parallax_layers(
input: Res<ButtonInput<KeyCode>>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut target_layers: Local<TargetLayers>,
text: Single<Entity, With<Text>>,
mut writer: TextUiWriter,
) {
if input.just_pressed(KeyCode::Digit3) {
target_layers.0 -= 1.0;
target_layers.0 = target_layers.0.max(0.0);
} else if input.just_pressed(KeyCode::Digit4) {
target_layers.0 += 1.0;
} else {
return;
}
let layer_count = ops::exp2(target_layers.0);
let text_entity = *text;
*writer.text(text_entity, 2) = format!("Layers: {layer_count:.0}\n");
for (_, mat) in materials.iter_mut() {
mat.max_parallax_layer_count = layer_count;
}
}
fn spin(time: Res<Time>, mut query: Query<(&mut Transform, &Spin)>) {
for (mut transform, spin) in query.iter_mut() {
transform.rotate_local_y(spin.speed * time.delta_secs());
transform.rotate_local_x(spin.speed * time.delta_secs());
transform.rotate_local_z(-spin.speed * time.delta_secs());
}
}
// Camera positions to cycle through when left-clicking.
const CAMERA_POSITIONS: &[Transform] = &[
Transform {
translation: Vec3::new(1.5, 1.5, 1.5),
rotation: Quat::from_xyzw(-0.279, 0.364, 0.115, 0.880),
scale: Vec3::ONE,
},
Transform {
translation: Vec3::new(2.4, 0.0, 0.2),
rotation: Quat::from_xyzw(0.094, 0.676, 0.116, 0.721),
scale: Vec3::ONE,
},
Transform {
translation: Vec3::new(2.4, 2.6, -4.3),
rotation: Quat::from_xyzw(0.170, 0.908, 0.308, 0.225),
scale: Vec3::ONE,
},
Transform {
translation: Vec3::new(-1.0, 0.8, -1.2),
rotation: Quat::from_xyzw(-0.004, 0.909, 0.247, -0.335),
scale: Vec3::ONE,
},
];
fn move_camera(
mut camera: Single<&mut Transform, With<CameraController>>,
mut current_view: Local<usize>,
button: Res<ButtonInput<MouseButton>>,
) {
if button.just_pressed(MouseButton::Left) {
*current_view = (*current_view + 1) % CAMERA_POSITIONS.len();
}
let target = CAMERA_POSITIONS[*current_view];
camera.translation = camera.translation.lerp(target.translation, 0.2);
camera.rotation = camera.rotation.slerp(target.rotation, 0.2);
}
fn setup(
mut commands: Commands,
mut materials: ResMut<Assets<StandardMaterial>>,
mut meshes: ResMut<Assets<Mesh>>,
asset_server: Res<AssetServer>,
) {
// The normal map. Note that to generate it in the GIMP image editor, you should
    // open the depth map, and do Filters → Generic → Normal Map.
// You should enable the "flip X" checkbox.
let normal_handle = asset_server.load_with_settings(
"textures/parallax_example/cube_normal.png",
// The normal map texture is in linear color space. Lighting won't look correct
// if `is_srgb` is `true`, which is the default.
|settings: &mut ImageLoaderSettings| settings.is_srgb = false,
);
// Camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(1.5, 1.5, 1.5).looking_at(Vec3::ZERO, Vec3::Y),
CameraController,
));
// light
commands
.spawn((
PointLight {
shadows_enabled: true,
..default()
},
Transform::from_xyz(2.0, 1.0, -1.1),
))
.with_children(|commands| {
// represent the light source as a sphere
let mesh = meshes.add(Sphere::new(0.05).mesh().ico(3).unwrap());
commands.spawn((Mesh3d(mesh), MeshMaterial3d(materials.add(Color::WHITE))));
});
// Plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(10.0, 10.0))),
MeshMaterial3d(materials.add(StandardMaterial {
// standard material derived from dark green, but
// with roughness and reflectance set.
perceptual_roughness: 0.45,
reflectance: 0.18,
..Color::srgb_u8(0, 80, 0).into()
})),
Transform::from_xyz(0.0, -1.0, 0.0),
));
let parallax_depth_scale = TargetDepth::default().0;
let max_parallax_layer_count = ops::exp2(TargetLayers::default().0);
let parallax_mapping_method = CurrentMethod::default();
let parallax_material = materials.add(StandardMaterial {
perceptual_roughness: 0.4,
base_color_texture: Some(asset_server.load("textures/parallax_example/cube_color.png")),
normal_map_texture: Some(normal_handle),
// The depth map is a grayscale texture where black is the highest level and
// white the lowest.
depth_map: Some(asset_server.load("textures/parallax_example/cube_depth.png")),
parallax_depth_scale,
parallax_mapping_method: parallax_mapping_method.0,
max_parallax_layer_count,
..default()
});
commands.spawn((
Mesh3d(
meshes.add(
// NOTE: for normal maps and depth maps to work, the mesh
// needs tangents generated.
Mesh::from(Cuboid::default())
.with_generated_tangents()
.unwrap(),
),
),
MeshMaterial3d(parallax_material.clone()),
Spin { speed: 0.3 },
));
let background_cube = meshes.add(
Mesh::from(Cuboid::new(40.0, 40.0, 40.0))
.with_generated_tangents()
.unwrap(),
);
let background_cube_bundle = |translation| {
(
Mesh3d(background_cube.clone()),
MeshMaterial3d(parallax_material.clone()),
Transform::from_translation(translation),
Spin { speed: -0.1 },
)
};
commands.spawn(background_cube_bundle(Vec3::new(45., 0., 0.)));
commands.spawn(background_cube_bundle(Vec3::new(-45., 0., 0.)));
commands.spawn(background_cube_bundle(Vec3::new(0., 0., 45.)));
commands.spawn(background_cube_bundle(Vec3::new(0., 0., -45.)));
// example instructions
commands
.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
))
.with_children(|p| {
p.spawn(TextSpan(format!(
"Parallax depth scale: {parallax_depth_scale:.5}\n"
)));
p.spawn(TextSpan(format!("Layers: {max_parallax_layer_count:.0}\n")));
p.spawn(TextSpan(format!("{parallax_mapping_method}\n")));
p.spawn(TextSpan::new("\n\n"));
p.spawn(TextSpan::new("Controls:\n"));
p.spawn(TextSpan::new("Left click - Change view angle\n"));
p.spawn(TextSpan::new(
"1/2 - Decrease/Increase parallax depth scale\n",
));
p.spawn(TextSpan::new("3/4 - Decrease/Increase layer count\n"));
p.spawn(TextSpan::new("Space - Switch parallaxing algorithm\n"));
});
}

60
vendor/bevy/examples/3d/parenting.rs vendored Normal file
View File

@@ -0,0 +1,60 @@
//! Illustrates how to create parent-child relationships between entities and how parent transforms
//! are propagated to their descendants.
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, rotator_system)
.run();
}
/// This component indicates which entities should rotate.
#[derive(Component)]
struct Rotator;
/// rotates the parent, which will result in the child also rotating
fn rotator_system(time: Res<Time>, mut query: Query<&mut Transform, With<Rotator>>) {
for mut transform in &mut query {
transform.rotate_x(3.0 * time.delta_secs());
}
}
/// set up a simple scene with a "parent" cube and a "child" cube
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
let cube_handle = meshes.add(Cuboid::new(2.0, 2.0, 2.0));
let cube_material_handle = materials.add(StandardMaterial {
base_color: Color::srgb(0.8, 0.7, 0.6),
..default()
});
// parent cube
commands
.spawn((
Mesh3d(cube_handle.clone()),
MeshMaterial3d(cube_material_handle.clone()),
Transform::from_xyz(0.0, 0.0, 1.0),
Rotator,
))
.with_children(|parent| {
// child cube
parent.spawn((
Mesh3d(cube_handle),
MeshMaterial3d(cube_material_handle),
Transform::from_xyz(0.0, 0.0, 3.0),
));
});
// light
commands.spawn((PointLight::default(), Transform::from_xyz(4.0, 5.0, -4.0)));
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(5.0, 10.0, 10.0).looking_at(Vec3::ZERO, Vec3::Y),
));
}

148
vendor/bevy/examples/3d/pbr.rs vendored Normal file
View File

@@ -0,0 +1,148 @@
//! This example shows how to configure Physically Based Rendering (PBR) parameters.
use bevy::prelude::*;
use bevy::render::camera::ScalingMode;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, environment_map_load_finish)
.run();
}
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
asset_server: Res<AssetServer>,
) {
let sphere_mesh = meshes.add(Sphere::new(0.45));
// add entities to the world
for y in -2..=2 {
for x in -5..=5 {
let x01 = (x + 5) as f32 / 10.0;
let y01 = (y + 2) as f32 / 4.0;
// sphere
commands.spawn((
Mesh3d(sphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Srgba::hex("#ffd891").unwrap().into(),
// vary key PBR parameters on a grid of spheres to show the effect
metallic: y01,
perceptual_roughness: x01,
..default()
})),
Transform::from_xyz(x as f32, y as f32 + 0.5, 0.0),
));
}
}
// unlit sphere
commands.spawn((
Mesh3d(sphere_mesh),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Srgba::hex("#ffd891").unwrap().into(),
// a single unlit sphere, for comparison with the lit PBR grid above
unlit: true,
..default()
})),
Transform::from_xyz(-5.0, -2.5, 0.0),
));
commands.spawn((
DirectionalLight {
illuminance: 1_500.,
..default()
},
Transform::from_xyz(50.0, 50.0, 50.0).looking_at(Vec3::ZERO, Vec3::Y),
));
// labels
commands.spawn((
Text::new("Perceptual Roughness"),
TextFont {
font_size: 30.0,
..default()
},
Node {
position_type: PositionType::Absolute,
top: Val::Px(20.0),
left: Val::Px(100.0),
..default()
},
));
commands.spawn((
Text::new("Metallic"),
TextFont {
font_size: 30.0,
..default()
},
Node {
position_type: PositionType::Absolute,
top: Val::Px(130.0),
right: Val::ZERO,
..default()
},
Transform {
rotation: Quat::from_rotation_z(std::f32::consts::PI / 2.0),
..default()
},
));
commands.spawn((
Text::new("Loading Environment Map..."),
TextFont {
font_size: 30.0,
..default()
},
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(20.0),
right: Val::Px(20.0),
..default()
},
EnvironmentMapLabel,
));
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(0.0, 0.0, 8.0).looking_at(Vec3::default(), Vec3::Y),
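// `ScalingMode::WindowSize` maps one world unit to one logical pixel before `scale`
// is applied, so `scale: 0.01` works out to roughly 100 pixels per world unit.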
Projection::from(OrthographicProjection {
scale: 0.01,
scaling_mode: ScalingMode::WindowSize,
..OrthographicProjection::default_3d()
}),
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 900.0,
..default()
},
));
}
fn environment_map_load_finish(
mut commands: Commands,
asset_server: Res<AssetServer>,
environment_map: Single<&EnvironmentMapLight>,
label_entity: Option<Single<Entity, With<EnvironmentMapLabel>>>,
) {
if asset_server
.load_state(&environment_map.diffuse_map)
.is_loaded()
&& asset_server
.load_state(&environment_map.specular_map)
.is_loaded()
{
// Only despawn the label if it still exists; it may already have been despawned.
if let Some(label_entity) = label_entity {
commands.entity(*label_entity).despawn();
}
}
}
#[derive(Component)]
struct EnvironmentMapLabel;

410
vendor/bevy/examples/3d/pcss.rs vendored Normal file
View File

@@ -0,0 +1,410 @@
//! Demonstrates percentage-closer soft shadows (PCSS).
use std::f32::consts::PI;
use bevy::{
core_pipeline::{
experimental::taa::{TemporalAntiAliasPlugin, TemporalAntiAliasing},
prepass::{DepthPrepass, MotionVectorPrepass},
Skybox,
},
math::vec3,
pbr::{CubemapVisibleEntities, ShadowFilteringMethod, VisibleMeshEntities},
prelude::*,
render::{
camera::TemporalJitter,
primitives::{CubemapFrusta, Frustum},
},
};
use crate::widgets::{RadioButton, RadioButtonText, WidgetClickEvent, WidgetClickSender};
#[path = "../helpers/widgets.rs"]
mod widgets;
/// The size of the light, which affects the size of the penumbras.
const LIGHT_RADIUS: f32 = 10.0;
/// The intensity of the point and spot lights.
const POINT_LIGHT_INTENSITY: f32 = 1_000_000_000.0;
/// The range in meters of the point and spot lights.
const POINT_LIGHT_RANGE: f32 = 110.0;
/// The depth bias for directional and spot lights. This value is set higher
/// than the default to avoid shadow acne.
const DIRECTIONAL_SHADOW_DEPTH_BIAS: f32 = 0.20;
/// The depth bias for point lights. This value is set higher than the default to
/// avoid shadow acne.
///
/// Unfortunately, there is a bit of Peter Panning with this value, because of
/// the distance and angle of the light. This can't be helped in this scene
/// without increasing the shadow map size beyond reasonable limits.
const POINT_SHADOW_DEPTH_BIAS: f32 = 0.35;
/// The near Z value for the shadow map, in meters. This is set higher than the
/// default in order to achieve greater resolution in the shadow map for point
/// and spot lights.
const SHADOW_MAP_NEAR_Z: f32 = 50.0;
/// The current application settings (light type, shadow filter, and the status
/// of PCSS).
#[derive(Resource)]
struct AppStatus {
/// The type of light presently in the scene: either directional or point.
light_type: LightType,
/// The type of shadow filter: Gaussian or temporal.
shadow_filter: ShadowFilter,
/// Whether soft shadows are enabled.
soft_shadows: bool,
}
impl Default for AppStatus {
fn default() -> Self {
Self {
light_type: default(),
shadow_filter: default(),
soft_shadows: true,
}
}
}
/// The type of light presently in the scene: directional, point, or spot.
#[derive(Clone, Copy, Default, PartialEq)]
enum LightType {
/// A directional light, with a cascaded shadow map.
#[default]
Directional,
/// A point light, with a cube shadow map.
Point,
/// A spot light, with a cube shadow map.
Spot,
}
/// The type of shadow filter.
///
/// Generally, `Gaussian` is preferred when temporal antialiasing isn't in use,
/// while `Temporal` is preferred when TAA is in use. In this example, this
/// setting also turns TAA on and off.
#[derive(Clone, Copy, Default, PartialEq)]
enum ShadowFilter {
/// The non-temporal Gaussian filter (Castano '13 for directional lights, an
/// analogous alternative for point and spot lights).
#[default]
NonTemporal,
/// The temporal Gaussian filter (Jimenez '14 for directional lights, an
/// analogous alternative for point and spot lights).
Temporal,
}
/// Each example setting that can be toggled in the UI.
#[derive(Clone, Copy, PartialEq)]
enum AppSetting {
/// The type of light presently in the scene: directional, point, or spot.
LightType(LightType),
/// The type of shadow filter.
ShadowFilter(ShadowFilter),
/// Whether PCSS is enabled or disabled.
SoftShadows(bool),
}
/// The example application entry point.
fn main() {
App::new()
.init_resource::<AppStatus>()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Percentage Closer Soft Shadows Example".into(),
..default()
}),
..default()
}))
.add_plugins(TemporalAntiAliasPlugin)
.add_event::<WidgetClickEvent<AppSetting>>()
.add_systems(Startup, setup)
.add_systems(Update, widgets::handle_ui_interactions::<AppSetting>)
.add_systems(
Update,
update_radio_buttons.after(widgets::handle_ui_interactions::<AppSetting>),
)
.add_systems(
Update,
(
handle_light_type_change,
handle_shadow_filter_change,
handle_pcss_toggle,
)
.after(widgets::handle_ui_interactions::<AppSetting>),
)
.run();
}
/// Creates all the objects in the scene.
fn setup(mut commands: Commands, asset_server: Res<AssetServer>, app_status: Res<AppStatus>) {
spawn_camera(&mut commands, &asset_server);
spawn_light(&mut commands, &app_status);
spawn_gltf_scene(&mut commands, &asset_server);
spawn_buttons(&mut commands);
}
/// Spawns the camera, with the initial shadow filtering method.
fn spawn_camera(commands: &mut Commands, asset_server: &AssetServer) {
commands
.spawn((
Camera3d::default(),
Transform::from_xyz(-12.912 * 0.7, 4.466 * 0.7, -10.624 * 0.7).with_rotation(
Quat::from_euler(EulerRot::YXZ, -134.76 / 180.0 * PI, -0.175, 0.0),
),
))
.insert(ShadowFilteringMethod::Gaussian)
// `TemporalJitter` is needed for TAA. Note that it does nothing without
// `TemporalAntiAliasing`.
.insert(TemporalJitter::default())
// We want MSAA off for TAA to work properly.
.insert(Msaa::Off)
// The depth prepass is needed for TAA.
.insert(DepthPrepass)
// The motion vector prepass is needed for TAA.
.insert(MotionVectorPrepass)
// Add a nice skybox.
.insert(Skybox {
image: asset_server.load("environment_maps/sky_skybox.ktx2"),
brightness: 500.0,
rotation: Quat::IDENTITY,
});
}
/// Spawns the initial light.
fn spawn_light(commands: &mut Commands, app_status: &AppStatus) {
// Because this light can become a directional light, point light, or spot
// light depending on the settings, we add the union of the components
// necessary for this light to behave as all three of those.
commands
.spawn((
create_directional_light(app_status),
Transform::from_rotation(Quat::from_array([
0.6539259,
-0.34646285,
0.36505926,
-0.5648683,
]))
.with_translation(vec3(57.693, 34.334, -6.422)),
))
// These two are needed for point lights.
.insert(CubemapVisibleEntities::default())
.insert(CubemapFrusta::default())
// These two are needed for spot lights.
.insert(VisibleMeshEntities::default())
.insert(Frustum::default());
}
/// Loads and spawns the glTF palm tree scene.
fn spawn_gltf_scene(commands: &mut Commands, asset_server: &AssetServer) {
commands.spawn(SceneRoot(
asset_server.load("models/PalmTree/PalmTree.gltf#Scene0"),
));
}
/// Spawns all the buttons at the bottom of the screen.
fn spawn_buttons(commands: &mut Commands) {
commands
.spawn(widgets::main_ui_node())
.with_children(|parent| {
widgets::spawn_option_buttons(
parent,
"Light Type",
&[
(AppSetting::LightType(LightType::Directional), "Directional"),
(AppSetting::LightType(LightType::Point), "Point"),
(AppSetting::LightType(LightType::Spot), "Spot"),
],
);
widgets::spawn_option_buttons(
parent,
"Shadow Filter",
&[
(AppSetting::ShadowFilter(ShadowFilter::Temporal), "Temporal"),
(
AppSetting::ShadowFilter(ShadowFilter::NonTemporal),
"Non-Temporal",
),
],
);
widgets::spawn_option_buttons(
parent,
"Soft Shadows",
&[
(AppSetting::SoftShadows(true), "On"),
(AppSetting::SoftShadows(false), "Off"),
],
);
});
}
/// Updates the style of the radio buttons that enable and disable soft shadows
/// to reflect whether PCSS is enabled.
fn update_radio_buttons(
mut widgets: Query<
(
Entity,
Option<&mut BackgroundColor>,
Has<Text>,
&WidgetClickSender<AppSetting>,
),
Or<(With<RadioButton>, With<RadioButtonText>)>,
>,
app_status: Res<AppStatus>,
mut writer: TextUiWriter,
) {
for (entity, image, has_text, sender) in widgets.iter_mut() {
let selected = match **sender {
AppSetting::LightType(light_type) => light_type == app_status.light_type,
AppSetting::ShadowFilter(shadow_filter) => shadow_filter == app_status.shadow_filter,
AppSetting::SoftShadows(soft_shadows) => soft_shadows == app_status.soft_shadows,
};
if let Some(mut bg_color) = image {
widgets::update_ui_radio_button(&mut bg_color, selected);
}
if has_text {
widgets::update_ui_radio_button_text(entity, &mut writer, selected);
}
}
}
/// Handles requests from the user to change the type of light.
fn handle_light_type_change(
mut commands: Commands,
mut lights: Query<Entity, Or<(With<DirectionalLight>, With<PointLight>, With<SpotLight>)>>,
mut events: EventReader<WidgetClickEvent<AppSetting>>,
mut app_status: ResMut<AppStatus>,
) {
for event in events.read() {
let AppSetting::LightType(light_type) = **event else {
continue;
};
app_status.light_type = light_type;
for light in lights.iter_mut() {
let mut light_commands = commands.entity(light);
light_commands
.remove::<DirectionalLight>()
.remove::<PointLight>()
.remove::<SpotLight>();
match light_type {
LightType::Point => {
light_commands.insert(create_point_light(&app_status));
}
LightType::Spot => {
light_commands.insert(create_spot_light(&app_status));
}
LightType::Directional => {
light_commands.insert(create_directional_light(&app_status));
}
}
}
}
}
/// Handles requests from the user to change the shadow filter method.
///
/// This system is also responsible for enabling and disabling TAA as
/// appropriate.
fn handle_shadow_filter_change(
mut commands: Commands,
mut cameras: Query<(Entity, &mut ShadowFilteringMethod)>,
mut events: EventReader<WidgetClickEvent<AppSetting>>,
mut app_status: ResMut<AppStatus>,
) {
for event in events.read() {
let AppSetting::ShadowFilter(shadow_filter) = **event else {
continue;
};
app_status.shadow_filter = shadow_filter;
for (camera, mut shadow_filtering_method) in cameras.iter_mut() {
match shadow_filter {
ShadowFilter::NonTemporal => {
*shadow_filtering_method = ShadowFilteringMethod::Gaussian;
commands.entity(camera).remove::<TemporalAntiAliasing>();
}
ShadowFilter::Temporal => {
*shadow_filtering_method = ShadowFilteringMethod::Temporal;
commands
.entity(camera)
.insert(TemporalAntiAliasing::default());
}
}
}
}
}
/// Handles requests from the user to toggle soft shadows on and off.
fn handle_pcss_toggle(
mut lights: Query<AnyOf<(&mut DirectionalLight, &mut PointLight, &mut SpotLight)>>,
mut events: EventReader<WidgetClickEvent<AppSetting>>,
mut app_status: ResMut<AppStatus>,
) {
for event in events.read() {
let AppSetting::SoftShadows(value) = **event else {
continue;
};
app_status.soft_shadows = value;
// Recreating the lights is the simplest way to toggle soft shadows.
for (directional_light, point_light, spot_light) in lights.iter_mut() {
if let Some(mut directional_light) = directional_light {
*directional_light = create_directional_light(&app_status);
}
if let Some(mut point_light) = point_light {
*point_light = create_point_light(&app_status);
}
if let Some(mut spot_light) = spot_light {
*spot_light = create_spot_light(&app_status);
}
}
}
}
/// Creates the [`DirectionalLight`] component with the appropriate settings.
fn create_directional_light(app_status: &AppStatus) -> DirectionalLight {
DirectionalLight {
shadows_enabled: true,
soft_shadow_size: if app_status.soft_shadows {
Some(LIGHT_RADIUS)
} else {
None
},
shadow_depth_bias: DIRECTIONAL_SHADOW_DEPTH_BIAS,
..default()
}
}
/// Creates the [`PointLight`] component with the appropriate settings.
fn create_point_light(app_status: &AppStatus) -> PointLight {
PointLight {
intensity: POINT_LIGHT_INTENSITY,
range: POINT_LIGHT_RANGE,
shadows_enabled: true,
radius: LIGHT_RADIUS,
soft_shadows_enabled: app_status.soft_shadows,
shadow_depth_bias: POINT_SHADOW_DEPTH_BIAS,
shadow_map_near_z: SHADOW_MAP_NEAR_Z,
..default()
}
}
/// Creates the [`SpotLight`] component with the appropriate settings.
fn create_spot_light(app_status: &AppStatus) -> SpotLight {
SpotLight {
intensity: POINT_LIGHT_INTENSITY,
range: POINT_LIGHT_RANGE,
radius: LIGHT_RADIUS,
shadows_enabled: true,
soft_shadows_enabled: app_status.soft_shadows,
shadow_depth_bias: DIRECTIONAL_SHADOW_DEPTH_BIAS,
shadow_map_near_z: SHADOW_MAP_NEAR_Z,
..default()
}
}

View File

@@ -0,0 +1,200 @@
//! Demonstrates Bevy's built-in postprocessing features.
//!
//! Currently, this simply consists of chromatic aberration.
use std::f32::consts::PI;
use bevy::{
core_pipeline::post_process::ChromaticAberration, pbr::CascadeShadowConfigBuilder, prelude::*,
};
/// The number of units per frame to add to or subtract from intensity when the
/// arrow keys are held.
const CHROMATIC_ABERRATION_INTENSITY_ADJUSTMENT_SPEED: f32 = 0.002;
/// The maximum supported chromatic aberration intensity level.
const MAX_CHROMATIC_ABERRATION_INTENSITY: f32 = 0.4;
/// The settings that the user can control.
#[derive(Resource)]
struct AppSettings {
/// The intensity of the chromatic aberration effect.
chromatic_aberration_intensity: f32,
}
/// The entry point.
fn main() {
App::new()
.init_resource::<AppSettings>()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Chromatic Aberration Example".into(),
..default()
}),
..default()
}))
.add_systems(Startup, setup)
.add_systems(Update, handle_keyboard_input)
.add_systems(
Update,
(update_chromatic_aberration_settings, update_help_text)
.run_if(resource_changed::<AppSettings>)
.after(handle_keyboard_input),
)
.run();
}
/// Creates the example scene and spawns the UI.
fn setup(mut commands: Commands, asset_server: Res<AssetServer>, app_settings: Res<AppSettings>) {
// Spawn the camera.
spawn_camera(&mut commands, &asset_server);
// Create the scene.
spawn_scene(&mut commands, &asset_server);
// Spawn the help text.
spawn_text(&mut commands, &app_settings);
}
/// Spawns the camera, including the [`ChromaticAberration`] component.
fn spawn_camera(commands: &mut Commands, asset_server: &AssetServer) {
commands.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Transform::from_xyz(0.7, 0.7, 1.0).looking_at(Vec3::new(0.0, 0.3, 0.0), Vec3::Y),
DistanceFog {
color: Color::srgb_u8(43, 44, 47),
falloff: FogFalloff::Linear {
start: 1.0,
end: 8.0,
},
..default()
},
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 2000.0,
..default()
},
// Include the `ChromaticAberration` component.
ChromaticAberration::default(),
));
}
/// Spawns the scene.
///
/// This is just the tonemapping test scene, chosen for the fact that it uses a
/// variety of colors.
fn spawn_scene(commands: &mut Commands, asset_server: &AssetServer) {
// Spawn the main scene.
commands.spawn(SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/TonemappingTest/TonemappingTest.gltf"),
)));
// Spawn the flight helmet.
commands.spawn((
SceneRoot(
asset_server
.load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")),
),
Transform::from_xyz(0.5, 0.0, -0.5).with_rotation(Quat::from_rotation_y(-0.15 * PI)),
));
// Spawn the light.
commands.spawn((
DirectionalLight {
illuminance: 15000.0,
shadows_enabled: true,
..default()
},
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, PI * -0.15, PI * -0.15)),
CascadeShadowConfigBuilder {
maximum_distance: 3.0,
first_cascade_far_bound: 0.9,
..default()
}
.build(),
));
}
/// Spawns the help text at the bottom of the screen.
fn spawn_text(commands: &mut Commands, app_settings: &AppSettings) {
commands.spawn((
create_help_text(app_settings),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
impl Default for AppSettings {
fn default() -> Self {
Self {
chromatic_aberration_intensity: ChromaticAberration::default().intensity,
}
}
}
/// Creates help text at the bottom of the screen.
fn create_help_text(app_settings: &AppSettings) -> Text {
format!(
"Chromatic aberration intensity: {} (Press Left or Right to change)",
app_settings.chromatic_aberration_intensity
)
.into()
}
/// Handles requests from the user to change the chromatic aberration intensity.
fn handle_keyboard_input(mut app_settings: ResMut<AppSettings>, input: Res<ButtonInput<KeyCode>>) {
let mut delta = 0.0;
if input.pressed(KeyCode::ArrowLeft) {
delta -= CHROMATIC_ABERRATION_INTENSITY_ADJUSTMENT_SPEED;
} else if input.pressed(KeyCode::ArrowRight) {
delta += CHROMATIC_ABERRATION_INTENSITY_ADJUSTMENT_SPEED;
}
// If no arrow key was pressed, just bail out.
if delta == 0.0 {
return;
}
app_settings.chromatic_aberration_intensity = (app_settings.chromatic_aberration_intensity
+ delta)
.clamp(0.0, MAX_CHROMATIC_ABERRATION_INTENSITY);
}
/// Updates the [`ChromaticAberration`] settings per the [`AppSettings`].
fn update_chromatic_aberration_settings(
mut chromatic_aberration: Query<&mut ChromaticAberration>,
app_settings: Res<AppSettings>,
) {
let intensity = app_settings.chromatic_aberration_intensity;
// Pick a reasonable maximum sample size for the intensity to avoid an
// artifact whereby the individual samples appear instead of producing
// smooth streaks of color.
//
// Don't take this formula too seriously; it hasn't been heavily tuned.
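// For reference: an intensity of 0.02 maps to the minimum of 8 samples,
// and 0.20 maps to the maximum of 64.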
let max_samples = ((intensity - 0.02) / (0.20 - 0.02) * 56.0 + 8.0)
.clamp(8.0, 64.0)
.round() as u32;
for mut chromatic_aberration in &mut chromatic_aberration {
chromatic_aberration.intensity = intensity;
chromatic_aberration.max_samples = max_samples;
}
}
/// Updates the help text at the bottom of the screen to reflect the current
/// [`AppSettings`].
fn update_help_text(mut text: Query<&mut Text>, app_settings: Res<AppSettings>) {
for mut text in text.iter_mut() {
*text = create_help_text(&app_settings);
}
}

View File

@@ -0,0 +1,69 @@
//! This example demonstrates how to query a [`StandardMaterial`] within a glTF scene.
//! It is particularly useful for glTF scenes with a mesh that consists of multiple primitives.
use std::f32::consts::PI;
use bevy::{gltf::GltfMaterialName, prelude::*, render::mesh::VertexAttributeValues};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, find_top_material_and_mesh)
.run();
}
fn find_top_material_and_mesh(
mut materials: ResMut<Assets<StandardMaterial>>,
mut meshes: ResMut<Assets<Mesh>>,
time: Res<Time>,
mat_query: Query<(
&MeshMaterial3d<StandardMaterial>,
&Mesh3d,
&GltfMaterialName,
)>,
) {
for (mat_handle, mesh_handle, name) in mat_query.iter() {
// locate a material by material name
if name.0 == "Top" {
if let Some(material) = materials.get_mut(mat_handle) {
if let Color::Hsla(ref mut hsla) = material.base_color {
*hsla = hsla.rotate_hue(time.delta_secs() * 100.0);
} else {
material.base_color = Color::from(Hsla::hsl(0.0, 0.9, 0.7));
}
}
if let Some(mesh) = meshes.get_mut(mesh_handle) {
if let Some(VertexAttributeValues::Float32x3(positions)) =
mesh.attribute_mut(Mesh::ATTRIBUTE_POSITION)
{
for position in positions {
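// Oscillate the vertex height between 1.0 and 2.0, with a period of about 12.6 seconds.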
*position = (
position[0],
1.5 + 0.5 * ops::sin(time.elapsed_secs() / 2.0),
position[2],
)
.into();
}
}
}
}
}
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn((
Camera3d::default(),
Transform::from_xyz(4.0, 4.0, 12.0).looking_at(Vec3::new(0.0, 0.0, 0.5), Vec3::Y),
));
commands.spawn((
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, 1.0, -PI / 4.)),
DirectionalLight::default(),
));
commands.spawn(SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/GltfPrimitives/gltf_primitives.glb"),
)));
}

View File

@@ -0,0 +1,339 @@
//! This example shows how to place reflection probes in the scene.
//!
//! Press Space to switch between no reflections, environment map reflections
//! (i.e. the skybox only, not the cubes), and a full reflection probe that
//! reflects the skybox and the cubes. Press Enter to pause or resume the rotation.
//!
//! Reflection probes don't work on WebGL 2 or WebGPU.
use bevy::{core_pipeline::Skybox, prelude::*};
use std::{
f32::consts::PI,
fmt::{Display, Formatter, Result as FmtResult},
};
static STOP_ROTATION_HELP_TEXT: &str = "Press Enter to stop rotation";
static START_ROTATION_HELP_TEXT: &str = "Press Enter to start rotation";
static REFLECTION_MODE_HELP_TEXT: &str = "Press Space to switch reflection mode";
// The mode the application is in.
#[derive(Resource)]
struct AppStatus {
// Which environment maps the user has requested to display.
reflection_mode: ReflectionMode,
// Whether the user has requested the scene to rotate.
rotating: bool,
}
// Which environment maps the user has requested to display.
#[derive(Clone, Copy)]
enum ReflectionMode {
// No environment maps are shown.
None = 0,
// Only a world environment map is shown.
EnvironmentMap = 1,
// Both a world environment map and a reflection probe are present. The
// reflection probe is shown in the sphere.
ReflectionProbe = 2,
}
// The various reflection maps.
#[derive(Resource)]
struct Cubemaps {
// The blurry diffuse cubemap. This is used for both the world environment
// map and the reflection probe. (In reality you wouldn't do this, but it
// reduces the complexity of this example a bit.)
diffuse: Handle<Image>,
// The specular cubemap that reflects the world, but not the cubes.
specular_environment_map: Handle<Image>,
// The specular cubemap that reflects both the world and the cubes.
specular_reflection_probe: Handle<Image>,
// The skybox cubemap image. This is almost the same as
// `specular_environment_map`.
skybox: Handle<Image>,
}
fn main() {
// Create the app.
App::new()
.add_plugins(DefaultPlugins)
.init_resource::<AppStatus>()
.init_resource::<Cubemaps>()
.add_systems(Startup, setup)
.add_systems(PreUpdate, add_environment_map_to_camera)
.add_systems(Update, change_reflection_type)
.add_systems(Update, toggle_rotation)
.add_systems(
Update,
rotate_camera
.after(toggle_rotation)
.after(change_reflection_type),
)
.add_systems(Update, update_text.after(rotate_camera))
.run();
}
// Spawns all the scene objects.
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
asset_server: Res<AssetServer>,
app_status: Res<AppStatus>,
cubemaps: Res<Cubemaps>,
) {
spawn_scene(&mut commands, &asset_server);
spawn_camera(&mut commands);
spawn_sphere(&mut commands, &mut meshes, &mut materials);
spawn_reflection_probe(&mut commands, &cubemaps);
spawn_text(&mut commands, &app_status);
}
// Spawns the cubes, light, and camera.
fn spawn_scene(commands: &mut Commands, asset_server: &AssetServer) {
commands.spawn(SceneRoot(
asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/cubes/Cubes.glb")),
));
}
// Spawns the camera.
fn spawn_camera(commands: &mut Commands) {
commands.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Transform::from_xyz(-6.483, 0.325, 4.381).looking_at(Vec3::ZERO, Vec3::Y),
));
}
// Creates the sphere mesh and spawns it.
fn spawn_sphere(
commands: &mut Commands,
meshes: &mut Assets<Mesh>,
materials: &mut Assets<StandardMaterial>,
) {
// Create a sphere mesh.
let sphere_mesh = meshes.add(Sphere::new(1.0).mesh().ico(7).unwrap());
// Create a sphere.
commands.spawn((
Mesh3d(sphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Srgba::hex("#ffd891").unwrap().into(),
metallic: 1.0,
perceptual_roughness: 0.0,
..StandardMaterial::default()
})),
));
}
// Spawns the reflection probe.
fn spawn_reflection_probe(commands: &mut Commands, cubemaps: &Cubemaps) {
commands.spawn((
LightProbe,
EnvironmentMapLight {
diffuse_map: cubemaps.diffuse.clone(),
specular_map: cubemaps.specular_reflection_probe.clone(),
intensity: 5000.0,
..default()
},
// 2.0 because the sphere's radius is 1.0 and we want to fully enclose it.
Transform::from_scale(Vec3::splat(2.0)),
));
}
// Spawns the help text.
fn spawn_text(commands: &mut Commands, app_status: &AppStatus) {
// Create the text.
commands.spawn((
app_status.create_text(),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
// Adds a world environment map to the camera. This separate system is needed because the camera is
// managed by the scene spawner, as it's part of the glTF file with the cubes, so we have to add
// the environment map after the fact.
fn add_environment_map_to_camera(
mut commands: Commands,
query: Query<Entity, Added<Camera3d>>,
cubemaps: Res<Cubemaps>,
) {
for camera_entity in query.iter() {
commands
.entity(camera_entity)
.insert(create_camera_environment_map_light(&cubemaps))
.insert(Skybox {
image: cubemaps.skybox.clone(),
brightness: 5000.0,
..default()
});
}
}
// A system that handles switching between different reflection modes.
fn change_reflection_type(
mut commands: Commands,
light_probe_query: Query<Entity, With<LightProbe>>,
camera_query: Query<Entity, With<Camera3d>>,
keyboard: Res<ButtonInput<KeyCode>>,
mut app_status: ResMut<AppStatus>,
cubemaps: Res<Cubemaps>,
) {
// Only do anything if space was pressed.
if !keyboard.just_pressed(KeyCode::Space) {
return;
}
// Switch reflection mode.
app_status.reflection_mode =
ReflectionMode::try_from((app_status.reflection_mode as u32 + 1) % 3).unwrap();
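// The modulo keeps the value in 0..3, which always matches a variant, so the `unwrap` cannot panic.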
// Add or remove the light probe.
for light_probe in light_probe_query.iter() {
commands.entity(light_probe).despawn();
}
match app_status.reflection_mode {
ReflectionMode::None | ReflectionMode::EnvironmentMap => {}
ReflectionMode::ReflectionProbe => spawn_reflection_probe(&mut commands, &cubemaps),
}
// Add or remove the environment map from the camera.
for camera in camera_query.iter() {
match app_status.reflection_mode {
ReflectionMode::None => {
commands.entity(camera).remove::<EnvironmentMapLight>();
}
ReflectionMode::EnvironmentMap | ReflectionMode::ReflectionProbe => {
commands
.entity(camera)
.insert(create_camera_environment_map_light(&cubemaps));
}
}
}
}
// A system that handles enabling and disabling rotation.
fn toggle_rotation(keyboard: Res<ButtonInput<KeyCode>>, mut app_status: ResMut<AppStatus>) {
if keyboard.just_pressed(KeyCode::Enter) {
app_status.rotating = !app_status.rotating;
}
}
// A system that updates the help text.
fn update_text(mut text_query: Query<&mut Text>, app_status: Res<AppStatus>) {
for mut text in text_query.iter_mut() {
*text = app_status.create_text();
}
}
impl TryFrom<u32> for ReflectionMode {
type Error = ();
fn try_from(value: u32) -> Result<Self, Self::Error> {
match value {
0 => Ok(ReflectionMode::None),
1 => Ok(ReflectionMode::EnvironmentMap),
2 => Ok(ReflectionMode::ReflectionProbe),
_ => Err(()),
}
}
}
impl Display for ReflectionMode {
fn fmt(&self, formatter: &mut Formatter<'_>) -> FmtResult {
let text = match *self {
ReflectionMode::None => "No reflections",
ReflectionMode::EnvironmentMap => "Environment map",
ReflectionMode::ReflectionProbe => "Reflection probe",
};
formatter.write_str(text)
}
}
impl AppStatus {
// Constructs the help text at the bottom of the screen based on the
// application status.
fn create_text(&self) -> Text {
let rotation_help_text = if self.rotating {
STOP_ROTATION_HELP_TEXT
} else {
START_ROTATION_HELP_TEXT
};
format!(
"{}\n{}\n{}",
self.reflection_mode, rotation_help_text, REFLECTION_MODE_HELP_TEXT
)
.into()
}
}
// Creates the world environment map light, used as a fallback if no reflection
// probe is applicable to a mesh.
fn create_camera_environment_map_light(cubemaps: &Cubemaps) -> EnvironmentMapLight {
EnvironmentMapLight {
diffuse_map: cubemaps.diffuse.clone(),
specular_map: cubemaps.specular_environment_map.clone(),
intensity: 5000.0,
..default()
}
}
// Rotates the camera a bit every frame.
fn rotate_camera(
time: Res<Time>,
mut camera_query: Query<&mut Transform, With<Camera3d>>,
app_status: Res<AppStatus>,
) {
if !app_status.rotating {
return;
}
for mut transform in camera_query.iter_mut() {
transform.translation = Vec2::from_angle(time.delta_secs() * PI / 5.0)
.rotate(transform.translation.xz())
.extend(transform.translation.y)
.xzy();
transform.look_at(Vec3::ZERO, Vec3::Y);
}
}
// Loads the cubemaps from the assets directory.
impl FromWorld for Cubemaps {
fn from_world(world: &mut World) -> Self {
// Just use the specular map for the skybox since it's not too blurry.
// In reality you wouldn't do this--you'd use a real skybox texture--but
// reusing the textures like this saves space in the Bevy repository.
let specular_map = world.load_asset("environment_maps/pisa_specular_rgb9e5_zstd.ktx2");
Cubemaps {
diffuse: world.load_asset("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_reflection_probe: world
.load_asset("environment_maps/cubes_reflection_probe_specular_rgb9e5_zstd.ktx2"),
specular_environment_map: specular_map.clone(),
skybox: specular_map,
}
}
}
impl Default for AppStatus {
fn default() -> Self {
Self {
reflection_mode: ReflectionMode::ReflectionProbe,
rotating: true,
}
}
}

View File

@@ -0,0 +1,137 @@
//! Shows how to render to a texture. Useful for mirrors, UI, or exporting images.
use std::f32::consts::PI;
use bevy::{
prelude::*,
render::{
render_asset::RenderAssetUsages,
render_resource::{Extent3d, TextureDimension, TextureFormat, TextureUsages},
view::RenderLayers,
},
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, (cube_rotator_system, rotator_system))
.run();
}
// Marks the first pass cube (rendered to a texture).
#[derive(Component)]
struct FirstPassCube;
// Marks the main pass cube, to which the texture is applied.
#[derive(Component)]
struct MainPassCube;
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut images: ResMut<Assets<Image>>,
) {
let size = Extent3d {
width: 512,
height: 512,
..default()
};
// This is the texture that will be rendered to.
let mut image = Image::new_fill(
size,
TextureDimension::D2,
&[0, 0, 0, 0],
TextureFormat::Bgra8UnormSrgb,
RenderAssetUsages::default(),
);
// You need to set these texture usage flags in order to use the image as a render target
image.texture_descriptor.usage =
TextureUsages::TEXTURE_BINDING | TextureUsages::COPY_DST | TextureUsages::RENDER_ATTACHMENT;
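// TEXTURE_BINDING lets the main-pass material sample the image, RENDER_ATTACHMENT lets
// the first-pass camera render into it, and COPY_DST allows the initial fill data to be copied in.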
let image_handle = images.add(image);
let cube_handle = meshes.add(Cuboid::new(4.0, 4.0, 4.0));
let cube_material_handle = materials.add(StandardMaterial {
base_color: Color::srgb(0.8, 0.7, 0.6),
reflectance: 0.02,
unlit: false,
..default()
});
// This specifies the layer used for the first pass, which will be attached to the first pass camera and cube.
let first_pass_layer = RenderLayers::layer(1);
// The cube that will be rendered to the texture.
commands.spawn((
Mesh3d(cube_handle),
MeshMaterial3d(cube_material_handle),
Transform::from_translation(Vec3::new(0.0, 0.0, 1.0)),
FirstPassCube,
first_pass_layer.clone(),
));
// Light
// NOTE: we add the light to both layers so it affects both the rendered-to-texture cube, and the cube on which we display the texture
// Setting the layer to RenderLayers::layer(0) would cause the main view to be lit, but the rendered-to-texture cube to be unlit.
// Setting the layer to RenderLayers::layer(1) would cause the rendered-to-texture cube to be lit, but the main view to be unlit.
commands.spawn((
PointLight::default(),
Transform::from_translation(Vec3::new(0.0, 0.0, 10.0)),
RenderLayers::layer(0).with(1),
));
commands.spawn((
Camera3d::default(),
Camera {
target: image_handle.clone().into(),
clear_color: Color::WHITE.into(),
..default()
},
Transform::from_translation(Vec3::new(0.0, 0.0, 15.0)).looking_at(Vec3::ZERO, Vec3::Y),
first_pass_layer,
));
let cube_size = 4.0;
let cube_handle = meshes.add(Cuboid::new(cube_size, cube_size, cube_size));
// This material has the texture that has been rendered.
let material_handle = materials.add(StandardMaterial {
base_color_texture: Some(image_handle),
reflectance: 0.02,
unlit: false,
..default()
});
// Main pass cube, with material containing the rendered first pass texture.
commands.spawn((
Mesh3d(cube_handle),
MeshMaterial3d(material_handle),
Transform::from_xyz(0.0, 0.0, 1.5).with_rotation(Quat::from_rotation_x(-PI / 5.0)),
MainPassCube,
));
// The main pass camera.
commands.spawn((
Camera3d::default(),
Transform::from_xyz(0.0, 0.0, 15.0).looking_at(Vec3::ZERO, Vec3::Y),
));
}
/// Rotates the inner cube (first pass)
fn rotator_system(time: Res<Time>, mut query: Query<&mut Transform, With<FirstPassCube>>) {
for mut transform in &mut query {
transform.rotate_x(1.5 * time.delta_secs());
transform.rotate_z(1.3 * time.delta_secs());
}
}
/// Rotates the outer cube (main pass)
fn cube_rotator_system(time: Res<Time>, mut query: Query<&mut Transform, With<MainPassCube>>) {
for mut transform in &mut query {
transform.rotate_x(1.0 * time.delta_secs());
transform.rotate_y(0.7 * time.delta_secs());
}
}

View File

@@ -0,0 +1,120 @@
//! Demonstrates how to rotate the skybox and the environment map simultaneously.
use std::f32::consts::PI;
use bevy::{
color::palettes::css::{GOLD, WHITE},
core_pipeline::{tonemapping::Tonemapping::AcesFitted, Skybox},
image::ImageLoaderSettings,
prelude::*,
};
/// Entry point.
pub fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, rotate_skybox_and_environment_map)
.run();
}
/// Initializes the scene.
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
asset_server: Res<AssetServer>,
) {
let sphere_mesh = create_sphere_mesh(&mut meshes);
spawn_sphere(&mut commands, &mut materials, &asset_server, &sphere_mesh);
spawn_light(&mut commands);
spawn_camera(&mut commands, &asset_server);
}
/// Rotate the skybox and the environment map per frame.
fn rotate_skybox_and_environment_map(
mut environments: Query<(&mut Skybox, &mut EnvironmentMapLight)>,
time: Res<Time>,
) {
let now = time.elapsed_secs();
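// Rotate at 0.2 rad/s around Y, i.e. one full turn roughly every 31 seconds.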
let rotation = Quat::from_rotation_y(0.2 * now);
for (mut skybox, mut environment_map) in environments.iter_mut() {
skybox.rotation = rotation;
environment_map.rotation = rotation;
}
}
/// Generates a sphere.
fn create_sphere_mesh(meshes: &mut Assets<Mesh>) -> Handle<Mesh> {
// We're going to use normal maps, so make sure we've generated tangents, or
// else the normal maps won't show up.
let mut sphere_mesh = Sphere::new(1.0).mesh().build();
sphere_mesh
.generate_tangents()
.expect("Failed to generate tangents");
meshes.add(sphere_mesh)
}
/// Spawn a regular object with a clearcoat layer. This looks like car paint.
fn spawn_sphere(
commands: &mut Commands,
materials: &mut Assets<StandardMaterial>,
asset_server: &AssetServer,
sphere_mesh: &Handle<Mesh>,
) {
commands.spawn((
Mesh3d(sphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
clearcoat: 1.0,
clearcoat_perceptual_roughness: 0.3,
clearcoat_normal_texture: Some(asset_server.load_with_settings(
"textures/ScratchedGold-Normal.png",
|settings: &mut ImageLoaderSettings| settings.is_srgb = false,
)),
metallic: 0.9,
perceptual_roughness: 0.1,
base_color: GOLD.into(),
..default()
})),
Transform::from_xyz(0.0, 0.0, 0.0).with_scale(Vec3::splat(1.25)),
));
}
/// Spawns a light.
fn spawn_light(commands: &mut Commands) {
commands.spawn(PointLight {
color: WHITE.into(),
intensity: 100000.0,
..default()
});
}
/// Spawns a camera with associated skybox and environment map.
fn spawn_camera(commands: &mut Commands, asset_server: &AssetServer) {
commands
.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Projection::Perspective(PerspectiveProjection {
fov: 27.0 / 180.0 * PI,
..default()
}),
Transform::from_xyz(0.0, 0.0, 10.0),
AcesFitted,
))
.insert(Skybox {
brightness: 5000.0,
image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
..default()
})
.insert(EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 2000.0,
..default()
});
}

130
vendor/bevy/examples/3d/scrolling_fog.rs vendored Normal file
View File

@@ -0,0 +1,130 @@
//! Showcases a `FogVolume`'s density texture being scrolled over time to create
//! the effect of fog moving in the wind.
//!
//! The density texture is a repeating 3d noise texture and the `density_texture_offset`
//! is moved every frame to achieve this.
//!
//! The example also utilizes the jitter option of `VolumetricFog` in tandem
//! with temporal anti-aliasing to improve the visual quality of the effect.
//!
//! The camera is looking at a pillar with the sun peeking behind it. The light
//! interactions change based on the density of the fog.
use bevy::{
core_pipeline::{
bloom::Bloom,
experimental::taa::{TemporalAntiAliasPlugin, TemporalAntiAliasing},
},
image::{
ImageAddressMode, ImageFilterMode, ImageLoaderSettings, ImageSampler,
ImageSamplerDescriptor,
},
pbr::{DirectionalLightShadowMap, FogVolume, VolumetricFog, VolumetricLight},
prelude::*,
};
/// Initializes the example.
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Scrolling Fog".into(),
..default()
}),
..default()
}))
.insert_resource(DirectionalLightShadowMap { size: 4096 })
.add_plugins(TemporalAntiAliasPlugin)
.add_systems(Startup, setup)
.add_systems(Update, scroll_fog)
.run();
}
/// Spawns all entities into the scene.
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
assets: Res<AssetServer>,
) {
// Spawn camera with temporal anti-aliasing and a VolumetricFog configuration.
commands.spawn((
Camera3d::default(),
Transform::from_xyz(0.0, 2.0, 0.0).looking_at(Vec3::new(-5.0, 3.5, -6.0), Vec3::Y),
Camera {
hdr: true,
..default()
},
Msaa::Off,
TemporalAntiAliasing::default(),
Bloom::default(),
VolumetricFog {
ambient_intensity: 0.0,
jitter: 0.5,
..default()
},
));
// Spawn a directional light shining at the camera with the VolumetricLight component.
commands.spawn((
DirectionalLight {
shadows_enabled: true,
..default()
},
Transform::from_xyz(-5.0, 5.0, -7.0).looking_at(Vec3::new(0.0, 0.0, 0.0), Vec3::Y),
VolumetricLight,
));
// Spawn ground mesh.
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(64.0, 1.0, 64.0))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::BLACK,
perceptual_roughness: 1.0,
..default()
})),
Transform::from_xyz(0.0, -0.5, 0.0),
));
// Spawn pillar standing between the camera and the sun.
commands.spawn((
Mesh3d(meshes.add(Cuboid::new(2.0, 9.0, 2.0))),
MeshMaterial3d(materials.add(Color::BLACK)),
Transform::from_xyz(-10.0, 4.5, -11.0),
));
// Load a repeating 3d noise texture. Make sure to set ImageAddressMode to Repeat
// so that the texture wraps around as the density texture offset is moved along.
// Also set ImageFilterMode to Linear so that the fog isn't pixelated.
let noise_texture = assets.load_with_settings("volumes/fog_noise.ktx2", |settings: &mut _| {
*settings = ImageLoaderSettings {
sampler: ImageSampler::Descriptor(ImageSamplerDescriptor {
address_mode_u: ImageAddressMode::Repeat,
address_mode_v: ImageAddressMode::Repeat,
address_mode_w: ImageAddressMode::Repeat,
mag_filter: ImageFilterMode::Linear,
min_filter: ImageFilterMode::Linear,
mipmap_filter: ImageFilterMode::Linear,
..default()
}),
..default()
}
});
// Spawn a FogVolume and use the repeating noise texture as its density texture.
commands.spawn((
Transform::from_xyz(0.0, 32.0, 0.0).with_scale(Vec3::splat(64.0)),
FogVolume {
density_texture: Some(noise_texture),
density_factor: 0.05,
..default()
},
));
}
/// Moves fog density texture offset every frame.
fn scroll_fog(time: Res<Time>, mut query: Query<&mut FogVolume>) {
for mut fog_volume in query.iter_mut() {
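// Scroll the density texture offset by 0.04 units per second along +Z.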
fog_volume.density_texture_offset += Vec3::new(0.0, 0.0, 0.04) * time.delta_secs();
}
}

304
vendor/bevy/examples/3d/shadow_biases.rs vendored Normal file
View File

@@ -0,0 +1,304 @@
//! Demonstrates how shadow biases affect shadows in a 3d scene.
#[path = "../helpers/camera_controller.rs"]
mod camera_controller;
use bevy::{pbr::ShadowFilteringMethod, prelude::*};
use camera_controller::{CameraController, CameraControllerPlugin};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_plugins(CameraControllerPlugin)
.add_systems(Startup, setup)
.add_systems(
Update,
(
cycle_filter_methods,
adjust_light_position,
adjust_point_light_biases,
toggle_light,
adjust_directional_light_biases,
),
)
.run();
}
#[derive(Component)]
struct Lights;
/// set up a 3D scene to test shadow biases and perspective projections
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
let spawn_plane_depth = 300.0f32;
let spawn_height = 2.0;
let sphere_radius = 0.25;
let white_handle = materials.add(StandardMaterial {
base_color: Color::WHITE,
perceptual_roughness: 1.0,
..default()
});
let sphere_handle = meshes.add(Sphere::new(sphere_radius));
let light_transform = Transform::from_xyz(5.0, 5.0, 0.0).looking_at(Vec3::ZERO, Vec3::Y);
commands
.spawn((light_transform, Visibility::default(), Lights))
.with_children(|builder| {
builder.spawn(PointLight {
intensity: 0.0,
range: spawn_plane_depth,
color: Color::WHITE,
shadows_enabled: true,
..default()
});
builder.spawn(DirectionalLight {
shadows_enabled: true,
..default()
});
});
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-1.0, 1.0, 1.0).looking_at(Vec3::new(-1.0, 1.0, 0.0), Vec3::Y),
CameraController::default(),
ShadowFilteringMethod::Hardware2x2,
));
for z_i32 in (-spawn_plane_depth as i32..=0).step_by(2) {
commands.spawn((
Mesh3d(sphere_handle.clone()),
MeshMaterial3d(white_handle.clone()),
Transform::from_xyz(
0.0,
if z_i32 % 4 == 0 {
spawn_height
} else {
sphere_radius
},
z_i32 as f32,
),
));
}
// ground plane
let plane_size = 2.0 * spawn_plane_depth;
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(plane_size, plane_size))),
MeshMaterial3d(white_handle),
));
commands
.spawn((
Node {
position_type: PositionType::Absolute,
padding: UiRect::all(Val::Px(5.0)),
..default()
},
BackgroundColor(Color::BLACK.with_alpha(0.75)),
GlobalZIndex(i32::MAX),
))
.with_children(|p| {
p.spawn(Text::default()).with_children(|p| {
p.spawn(TextSpan::new("Controls:\n"));
p.spawn(TextSpan::new("R / Z - reset biases to default / zero\n"));
p.spawn(TextSpan::new(
"L - switch between directional and point lights [",
));
p.spawn(TextSpan::new("DirectionalLight"));
p.spawn(TextSpan::new("]\n"));
p.spawn(TextSpan::new(
"F - switch directional light filter methods [",
));
p.spawn(TextSpan::new("Hardware2x2"));
p.spawn(TextSpan::new("]\n"));
p.spawn(TextSpan::new("1/2 - change point light depth bias ["));
p.spawn(TextSpan::new("0.00"));
p.spawn(TextSpan::new("]\n"));
p.spawn(TextSpan::new("3/4 - change point light normal bias ["));
p.spawn(TextSpan::new("0.0"));
p.spawn(TextSpan::new("]\n"));
p.spawn(TextSpan::new("5/6 - change direction light depth bias ["));
p.spawn(TextSpan::new("0.00"));
p.spawn(TextSpan::new("]\n"));
p.spawn(TextSpan::new(
"7/8 - change direction light normal bias [",
));
p.spawn(TextSpan::new("0.0"));
p.spawn(TextSpan::new("]\n"));
p.spawn(TextSpan::new(
"left/right/up/down/pgup/pgdown - adjust light position (looking at 0,0,0) [",
));
p.spawn(TextSpan(format!("{:.1},", light_transform.translation.x)));
p.spawn(TextSpan(format!(" {:.1},", light_transform.translation.y)));
p.spawn(TextSpan(format!(" {:.1}", light_transform.translation.z)));
p.spawn(TextSpan::new("]\n"));
});
});
}
fn toggle_light(
input: Res<ButtonInput<KeyCode>>,
mut point_lights: Query<&mut PointLight>,
mut directional_lights: Query<&mut DirectionalLight>,
example_text: Single<Entity, With<Text>>,
mut writer: TextUiWriter,
) {
if input.just_pressed(KeyCode::KeyL) {
for mut light in &mut point_lights {
light.intensity = if light.intensity == 0.0 {
*writer.text(*example_text, 4) = "PointLight".to_string();
100000000.0
} else {
0.0
};
}
for mut light in &mut directional_lights {
light.illuminance = if light.illuminance == 0.0 {
*writer.text(*example_text, 4) = "DirectionalLight".to_string();
100000.0
} else {
0.0
};
}
}
}
fn adjust_light_position(
input: Res<ButtonInput<KeyCode>>,
mut lights: Query<&mut Transform, With<Lights>>,
example_text: Single<Entity, With<Text>>,
mut writer: TextUiWriter,
) {
let mut offset = Vec3::ZERO;
if input.just_pressed(KeyCode::ArrowLeft) {
offset.x -= 1.0;
}
if input.just_pressed(KeyCode::ArrowRight) {
offset.x += 1.0;
}
if input.just_pressed(KeyCode::ArrowUp) {
offset.z -= 1.0;
}
if input.just_pressed(KeyCode::ArrowDown) {
offset.z += 1.0;
}
if input.just_pressed(KeyCode::PageDown) {
offset.y -= 1.0;
}
if input.just_pressed(KeyCode::PageUp) {
offset.y += 1.0;
}
if offset != Vec3::ZERO {
let example_text = *example_text;
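// Span indices follow the spawn order of the `TextSpan` children in `setup`
// (0 is the root `Text`); 22-24 are the x/y/z position values.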
for mut light in &mut lights {
light.translation += offset;
light.look_at(Vec3::ZERO, Vec3::Y);
*writer.text(example_text, 22) = format!("{:.1},", light.translation.x);
*writer.text(example_text, 23) = format!(" {:.1},", light.translation.y);
*writer.text(example_text, 24) = format!(" {:.1}", light.translation.z);
}
}
}
fn cycle_filter_methods(
input: Res<ButtonInput<KeyCode>>,
mut filter_methods: Query<&mut ShadowFilteringMethod>,
example_text: Single<Entity, With<Text>>,
mut writer: TextUiWriter,
) {
if input.just_pressed(KeyCode::KeyF) {
for mut filter_method in &mut filter_methods {
let filter_method_string;
*filter_method = match *filter_method {
ShadowFilteringMethod::Hardware2x2 => {
filter_method_string = "Gaussian".to_string();
ShadowFilteringMethod::Gaussian
}
ShadowFilteringMethod::Gaussian => {
filter_method_string = "Temporal".to_string();
ShadowFilteringMethod::Temporal
}
ShadowFilteringMethod::Temporal => {
filter_method_string = "Hardware2x2".to_string();
ShadowFilteringMethod::Hardware2x2
}
};
*writer.text(*example_text, 7) = filter_method_string;
}
}
}
fn adjust_point_light_biases(
input: Res<ButtonInput<KeyCode>>,
mut query: Query<&mut PointLight>,
example_text: Single<Entity, With<Text>>,
mut writer: TextUiWriter,
) {
let depth_bias_step_size = 0.01;
let normal_bias_step_size = 0.1;
for mut light in &mut query {
if input.just_pressed(KeyCode::Digit1) {
light.shadow_depth_bias -= depth_bias_step_size;
}
if input.just_pressed(KeyCode::Digit2) {
light.shadow_depth_bias += depth_bias_step_size;
}
if input.just_pressed(KeyCode::Digit3) {
light.shadow_normal_bias -= normal_bias_step_size;
}
if input.just_pressed(KeyCode::Digit4) {
light.shadow_normal_bias += normal_bias_step_size;
}
if input.just_pressed(KeyCode::KeyR) {
light.shadow_depth_bias = PointLight::DEFAULT_SHADOW_DEPTH_BIAS;
light.shadow_normal_bias = PointLight::DEFAULT_SHADOW_NORMAL_BIAS;
}
if input.just_pressed(KeyCode::KeyZ) {
light.shadow_depth_bias = 0.0;
light.shadow_normal_bias = 0.0;
}
*writer.text(*example_text, 10) = format!("{:.2}", light.shadow_depth_bias);
*writer.text(*example_text, 13) = format!("{:.1}", light.shadow_normal_bias);
}
}
fn adjust_directional_light_biases(
input: Res<ButtonInput<KeyCode>>,
mut query: Query<&mut DirectionalLight>,
example_text: Single<Entity, With<Text>>,
mut writer: TextUiWriter,
) {
let depth_bias_step_size = 0.01;
let normal_bias_step_size = 0.1;
for mut light in &mut query {
if input.just_pressed(KeyCode::Digit5) {
light.shadow_depth_bias -= depth_bias_step_size;
}
if input.just_pressed(KeyCode::Digit6) {
light.shadow_depth_bias += depth_bias_step_size;
}
if input.just_pressed(KeyCode::Digit7) {
light.shadow_normal_bias -= normal_bias_step_size;
}
if input.just_pressed(KeyCode::Digit8) {
light.shadow_normal_bias += normal_bias_step_size;
}
if input.just_pressed(KeyCode::KeyR) {
light.shadow_depth_bias = DirectionalLight::DEFAULT_SHADOW_DEPTH_BIAS;
light.shadow_normal_bias = DirectionalLight::DEFAULT_SHADOW_NORMAL_BIAS;
}
if input.just_pressed(KeyCode::KeyZ) {
light.shadow_depth_bias = 0.0;
light.shadow_normal_bias = 0.0;
}
*writer.text(*example_text, 16) = format!("{:.2}", light.shadow_depth_bias);
*writer.text(*example_text, 19) = format!("{:.1}", light.shadow_normal_bias);
}
}

View File

@@ -0,0 +1,160 @@
//! Demonstrates how to prevent meshes from casting/receiving shadows in a 3d scene.
use std::f32::consts::PI;
use bevy::{
color::palettes::basic::{BLUE, LIME, RED},
pbr::{CascadeShadowConfigBuilder, NotShadowCaster, NotShadowReceiver},
prelude::*,
};
fn main() {
println!(
"Controls:
C - toggle shadow casters (i.e. casters become non-casters, and non-casters become casters)
R - toggle shadow receivers (i.e. receivers become non-receivers, and non-receivers become receivers)
L - switch between directional and point lights"
);
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, (toggle_light, toggle_shadows))
.run();
}
/// set up a 3D scene to test shadow casting and receiving
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
let spawn_plane_depth = 500.0f32;
let spawn_height = 2.0;
let sphere_radius = 0.25;
let white_handle = materials.add(StandardMaterial {
base_color: Color::WHITE,
perceptual_roughness: 1.0,
..default()
});
let sphere_handle = meshes.add(Sphere::new(sphere_radius));
// sphere - initially a caster
commands.spawn((
Mesh3d(sphere_handle.clone()),
MeshMaterial3d(materials.add(Color::from(RED))),
Transform::from_xyz(-1.0, spawn_height, 0.0),
));
// sphere - initially not a caster
commands.spawn((
Mesh3d(sphere_handle),
MeshMaterial3d(materials.add(Color::from(BLUE))),
Transform::from_xyz(1.0, spawn_height, 0.0),
NotShadowCaster,
));
// floating plane - initially not a shadow receiver and not a caster
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(20.0, 20.0))),
MeshMaterial3d(materials.add(Color::from(LIME))),
Transform::from_xyz(0.0, 1.0, -10.0),
NotShadowCaster,
NotShadowReceiver,
));
// lower ground plane - initially a shadow receiver
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(20.0, 20.0))),
MeshMaterial3d(white_handle),
));
println!("Using DirectionalLight");
commands.spawn((
PointLight {
intensity: 0.0,
range: spawn_plane_depth,
color: Color::WHITE,
shadows_enabled: true,
..default()
},
Transform::from_xyz(5.0, 5.0, 0.0),
));
commands.spawn((
DirectionalLight {
illuminance: light_consts::lux::OVERCAST_DAY,
shadows_enabled: true,
..default()
},
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, PI / 2., -PI / 4.)),
CascadeShadowConfigBuilder {
first_cascade_far_bound: 7.0,
maximum_distance: 25.0,
..default()
}
.build(),
));
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-5.0, 5.0, 5.0).looking_at(Vec3::new(-1.0, 1.0, 0.0), Vec3::Y),
));
}
fn toggle_light(
input: Res<ButtonInput<KeyCode>>,
mut point_lights: Query<&mut PointLight>,
mut directional_lights: Query<&mut DirectionalLight>,
) {
if input.just_pressed(KeyCode::KeyL) {
for mut light in &mut point_lights {
light.intensity = if light.intensity == 0.0 {
println!("Using PointLight");
1_000_000.0 // Mini-sun point light
} else {
0.0
};
}
for mut light in &mut directional_lights {
light.illuminance = if light.illuminance == 0.0 {
println!("Using DirectionalLight");
light_consts::lux::OVERCAST_DAY
} else {
0.0
};
}
}
}
fn toggle_shadows(
mut commands: Commands,
input: Res<ButtonInput<KeyCode>>,
mut queries: ParamSet<(
Query<Entity, (With<Mesh3d>, With<NotShadowCaster>)>,
Query<Entity, (With<Mesh3d>, With<NotShadowReceiver>)>,
Query<Entity, (With<Mesh3d>, Without<NotShadowCaster>)>,
Query<Entity, (With<Mesh3d>, Without<NotShadowReceiver>)>,
)>,
) {
if input.just_pressed(KeyCode::KeyC) {
println!("Toggling casters");
for entity in queries.p0().iter() {
commands.entity(entity).remove::<NotShadowCaster>();
}
for entity in queries.p2().iter() {
commands.entity(entity).insert(NotShadowCaster);
}
}
if input.just_pressed(KeyCode::KeyR) {
println!("Toggling receivers");
for entity in queries.p1().iter() {
commands.entity(entity).remove::<NotShadowReceiver>();
}
for entity in queries.p3().iter() {
commands.entity(entity).insert(NotShadowReceiver);
}
}
}
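
// A hedged alternative sketch (not part of the example above): instead of a `ParamSet` with
// four queries, a single query using `Has<NotShadowCaster>` can report whether the marker is
// present and flip it in one pass. The function name is illustrative only, and it assumes
// `Has` is available via the prelude, as in recent Bevy versions.
#[allow(dead_code)]
fn toggle_casters_single_query(
    mut commands: Commands,
    meshes: Query<(Entity, Has<NotShadowCaster>), With<Mesh3d>>,
) {
    for (entity, is_not_caster) in &meshes {
        if is_not_caster {
            // Currently not casting: remove the marker so the mesh casts shadows again.
            commands.entity(entity).remove::<NotShadowCaster>();
        } else {
            // Currently casting: add the marker to stop the mesh from casting shadows.
            commands.entity(entity).insert(NotShadowCaster);
        }
    }
}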

177
vendor/bevy/examples/3d/skybox.rs vendored Normal file

@@ -0,0 +1,177 @@
//! Load a cubemap texture and apply it as a skybox, cycling through different compressed texture formats
#[path = "../helpers/camera_controller.rs"]
mod camera_controller;
use bevy::{
core_pipeline::Skybox,
image::CompressedImageFormats,
prelude::*,
render::{
render_resource::{TextureViewDescriptor, TextureViewDimension},
renderer::RenderDevice,
},
};
use camera_controller::{CameraController, CameraControllerPlugin};
use std::f32::consts::PI;
const CUBEMAPS: &[(&str, CompressedImageFormats)] = &[
(
"textures/Ryfjallet_cubemap.png",
CompressedImageFormats::NONE,
),
(
"textures/Ryfjallet_cubemap_astc4x4.ktx2",
CompressedImageFormats::ASTC_LDR,
),
(
"textures/Ryfjallet_cubemap_bc7.ktx2",
CompressedImageFormats::BC,
),
(
"textures/Ryfjallet_cubemap_etc2.ktx2",
CompressedImageFormats::ETC2,
),
];
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_plugins(CameraControllerPlugin)
.add_systems(Startup, setup)
.add_systems(
Update,
(
cycle_cubemap_asset,
asset_loaded.after(cycle_cubemap_asset),
animate_light_direction,
),
)
.run();
}
#[derive(Resource)]
struct Cubemap {
is_loaded: bool,
index: usize,
image_handle: Handle<Image>,
}
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
// directional 'sun' light
commands.spawn((
DirectionalLight {
illuminance: 32000.0,
..default()
},
Transform::from_xyz(0.0, 2.0, 0.0).with_rotation(Quat::from_rotation_x(-PI / 4.)),
));
let skybox_handle = asset_server.load(CUBEMAPS[0].0);
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(0.0, 0.0, 8.0).looking_at(Vec3::ZERO, Vec3::Y),
CameraController::default(),
Skybox {
image: skybox_handle.clone(),
brightness: 1000.0,
..default()
},
));
// ambient light
// NOTE: The ambient light is used to scale how bright the environment map is, so with a bright
// environment map, pick an ambient color and brightness to match.
commands.insert_resource(AmbientLight {
color: Color::srgb_u8(210, 220, 240),
brightness: 1.0,
..default()
});
commands.insert_resource(Cubemap {
is_loaded: false,
index: 0,
image_handle: skybox_handle,
});
}
const CUBEMAP_SWAP_DELAY: f32 = 3.0;
fn cycle_cubemap_asset(
time: Res<Time>,
mut next_swap: Local<f32>,
mut cubemap: ResMut<Cubemap>,
asset_server: Res<AssetServer>,
render_device: Res<RenderDevice>,
) {
let now = time.elapsed_secs();
if *next_swap == 0.0 {
*next_swap = now + CUBEMAP_SWAP_DELAY;
return;
} else if now < *next_swap {
return;
}
*next_swap += CUBEMAP_SWAP_DELAY;
let supported_compressed_formats =
CompressedImageFormats::from_features(render_device.features());
let mut new_index = cubemap.index;
for _ in 0..CUBEMAPS.len() {
new_index = (new_index + 1) % CUBEMAPS.len();
if supported_compressed_formats.contains(CUBEMAPS[new_index].1) {
break;
}
info!(
"Skipping format which is not supported by current hardware: {:?}",
CUBEMAPS[new_index]
);
}
// Skip swapping to the same texture. Useful for when ktx2, zstd, or compressed texture support
// is missing
if new_index == cubemap.index {
return;
}
cubemap.index = new_index;
cubemap.image_handle = asset_server.load(CUBEMAPS[cubemap.index].0);
cubemap.is_loaded = false;
}
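
// Minimal sketch of the format-selection step above, factored out for clarity: pick the first
// entry in `CUBEMAPS` whose compressed format the current GPU supports. The helper name is
// illustrative and not used by the example, which additionally cycles and skips reloading the
// texture that is already active.
#[allow(dead_code)]
fn first_supported_cubemap(supported: CompressedImageFormats) -> Option<&'static str> {
    CUBEMAPS
        .iter()
        .find(|(_, format)| supported.contains(*format))
        .map(|(path, _)| *path)
}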
fn asset_loaded(
asset_server: Res<AssetServer>,
mut images: ResMut<Assets<Image>>,
mut cubemap: ResMut<Cubemap>,
mut skyboxes: Query<&mut Skybox>,
) {
if !cubemap.is_loaded && asset_server.load_state(&cubemap.image_handle).is_loaded() {
info!("Swapping to {}...", CUBEMAPS[cubemap.index].0);
let image = images.get_mut(&cubemap.image_handle).unwrap();
// NOTE: PNGs do not have any metadata that could indicate they contain a cubemap texture,
// so they appear as one texture. The following code reconfigures the texture as necessary.
if image.texture_descriptor.array_layer_count() == 1 {
image.reinterpret_stacked_2d_as_array(image.height() / image.width());
image.texture_view_descriptor = Some(TextureViewDescriptor {
dimension: Some(TextureViewDimension::Cube),
..default()
});
}
for mut skybox in &mut skyboxes {
skybox.image = cubemap.image_handle.clone();
}
cubemap.is_loaded = true;
}
}
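
// Minimal sketch of the cubemap fix-up performed in `asset_loaded`, assuming the source image
// is a vertical strip of square faces (height == 6 * width), as the PNG in `CUBEMAPS` is.
// The helper name is illustrative and not part of Bevy's API.
#[allow(dead_code)]
fn reinterpret_as_cubemap(image: &mut Image) {
    if image.texture_descriptor.array_layer_count() == 1 {
        // Split the stacked strip into `height / width` array layers (6 for a cubemap)...
        image.reinterpret_stacked_2d_as_array(image.height() / image.width());
        // ...and tell the renderer to sample it as a cube texture.
        image.texture_view_descriptor = Some(TextureViewDescriptor {
            dimension: Some(TextureViewDimension::Cube),
            ..default()
        });
    }
}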
fn animate_light_direction(
time: Res<Time>,
mut query: Query<&mut Transform, With<DirectionalLight>>,
) {
for mut transform in &mut query {
transform.rotate_y(time.delta_secs() * 0.5);
}
}

227
vendor/bevy/examples/3d/specular_tint.rs vendored Normal file

@@ -0,0 +1,227 @@
//! Demonstrates specular tints and maps.
use std::f32::consts::PI;
use bevy::{color::palettes::css::WHITE, core_pipeline::Skybox, prelude::*};
/// The camera rotation speed in radians per frame.
const ROTATION_SPEED: f32 = 0.005;
/// The rate at which the specular tint hue changes in degrees per frame.
const HUE_SHIFT_SPEED: f32 = 0.2;
static SWITCH_TO_MAP_HELP_TEXT: &str = "Press Space to switch to a specular map";
static SWITCH_TO_SOLID_TINT_HELP_TEXT: &str = "Press Space to switch to a solid specular tint";
/// The current settings the user has chosen.
#[derive(Resource, Default)]
struct AppStatus {
/// The type of tint (solid or texture map).
tint_type: TintType,
/// The hue of the solid tint in radians.
hue: f32,
}
/// Assets needed by the demo.
#[derive(Resource)]
struct AppAssets {
/// A color tileable 3D noise texture.
noise_texture: Handle<Image>,
}
impl FromWorld for AppAssets {
fn from_world(world: &mut World) -> Self {
let asset_server = world.resource::<AssetServer>();
Self {
noise_texture: asset_server.load("textures/AlphaNoise.png"),
}
}
}
/// The type of specular tint that the user has selected.
#[derive(Clone, Copy, PartialEq, Default)]
enum TintType {
/// A solid color.
#[default]
Solid,
/// A Perlin noise texture.
Map,
}
/// The entry point.
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Specular Tint Example".into(),
..default()
}),
..default()
}))
.init_resource::<AppAssets>()
.init_resource::<AppStatus>()
.insert_resource(AmbientLight {
color: Color::BLACK,
brightness: 0.0,
..default()
})
.add_systems(Startup, setup)
.add_systems(Update, rotate_camera)
.add_systems(Update, (toggle_specular_map, update_text).chain())
.add_systems(Update, shift_hue.after(toggle_specular_map))
.run();
}
/// Creates the scene.
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
app_status: Res<AppStatus>,
mut meshes: ResMut<Assets<Mesh>>,
mut standard_materials: ResMut<Assets<StandardMaterial>>,
) {
// Spawns a camera.
commands.spawn((
Transform::from_xyz(-2.0, 0.0, 3.5).looking_at(Vec3::ZERO, Vec3::Y),
Camera {
hdr: true,
..default()
},
Camera3d::default(),
Skybox {
image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
brightness: 3000.0,
..default()
},
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
// We want relatively high intensity here in order for the specular
// tint to show up well.
intensity: 25000.0,
..default()
},
));
// Spawn the sphere.
commands.spawn((
Transform::from_rotation(Quat::from_rotation_x(PI * 0.5)),
Mesh3d(meshes.add(Sphere::default().mesh().uv(32, 18))),
MeshMaterial3d(standard_materials.add(StandardMaterial {
// We want only reflected specular light here, so we set the base
// color as black.
base_color: Color::BLACK,
reflectance: 1.0,
specular_tint: Color::hsva(app_status.hue, 1.0, 1.0, 1.0),
// The object must not be metallic, or else the reflectance is
// ignored per the Filament spec:
//
// <https://google.github.io/filament/Filament.html#listing_fnormal>
metallic: 0.0,
perceptual_roughness: 0.0,
..default()
})),
));
// Spawn the help text.
commands.spawn((
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
app_status.create_text(),
));
}
/// Rotates the camera a bit every frame.
fn rotate_camera(mut cameras: Query<&mut Transform, With<Camera3d>>) {
for mut camera_transform in cameras.iter_mut() {
camera_transform.translation =
Quat::from_rotation_y(ROTATION_SPEED) * camera_transform.translation;
camera_transform.look_at(Vec3::ZERO, Vec3::Y);
}
}
/// Alters the hue of the solid color a bit every frame.
fn shift_hue(
mut app_status: ResMut<AppStatus>,
objects_with_materials: Query<&MeshMaterial3d<StandardMaterial>>,
mut standard_materials: ResMut<Assets<StandardMaterial>>,
) {
if app_status.tint_type != TintType::Solid {
return;
}
app_status.hue += HUE_SHIFT_SPEED;
for material_handle in objects_with_materials.iter() {
let Some(material) = standard_materials.get_mut(material_handle) else {
continue;
};
material.specular_tint = Color::hsva(app_status.hue, 1.0, 1.0, 1.0);
}
}
impl AppStatus {
/// Returns appropriate help text that reflects the current app status.
fn create_text(&self) -> Text {
let tint_map_help_text = match self.tint_type {
TintType::Solid => SWITCH_TO_MAP_HELP_TEXT,
TintType::Map => SWITCH_TO_SOLID_TINT_HELP_TEXT,
};
Text::new(tint_map_help_text)
}
}
/// Changes the specular tint to a solid color or map when the user presses
/// Space.
fn toggle_specular_map(
keyboard: Res<ButtonInput<KeyCode>>,
mut app_status: ResMut<AppStatus>,
app_assets: Res<AppAssets>,
objects_with_materials: Query<&MeshMaterial3d<StandardMaterial>>,
mut standard_materials: ResMut<Assets<StandardMaterial>>,
) {
if !keyboard.just_pressed(KeyCode::Space) {
return;
}
// Swap tint type.
app_status.tint_type = match app_status.tint_type {
TintType::Solid => TintType::Map,
TintType::Map => TintType::Solid,
};
for material_handle in objects_with_materials.iter() {
let Some(material) = standard_materials.get_mut(material_handle) else {
continue;
};
// Adjust the tint type.
match app_status.tint_type {
TintType::Solid => {
material.reflectance = 1.0;
material.specular_tint_texture = None;
}
TintType::Map => {
// Set reflectance to 2.0 to spread out the map's reflectance
// range from the default [0.0, 0.5] to [0.0, 1.0].
material.reflectance = 2.0;
// As the tint map is multiplied by the tint color, we set the
// latter to white so that only the map has an effect.
material.specular_tint = WHITE.into();
material.specular_tint_texture = Some(app_assets.noise_texture.clone());
}
};
}
}
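
// Back-of-the-envelope note on the `reflectance` values above: Bevy follows the Filament
// convention for dielectrics, where a perceptual reflectance `r` maps to a specular F0 of
// 0.16 * r^2 (the default r = 0.5 gives the familiar F0 = 0.04). The helper below only
// illustrates that mapping and is not used by the example.
#[allow(dead_code)]
fn dielectric_f0(reflectance: f32) -> f32 {
    0.16 * reflectance * reflectance
}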
/// Updates the help text at the bottom of the screen to reflect the current app
/// status.
fn update_text(mut text_query: Query<&mut Text>, app_status: Res<AppStatus>) {
for mut text in text_query.iter_mut() {
*text = app_status.create_text();
}
}


@@ -0,0 +1,66 @@
//! Demonstrates how lighting is affected by the radius of point lights.
use bevy::prelude::*;
fn main() {
App::new()
.insert_resource(AmbientLight {
brightness: 60.0,
..default()
})
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(0.2, 1.5, 2.5).looking_at(Vec3::ZERO, Vec3::Y),
));
// plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(100.0, 100.0))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::srgb(0.2, 0.2, 0.2),
perceptual_roughness: 0.08,
..default()
})),
));
const COUNT: usize = 6;
let position_range = -2.0..2.0;
let radius_range = 0.0..0.4;
let pos_len = position_range.end - position_range.start;
let radius_len = radius_range.end - radius_range.start;
let mesh = meshes.add(Sphere::new(1.0).mesh().uv(120, 64));
for i in 0..COUNT {
let percent = i as f32 / COUNT as f32;
let radius = radius_range.start + percent * radius_len;
// sphere light
commands
.spawn((
Mesh3d(mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::srgb(0.5, 0.5, 1.0),
unlit: true,
..default()
})),
Transform::from_xyz(position_range.start + percent * pos_len, 0.3, 0.0)
.with_scale(Vec3::splat(radius)),
))
.with_child(PointLight {
radius,
color: Color::srgb(0.2, 0.2, 1.0),
..default()
});
}
}
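
// Minimal sketch of the interpolation used above: map an index in `0..count` to a value in
// `range` (the loop uses it for both the X position and the light radius). The helper name is
// illustrative and not part of the example.
#[allow(dead_code)]
fn lerp_index(range: std::ops::Range<f32>, i: usize, count: usize) -> f32 {
    range.start + (i as f32 / count as f32) * (range.end - range.start)
}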

207
vendor/bevy/examples/3d/split_screen.rs vendored Normal file

@@ -0,0 +1,207 @@
//! Renders multiple cameras to the same window to accomplish "split screen".
use std::f32::consts::PI;
use bevy::{
pbr::CascadeShadowConfigBuilder, prelude::*, render::camera::Viewport, window::WindowResized,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, (set_camera_viewports, button_system))
.run();
}
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(100.0, 100.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
));
commands.spawn(SceneRoot(
asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/animated/Fox.glb")),
));
// Light
commands.spawn((
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, 1.0, -PI / 4.)),
DirectionalLight {
shadows_enabled: true,
..default()
},
CascadeShadowConfigBuilder {
num_cascades: if cfg!(all(
feature = "webgl2",
target_arch = "wasm32",
not(feature = "webgpu")
)) {
// Limited to 1 cascade in WebGL
1
} else {
2
},
first_cascade_far_bound: 200.0,
maximum_distance: 280.0,
..default()
}
.build(),
));
// Cameras and their dedicated UI
for (index, (camera_name, camera_pos)) in [
("Player 1", Vec3::new(0.0, 200.0, -150.0)),
("Player 2", Vec3::new(150.0, 150., 50.0)),
("Player 3", Vec3::new(100.0, 150., -150.0)),
("Player 4", Vec3::new(-100.0, 80., 150.0)),
]
.iter()
.enumerate()
{
let camera = commands
.spawn((
Camera3d::default(),
Transform::from_translation(*camera_pos).looking_at(Vec3::ZERO, Vec3::Y),
Camera {
// Renders cameras with different priorities to prevent ambiguities
order: index as isize,
..default()
},
CameraPosition {
pos: UVec2::new((index % 2) as u32, (index / 2) as u32),
},
))
.id();
// Set up UI
commands
.spawn((
UiTargetCamera(camera),
Node {
width: Val::Percent(100.),
height: Val::Percent(100.),
..default()
},
))
.with_children(|parent| {
parent.spawn((
Text::new(*camera_name),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.),
left: Val::Px(12.),
..default()
},
));
buttons_panel(parent);
});
}
fn buttons_panel(parent: &mut ChildSpawnerCommands) {
parent
.spawn(Node {
position_type: PositionType::Absolute,
width: Val::Percent(100.),
height: Val::Percent(100.),
display: Display::Flex,
flex_direction: FlexDirection::Row,
justify_content: JustifyContent::SpaceBetween,
align_items: AlignItems::Center,
padding: UiRect::all(Val::Px(20.)),
..default()
})
.with_children(|parent| {
rotate_button(parent, "<", Direction::Left);
rotate_button(parent, ">", Direction::Right);
});
}
fn rotate_button(parent: &mut ChildSpawnerCommands, caption: &str, direction: Direction) {
parent
.spawn((
RotateCamera(direction),
Button,
Node {
width: Val::Px(40.),
height: Val::Px(40.),
border: UiRect::all(Val::Px(2.)),
justify_content: JustifyContent::Center,
align_items: AlignItems::Center,
..default()
},
BorderColor(Color::WHITE),
BackgroundColor(Color::srgb(0.25, 0.25, 0.25)),
))
.with_children(|parent| {
parent.spawn(Text::new(caption));
});
}
}
#[derive(Component)]
struct CameraPosition {
pos: UVec2,
}
#[derive(Component)]
struct RotateCamera(Direction);
enum Direction {
Left,
Right,
}
fn set_camera_viewports(
windows: Query<&Window>,
mut resize_events: EventReader<WindowResized>,
mut query: Query<(&CameraPosition, &mut Camera)>,
) {
// We need to dynamically resize the cameras' viewports whenever the window size changes
// so that each camera always covers its quarter of the window (half of it in each dimension).
// A resize_event is sent when the window is first created, allowing us to reuse this system for initial setup.
for resize_event in resize_events.read() {
let window = windows.get(resize_event.window).unwrap();
let size = window.physical_size() / 2;
for (camera_position, mut camera) in &mut query {
camera.viewport = Some(Viewport {
physical_position: camera_position.pos * size,
physical_size: size,
..default()
});
}
}
}
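
// A small sketch of the same viewport math generalized to an arbitrary grid, assuming the
// `CameraPosition::pos` convention above (column, row). Illustrative only; the example
// hard-codes a 2x2 layout.
#[allow(dead_code)]
fn grid_viewport(window_size: UVec2, grid: UVec2, cell: UVec2) -> Viewport {
    // Each cell gets an equal share of the window's physical size.
    let size = window_size / grid;
    Viewport {
        physical_position: cell * size,
        physical_size: size,
        ..default()
    }
}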
fn button_system(
interaction_query: Query<
(&Interaction, &ComputedNodeTarget, &RotateCamera),
(Changed<Interaction>, With<Button>),
>,
mut camera_query: Query<&mut Transform, With<Camera>>,
) {
for (interaction, computed_target, RotateCamera(direction)) in &interaction_query {
if let Interaction::Pressed = *interaction {
// Since TargetCamera propagates to the children, we can use it to find
// which side of the screen the button is on.
if let Some(mut camera_transform) = computed_target
.camera()
.and_then(|camera| camera_query.get_mut(camera).ok())
{
let angle = match direction {
Direction::Left => -0.1,
Direction::Right => 0.1,
};
camera_transform.rotate_around(Vec3::ZERO, Quat::from_axis_angle(Vec3::Y, angle));
}
}
}
}

203
vendor/bevy/examples/3d/spotlight.rs vendored Normal file

@@ -0,0 +1,203 @@
//! Illustrates spot lights.
use std::f32::consts::*;
use bevy::{
color::palettes::basic::{MAROON, RED},
math::ops,
pbr::NotShadowCaster,
prelude::*,
};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
const INSTRUCTIONS: &str = "\
Controls
--------
Horizontal Movement: WASD
Vertical Movement: Space and Shift
Rotate Camera: Left and Right Arrows";
fn main() {
App::new()
.insert_resource(AmbientLight {
brightness: 20.0,
..default()
})
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, (light_sway, movement, rotation))
.run();
}
#[derive(Component)]
struct Movable;
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// ground plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(100.0, 100.0))),
MeshMaterial3d(materials.add(Color::WHITE)),
Movable,
));
// cubes
// We're seeding the PRNG here to make this example deterministic for testing purposes.
// This isn't strictly required in practical use unless you need your app to be deterministic.
let mut rng = ChaCha8Rng::seed_from_u64(19878367467713);
let cube_mesh = meshes.add(Cuboid::new(0.5, 0.5, 0.5));
let blue = materials.add(Color::srgb_u8(124, 144, 255));
commands.spawn_batch(
std::iter::repeat_with(move || {
let x = rng.gen_range(-5.0..5.0);
let y = rng.gen_range(0.0..3.0);
let z = rng.gen_range(-5.0..5.0);
(
Mesh3d(cube_mesh.clone()),
MeshMaterial3d(blue.clone()),
Transform::from_xyz(x, y, z),
Movable,
)
})
.take(40),
);
let sphere_mesh = meshes.add(Sphere::new(0.05).mesh().uv(32, 18));
let sphere_mesh_direction = meshes.add(Sphere::new(0.1).mesh().uv(32, 18));
let red_emissive = materials.add(StandardMaterial {
base_color: RED.into(),
emissive: LinearRgba::new(1.0, 0.0, 0.0, 0.0),
..default()
});
let maroon_emissive = materials.add(StandardMaterial {
base_color: MAROON.into(),
emissive: LinearRgba::new(0.369, 0.0, 0.0, 0.0),
..default()
});
for x in 0..4 {
for z in 0..4 {
let x = x as f32 - 2.0;
let z = z as f32 - 2.0;
// red spot_light
commands
.spawn((
SpotLight {
intensity: 40_000.0, // lumens
color: Color::WHITE,
shadows_enabled: true,
inner_angle: PI / 4.0 * 0.85,
outer_angle: PI / 4.0,
..default()
},
Transform::from_xyz(1.0 + x, 2.0, z)
.looking_at(Vec3::new(1.0 + x, 0.0, z), Vec3::X),
))
.with_children(|builder| {
builder.spawn((
Mesh3d(sphere_mesh.clone()),
MeshMaterial3d(red_emissive.clone()),
));
builder.spawn((
Mesh3d(sphere_mesh_direction.clone()),
MeshMaterial3d(maroon_emissive.clone()),
Transform::from_translation(Vec3::Z * -0.1),
NotShadowCaster,
));
});
}
}
// camera
commands.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Transform::from_xyz(-4.0, 5.0, 10.0).looking_at(Vec3::ZERO, Vec3::Y),
));
commands.spawn((
Text::new(INSTRUCTIONS),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
fn light_sway(time: Res<Time>, mut query: Query<(&mut Transform, &mut SpotLight)>) {
for (mut transform, mut angles) in query.iter_mut() {
transform.rotation = Quat::from_euler(
EulerRot::XYZ,
-FRAC_PI_2 + ops::sin(time.elapsed_secs() * 0.67 * 3.0) * 0.5,
ops::sin(time.elapsed_secs() * 3.0) * 0.5,
0.0,
);
let angle = (ops::sin(time.elapsed_secs() * 1.2) + 1.0) * (FRAC_PI_4 - 0.1);
angles.inner_angle = angle * 0.8;
angles.outer_angle = angle;
}
}
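
// Minimal sketch of the cone-angle oscillation above: `(sin(t * 1.2) + 1.0)` maps to [0, 2],
// so the outer angle sweeps [0, 2 * (FRAC_PI_4 - 0.1)] and the inner angle tracks it at 80%.
// The helper is illustrative only and not used by the example.
#[allow(dead_code)]
fn sway_angles(t: f32) -> (f32, f32) {
    let outer = (ops::sin(t * 1.2) + 1.0) * (FRAC_PI_4 - 0.1);
    (outer * 0.8, outer)
}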
fn movement(
input: Res<ButtonInput<KeyCode>>,
time: Res<Time>,
mut query: Query<&mut Transform, With<Movable>>,
) {
// Calculate translation to move the cubes and ground plane
let mut translation = Vec3::ZERO;
// Horizontal forward and backward movement
if input.pressed(KeyCode::KeyW) {
translation.z += 1.0;
} else if input.pressed(KeyCode::KeyS) {
translation.z -= 1.0;
}
// Horizontal left and right movement
if input.pressed(KeyCode::KeyA) {
translation.x += 1.0;
} else if input.pressed(KeyCode::KeyD) {
translation.x -= 1.0;
}
// Vertical movement
if input.pressed(KeyCode::ShiftLeft) {
translation.y += 1.0;
} else if input.pressed(KeyCode::Space) {
translation.y -= 1.0;
}
translation *= 2.0 * time.delta_secs();
// Apply translation
for mut transform in &mut query {
transform.translation += translation;
}
}
fn rotation(
mut transform: Single<&mut Transform, With<Camera>>,
input: Res<ButtonInput<KeyCode>>,
time: Res<Time>,
) {
let delta = time.delta_secs();
if input.pressed(KeyCode::ArrowLeft) {
transform.rotate_around(Vec3::ZERO, Quat::from_rotation_y(delta));
} else if input.pressed(KeyCode::ArrowRight) {
transform.rotate_around(Vec3::ZERO, Quat::from_rotation_y(-delta));
}
}

201
vendor/bevy/examples/3d/ssao.rs vendored Normal file

@@ -0,0 +1,201 @@
//! A scene showcasing screen space ambient occlusion.
use bevy::{
core_pipeline::experimental::taa::{TemporalAntiAliasPlugin, TemporalAntiAliasing},
math::ops,
pbr::{ScreenSpaceAmbientOcclusion, ScreenSpaceAmbientOcclusionQualityLevel},
prelude::*,
render::camera::TemporalJitter,
};
use std::f32::consts::PI;
fn main() {
App::new()
.insert_resource(AmbientLight {
brightness: 1000.,
..default()
})
.add_plugins((DefaultPlugins, TemporalAntiAliasPlugin))
.add_systems(Startup, setup)
.add_systems(Update, update)
.run();
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
commands.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Transform::from_xyz(-2.0, 2.0, -2.0).looking_at(Vec3::ZERO, Vec3::Y),
Msaa::Off,
ScreenSpaceAmbientOcclusion::default(),
TemporalAntiAliasing::default(),
));
let material = materials.add(StandardMaterial {
base_color: Color::srgb(0.5, 0.5, 0.5),
perceptual_roughness: 1.0,
reflectance: 0.0,
..default()
});
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(material.clone()),
Transform::from_xyz(0.0, 0.0, 1.0),
));
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(material.clone()),
Transform::from_xyz(0.0, -1.0, 0.0),
));
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(material),
Transform::from_xyz(1.0, 0.0, 0.0),
));
commands.spawn((
Mesh3d(meshes.add(Sphere::new(0.4).mesh().uv(72, 36))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::srgb(0.4, 0.4, 0.4),
perceptual_roughness: 1.0,
reflectance: 0.0,
..default()
})),
SphereMarker,
));
commands.spawn((
DirectionalLight {
shadows_enabled: true,
..default()
},
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, PI * -0.15, PI * -0.15)),
));
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
fn update(
camera: Single<
(
Entity,
Option<&ScreenSpaceAmbientOcclusion>,
Option<&TemporalJitter>,
),
With<Camera>,
>,
mut text: Single<&mut Text>,
mut sphere: Single<&mut Transform, With<SphereMarker>>,
mut commands: Commands,
keycode: Res<ButtonInput<KeyCode>>,
time: Res<Time>,
) {
sphere.translation.y = ops::sin(time.elapsed_secs() / 1.7) * 0.7;
let (camera_entity, ssao, temporal_jitter) = *camera;
let current_ssao = ssao.cloned().unwrap_or_default();
let mut commands = commands.entity(camera_entity);
commands
.insert_if(
ScreenSpaceAmbientOcclusion {
quality_level: ScreenSpaceAmbientOcclusionQualityLevel::Low,
..current_ssao
},
|| keycode.just_pressed(KeyCode::Digit2),
)
.insert_if(
ScreenSpaceAmbientOcclusion {
quality_level: ScreenSpaceAmbientOcclusionQualityLevel::Medium,
..current_ssao
},
|| keycode.just_pressed(KeyCode::Digit3),
)
.insert_if(
ScreenSpaceAmbientOcclusion {
quality_level: ScreenSpaceAmbientOcclusionQualityLevel::High,
..current_ssao
},
|| keycode.just_pressed(KeyCode::Digit4),
)
.insert_if(
ScreenSpaceAmbientOcclusion {
quality_level: ScreenSpaceAmbientOcclusionQualityLevel::Ultra,
..current_ssao
},
|| keycode.just_pressed(KeyCode::Digit5),
)
.insert_if(
ScreenSpaceAmbientOcclusion {
constant_object_thickness: (current_ssao.constant_object_thickness * 2.0).min(4.0),
..current_ssao
},
|| keycode.just_pressed(KeyCode::ArrowUp),
)
.insert_if(
ScreenSpaceAmbientOcclusion {
constant_object_thickness: (current_ssao.constant_object_thickness * 0.5)
.max(0.0625),
..current_ssao
},
|| keycode.just_pressed(KeyCode::ArrowDown),
);
if keycode.just_pressed(KeyCode::Digit1) {
commands.remove::<ScreenSpaceAmbientOcclusion>();
}
if keycode.just_pressed(KeyCode::Space) {
if temporal_jitter.is_some() {
commands.remove::<TemporalJitter>();
} else {
commands.insert(TemporalJitter::default());
}
}
text.clear();
let (o, l, m, h, u) = match ssao.map(|s| s.quality_level) {
None => ("*", "", "", "", ""),
Some(ScreenSpaceAmbientOcclusionQualityLevel::Low) => ("", "*", "", "", ""),
Some(ScreenSpaceAmbientOcclusionQualityLevel::Medium) => ("", "", "*", "", ""),
Some(ScreenSpaceAmbientOcclusionQualityLevel::High) => ("", "", "", "*", ""),
Some(ScreenSpaceAmbientOcclusionQualityLevel::Ultra) => ("", "", "", "", "*"),
_ => unreachable!(),
};
if let Some(thickness) = ssao.map(|s| s.constant_object_thickness) {
text.push_str(&format!(
"Constant object thickness: {} (Up/Down)\n\n",
thickness
));
}
text.push_str("SSAO Quality:\n");
text.push_str(&format!("(1) {o}Off{o}\n"));
text.push_str(&format!("(2) {l}Low{l}\n"));
text.push_str(&format!("(3) {m}Medium{m}\n"));
text.push_str(&format!("(4) {h}High{h}\n"));
text.push_str(&format!("(5) {u}Ultra{u}\n\n"));
text.push_str("Temporal Antialiasing:\n");
text.push_str(match temporal_jitter {
Some(_) => "(Space) Enabled",
None => "(Space) Disabled",
});
}
#[derive(Component)]
struct SphereMarker;

414
vendor/bevy/examples/3d/ssr.rs vendored Normal file

@@ -0,0 +1,414 @@
//! Demonstrates screen space reflections in deferred rendering.
use std::ops::Range;
use bevy::{
color::palettes::css::{BLACK, WHITE},
core_pipeline::{fxaa::Fxaa, Skybox},
image::{
ImageAddressMode, ImageFilterMode, ImageLoaderSettings, ImageSampler,
ImageSamplerDescriptor,
},
input::mouse::MouseWheel,
math::{vec3, vec4},
pbr::{
DefaultOpaqueRendererMethod, ExtendedMaterial, MaterialExtension, ScreenSpaceReflections,
},
prelude::*,
render::render_resource::{AsBindGroup, ShaderRef, ShaderType},
};
/// This example uses a shader source file from the assets subdirectory
const SHADER_ASSET_PATH: &str = "shaders/water_material.wgsl";
// The speed of camera movement.
const CAMERA_KEYBOARD_ZOOM_SPEED: f32 = 0.1;
const CAMERA_KEYBOARD_ORBIT_SPEED: f32 = 0.02;
const CAMERA_MOUSE_WHEEL_ZOOM_SPEED: f32 = 0.25;
// We clamp camera distances to this range.
const CAMERA_ZOOM_RANGE: Range<f32> = 2.0..12.0;
static TURN_SSR_OFF_HELP_TEXT: &str = "Press Space to turn screen-space reflections off";
static TURN_SSR_ON_HELP_TEXT: &str = "Press Space to turn screen-space reflections on";
static MOVE_CAMERA_HELP_TEXT: &str =
"Press WASD or use the mouse wheel to pan and orbit the camera";
static SWITCH_TO_FLIGHT_HELMET_HELP_TEXT: &str = "Press Enter to switch to the flight helmet model";
static SWITCH_TO_CUBE_HELP_TEXT: &str = "Press Enter to switch to the cube model";
/// A custom [`ExtendedMaterial`] that creates animated water ripples.
#[derive(Asset, TypePath, AsBindGroup, Debug, Clone)]
struct Water {
/// The normal map image.
///
/// Note that, like all normal maps, this must not be loaded as sRGB.
#[texture(100)]
#[sampler(101)]
normals: Handle<Image>,
// Parameters to the water shader.
#[uniform(102)]
settings: WaterSettings,
}
/// Parameters to the water shader.
#[derive(ShaderType, Debug, Clone)]
struct WaterSettings {
/// How much to displace each octave each frame, in the u and v directions.
/// Two octaves are packed into each `vec4`.
octave_vectors: [Vec4; 2],
/// How wide the waves are in each octave.
octave_scales: Vec4,
/// How high the waves are in each octave.
octave_strengths: Vec4,
}
/// The current settings that the user has chosen.
#[derive(Resource)]
struct AppSettings {
/// Whether screen space reflections are on.
ssr_on: bool,
/// Which model is being displayed.
displayed_model: DisplayedModel,
}
/// Which model is being displayed.
#[derive(Default)]
enum DisplayedModel {
/// The cube is being displayed.
#[default]
Cube,
/// The flight helmet is being displayed.
FlightHelmet,
}
/// A marker component for the cube model.
#[derive(Component)]
struct CubeModel;
/// A marker component for the flight helmet model.
#[derive(Component)]
struct FlightHelmetModel;
fn main() {
// Enable deferred rendering, which is necessary for screen-space
// reflections at this time. Disable multisampled antialiasing, as deferred
// rendering doesn't support that.
App::new()
.insert_resource(DefaultOpaqueRendererMethod::deferred())
.init_resource::<AppSettings>()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Screen Space Reflections Example".into(),
..default()
}),
..default()
}))
.add_plugins(MaterialPlugin::<ExtendedMaterial<StandardMaterial, Water>>::default())
.add_systems(Startup, setup)
.add_systems(Update, rotate_model)
.add_systems(Update, move_camera)
.add_systems(Update, adjust_app_settings)
.run();
}
// Set up the scene.
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut standard_materials: ResMut<Assets<StandardMaterial>>,
mut water_materials: ResMut<Assets<ExtendedMaterial<StandardMaterial, Water>>>,
asset_server: Res<AssetServer>,
app_settings: Res<AppSettings>,
) {
spawn_cube(
&mut commands,
&asset_server,
&mut meshes,
&mut standard_materials,
);
spawn_flight_helmet(&mut commands, &asset_server);
spawn_water(
&mut commands,
&asset_server,
&mut meshes,
&mut water_materials,
);
spawn_camera(&mut commands, &asset_server);
spawn_text(&mut commands, &app_settings);
}
// Spawns the rotating cube.
fn spawn_cube(
commands: &mut Commands,
asset_server: &AssetServer,
meshes: &mut Assets<Mesh>,
standard_materials: &mut Assets<StandardMaterial>,
) {
commands
.spawn((
Mesh3d(meshes.add(Cuboid::new(1.0, 1.0, 1.0))),
MeshMaterial3d(standard_materials.add(StandardMaterial {
base_color: Color::from(WHITE),
base_color_texture: Some(asset_server.load("branding/icon.png")),
..default()
})),
Transform::from_xyz(0.0, 0.5, 0.0),
))
.insert(CubeModel);
}
// Spawns the flight helmet.
fn spawn_flight_helmet(commands: &mut Commands, asset_server: &AssetServer) {
commands.spawn((
SceneRoot(
asset_server
.load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")),
),
Transform::from_scale(Vec3::splat(2.5)),
FlightHelmetModel,
Visibility::Hidden,
));
}
// Spawns the water plane.
fn spawn_water(
commands: &mut Commands,
asset_server: &AssetServer,
meshes: &mut Assets<Mesh>,
water_materials: &mut Assets<ExtendedMaterial<StandardMaterial, Water>>,
) {
commands.spawn((
Mesh3d(meshes.add(Plane3d::new(Vec3::Y, Vec2::splat(1.0)))),
MeshMaterial3d(water_materials.add(ExtendedMaterial {
base: StandardMaterial {
base_color: BLACK.into(),
perceptual_roughness: 0.0,
..default()
},
extension: Water {
normals: asset_server.load_with_settings::<Image, ImageLoaderSettings>(
"textures/water_normals.png",
|settings| {
settings.is_srgb = false;
settings.sampler = ImageSampler::Descriptor(ImageSamplerDescriptor {
address_mode_u: ImageAddressMode::Repeat,
address_mode_v: ImageAddressMode::Repeat,
mag_filter: ImageFilterMode::Linear,
min_filter: ImageFilterMode::Linear,
..default()
});
},
),
// These water settings are just random values to create some
// variety.
settings: WaterSettings {
octave_vectors: [
vec4(0.080, 0.059, 0.073, -0.062),
vec4(0.153, 0.138, -0.149, -0.195),
],
octave_scales: vec4(1.0, 2.1, 7.9, 14.9) * 5.0,
octave_strengths: vec4(0.16, 0.18, 0.093, 0.044),
},
},
})),
Transform::from_scale(Vec3::splat(100.0)),
));
}
// Spawns the camera.
fn spawn_camera(commands: &mut Commands, asset_server: &AssetServer) {
// Create the camera. Add an environment map and skybox so the water has
// something interesting to reflect, other than the cube. Enable deferred
// rendering by adding depth and deferred prepasses. Turn on FXAA to make
// the scene look a little nicer. Finally, add screen space reflections.
commands
.spawn((
Camera3d::default(),
Transform::from_translation(vec3(-1.25, 2.25, 4.5)).looking_at(Vec3::ZERO, Vec3::Y),
Camera {
hdr: true,
..default()
},
Msaa::Off,
))
.insert(EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 5000.0,
..default()
})
.insert(Skybox {
image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
brightness: 5000.0,
..default()
})
.insert(ScreenSpaceReflections::default())
.insert(Fxaa::default());
}
// Spawns the help text.
fn spawn_text(commands: &mut Commands, app_settings: &AppSettings) {
commands.spawn((
create_text(app_settings),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
// Creates or recreates the help text.
fn create_text(app_settings: &AppSettings) -> Text {
format!(
"{}\n{}\n{}",
match app_settings.displayed_model {
DisplayedModel::Cube => SWITCH_TO_FLIGHT_HELMET_HELP_TEXT,
DisplayedModel::FlightHelmet => SWITCH_TO_CUBE_HELP_TEXT,
},
if app_settings.ssr_on {
TURN_SSR_OFF_HELP_TEXT
} else {
TURN_SSR_ON_HELP_TEXT
},
MOVE_CAMERA_HELP_TEXT
)
.into()
}
impl MaterialExtension for Water {
fn deferred_fragment_shader() -> ShaderRef {
SHADER_ASSET_PATH.into()
}
}
/// Rotates the model on the Y axis a bit every frame.
fn rotate_model(
mut query: Query<&mut Transform, Or<(With<CubeModel>, With<FlightHelmetModel>)>>,
time: Res<Time>,
) {
for mut transform in query.iter_mut() {
transform.rotation = Quat::from_euler(EulerRot::XYZ, 0.0, time.elapsed_secs(), 0.0);
}
}
// Processes input related to camera movement.
fn move_camera(
keyboard_input: Res<ButtonInput<KeyCode>>,
mut mouse_wheel_input: EventReader<MouseWheel>,
mut cameras: Query<&mut Transform, With<Camera>>,
) {
let (mut distance_delta, mut theta_delta) = (0.0, 0.0);
// Handle keyboard events.
if keyboard_input.pressed(KeyCode::KeyW) {
distance_delta -= CAMERA_KEYBOARD_ZOOM_SPEED;
}
if keyboard_input.pressed(KeyCode::KeyS) {
distance_delta += CAMERA_KEYBOARD_ZOOM_SPEED;
}
if keyboard_input.pressed(KeyCode::KeyA) {
theta_delta += CAMERA_KEYBOARD_ORBIT_SPEED;
}
if keyboard_input.pressed(KeyCode::KeyD) {
theta_delta -= CAMERA_KEYBOARD_ORBIT_SPEED;
}
// Handle mouse events.
for mouse_wheel_event in mouse_wheel_input.read() {
distance_delta -= mouse_wheel_event.y * CAMERA_MOUSE_WHEEL_ZOOM_SPEED;
}
// Update transforms.
for mut camera_transform in cameras.iter_mut() {
let local_z = camera_transform.local_z().as_vec3().normalize_or_zero();
if distance_delta != 0.0 {
camera_transform.translation = (camera_transform.translation.length() + distance_delta)
.clamp(CAMERA_ZOOM_RANGE.start, CAMERA_ZOOM_RANGE.end)
* local_z;
}
if theta_delta != 0.0 {
camera_transform
.translate_around(Vec3::ZERO, Quat::from_axis_angle(Vec3::Y, theta_delta));
camera_transform.look_at(Vec3::ZERO, Vec3::Y);
}
}
}
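
// Minimal sketch of the zoom step above: move the camera along its local +Z axis while
// clamping its distance from the origin to `CAMERA_ZOOM_RANGE`. Illustrative only; the
// example applies this in place to the camera's `Transform`.
#[allow(dead_code)]
fn zoomed_translation(current: Vec3, local_z: Vec3, distance_delta: f32) -> Vec3 {
    (current.length() + distance_delta).clamp(CAMERA_ZOOM_RANGE.start, CAMERA_ZOOM_RANGE.end)
        * local_z
}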
// Adjusts app settings per user input.
fn adjust_app_settings(
mut commands: Commands,
keyboard_input: Res<ButtonInput<KeyCode>>,
mut app_settings: ResMut<AppSettings>,
mut cameras: Query<Entity, With<Camera>>,
mut cube_models: Query<&mut Visibility, (With<CubeModel>, Without<FlightHelmetModel>)>,
mut flight_helmet_models: Query<&mut Visibility, (Without<CubeModel>, With<FlightHelmetModel>)>,
mut text: Query<&mut Text>,
) {
// If there are no changes, we're going to bail for efficiency. Record that
// here.
let mut any_changes = false;
// If the user pressed Space, toggle SSR.
if keyboard_input.just_pressed(KeyCode::Space) {
app_settings.ssr_on = !app_settings.ssr_on;
any_changes = true;
}
// If the user pressed Enter, switch models.
if keyboard_input.just_pressed(KeyCode::Enter) {
app_settings.displayed_model = match app_settings.displayed_model {
DisplayedModel::Cube => DisplayedModel::FlightHelmet,
DisplayedModel::FlightHelmet => DisplayedModel::Cube,
};
any_changes = true;
}
// If there were no changes, bail.
if !any_changes {
return;
}
// Update SSR settings.
for camera in cameras.iter_mut() {
if app_settings.ssr_on {
commands
.entity(camera)
.insert(ScreenSpaceReflections::default());
} else {
commands.entity(camera).remove::<ScreenSpaceReflections>();
}
}
// Set cube model visibility.
for mut cube_visibility in cube_models.iter_mut() {
*cube_visibility = match app_settings.displayed_model {
DisplayedModel::Cube => Visibility::Visible,
_ => Visibility::Hidden,
}
}
// Set flight helmet model visibility.
for mut flight_helmet_visibility in flight_helmet_models.iter_mut() {
*flight_helmet_visibility = match app_settings.displayed_model {
DisplayedModel::FlightHelmet => Visibility::Visible,
_ => Visibility::Hidden,
};
}
// Update the help text.
for mut text in text.iter_mut() {
*text = create_text(&app_settings);
}
}
impl Default for AppSettings {
fn default() -> Self {
Self {
ssr_on: true,
displayed_model: default(),
}
}
}

78
vendor/bevy/examples/3d/texture.rs vendored Normal file

@@ -0,0 +1,78 @@
//! This example shows various ways to configure texture materials in 3D.
use std::f32::consts::PI;
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
/// sets up a scene with textured entities
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// load a texture and choose an aspect ratio for the quad it will be applied to
let texture_handle = asset_server.load("branding/bevy_logo_dark_big.png");
let aspect = 0.25;
// create a new quad mesh. this is what we will apply the texture to
let quad_width = 8.0;
let quad_handle = meshes.add(Rectangle::new(quad_width, quad_width * aspect));
// this material renders the texture normally
let material_handle = materials.add(StandardMaterial {
base_color_texture: Some(texture_handle.clone()),
alpha_mode: AlphaMode::Blend,
unlit: true,
..default()
});
// this material modulates the texture to make it red (and slightly transparent)
let red_material_handle = materials.add(StandardMaterial {
base_color: Color::srgba(1.0, 0.0, 0.0, 0.5),
base_color_texture: Some(texture_handle.clone()),
alpha_mode: AlphaMode::Blend,
unlit: true,
..default()
});
// and let's make this one blue! (and also slightly transparent)
let blue_material_handle = materials.add(StandardMaterial {
base_color: Color::srgba(0.0, 0.0, 1.0, 0.5),
base_color_texture: Some(texture_handle),
alpha_mode: AlphaMode::Blend,
unlit: true,
..default()
});
// textured quad - normal
commands.spawn((
Mesh3d(quad_handle.clone()),
MeshMaterial3d(material_handle),
Transform::from_xyz(0.0, 0.0, 1.5).with_rotation(Quat::from_rotation_x(-PI / 5.0)),
));
// textured quad - modulated (red)
commands.spawn((
Mesh3d(quad_handle.clone()),
MeshMaterial3d(red_material_handle),
Transform::from_rotation(Quat::from_rotation_x(-PI / 5.0)),
));
// textured quad - modulated (blue)
commands.spawn((
Mesh3d(quad_handle),
MeshMaterial3d(blue_material_handle),
Transform::from_xyz(0.0, 0.0, -1.5).with_rotation(Quat::from_rotation_x(-PI / 5.0)),
));
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(3.0, 5.0, 8.0).looking_at(Vec3::ZERO, Vec3::Y),
));
}

616
vendor/bevy/examples/3d/tonemapping.rs vendored Normal file

@@ -0,0 +1,616 @@
//! This example compares tonemapping options.
use bevy::{
core_pipeline::tonemapping::Tonemapping,
pbr::CascadeShadowConfigBuilder,
platform::collections::HashMap,
prelude::*,
reflect::TypePath,
render::{
render_resource::{AsBindGroup, ShaderRef},
view::{ColorGrading, ColorGradingGlobal, ColorGradingSection},
},
};
use std::f32::consts::PI;
/// This example uses a shader source file from the assets subdirectory
const SHADER_ASSET_PATH: &str = "shaders/tonemapping_test_patterns.wgsl";
fn main() {
App::new()
.add_plugins((
DefaultPlugins,
MaterialPlugin::<ColorGradientMaterial>::default(),
))
.insert_resource(CameraTransform(
Transform::from_xyz(0.7, 0.7, 1.0).looking_at(Vec3::new(0.0, 0.3, 0.0), Vec3::Y),
))
.init_resource::<PerMethodSettings>()
.insert_resource(CurrentScene(1))
.insert_resource(SelectedParameter { value: 0, max: 4 })
.add_systems(
Startup,
(
setup,
setup_basic_scene,
setup_color_gradient_scene,
setup_image_viewer_scene,
),
)
.add_systems(
Update,
(
drag_drop_image,
resize_image,
toggle_scene,
toggle_tonemapping_method,
update_color_grading_settings,
update_ui,
),
)
.run();
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
camera_transform: Res<CameraTransform>,
) {
// camera
commands.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
camera_transform.0,
DistanceFog {
color: Color::srgb_u8(43, 44, 47),
falloff: FogFalloff::Linear {
start: 1.0,
end: 8.0,
},
..default()
},
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 2000.0,
..default()
},
));
// ui
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
fn setup_basic_scene(mut commands: Commands, asset_server: Res<AssetServer>) {
// Main scene
commands.spawn((
SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/TonemappingTest/TonemappingTest.gltf"),
)),
SceneNumber(1),
));
// Flight Helmet
commands.spawn((
SceneRoot(
asset_server
.load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")),
),
Transform::from_xyz(0.5, 0.0, -0.5).with_rotation(Quat::from_rotation_y(-0.15 * PI)),
SceneNumber(1),
));
// light
commands.spawn((
DirectionalLight {
illuminance: 15_000.,
shadows_enabled: true,
..default()
},
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, PI * -0.15, PI * -0.15)),
CascadeShadowConfigBuilder {
maximum_distance: 3.0,
first_cascade_far_bound: 0.9,
..default()
}
.build(),
SceneNumber(1),
));
}
fn setup_color_gradient_scene(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<ColorGradientMaterial>>,
camera_transform: Res<CameraTransform>,
) {
let mut transform = camera_transform.0;
transform.translation += *transform.forward();
commands.spawn((
Mesh3d(meshes.add(Rectangle::new(0.7, 0.7))),
MeshMaterial3d(materials.add(ColorGradientMaterial {})),
transform,
Visibility::Hidden,
SceneNumber(2),
));
}
fn setup_image_viewer_scene(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
camera_transform: Res<CameraTransform>,
) {
let mut transform = camera_transform.0;
transform.translation += *transform.forward();
// exr/hdr viewer (exr support requires enabling the `exr` bevy feature)
commands.spawn((
Mesh3d(meshes.add(Rectangle::default())),
MeshMaterial3d(materials.add(StandardMaterial {
base_color_texture: None,
unlit: true,
..default()
})),
transform,
Visibility::Hidden,
SceneNumber(3),
HDRViewer,
));
commands.spawn((
Text::new("Drag and drop an HDR or EXR file"),
TextFont {
font_size: 36.0,
..default()
},
TextColor(Color::BLACK),
TextLayout::new_with_justify(JustifyText::Center),
Node {
align_self: AlignSelf::Center,
margin: UiRect::all(Val::Auto),
..default()
},
SceneNumber(3),
Visibility::Hidden,
));
}
// ----------------------------------------------------------------------------
fn drag_drop_image(
image_mat: Query<&MeshMaterial3d<StandardMaterial>, With<HDRViewer>>,
text: Query<Entity, (With<Text>, With<SceneNumber>)>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut drop_events: EventReader<FileDragAndDrop>,
asset_server: Res<AssetServer>,
mut commands: Commands,
) {
let Some(new_image) = drop_events.read().find_map(|e| match e {
FileDragAndDrop::DroppedFile { path_buf, .. } => {
Some(asset_server.load(path_buf.to_string_lossy().to_string()))
}
_ => None,
}) else {
return;
};
for mat_h in &image_mat {
if let Some(mat) = materials.get_mut(mat_h) {
mat.base_color_texture = Some(new_image.clone());
// Despawn the image viewer instructions
if let Ok(text_entity) = text.single() {
commands.entity(text_entity).despawn();
}
}
}
}
fn resize_image(
image_mesh: Query<(&MeshMaterial3d<StandardMaterial>, &Mesh3d), With<HDRViewer>>,
materials: Res<Assets<StandardMaterial>>,
mut meshes: ResMut<Assets<Mesh>>,
images: Res<Assets<Image>>,
mut image_events: EventReader<AssetEvent<Image>>,
) {
for event in image_events.read() {
let (AssetEvent::Added { id } | AssetEvent::Modified { id }) = event else {
continue;
};
for (mat_h, mesh_h) in &image_mesh {
let Some(mat) = materials.get(mat_h) else {
continue;
};
let Some(ref base_color_texture) = mat.base_color_texture else {
continue;
};
if *id != base_color_texture.id() {
continue;
};
let Some(image_changed) = images.get(*id) else {
continue;
};
let size = image_changed.size_f32().normalize_or_zero() * 1.4;
// Resize Mesh
let quad = Mesh::from(Rectangle::from_size(size));
meshes.insert(mesh_h, quad);
}
}
}
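
// Minimal sketch of the sizing step above: normalizing the image dimensions keeps the quad's
// aspect ratio while bounding its size, since the resulting vector has unit length before the
// 1.4 scale factor is applied. Illustrative only.
#[allow(dead_code)]
fn viewer_quad_size(image_size: Vec2) -> Vec2 {
    image_size.normalize_or_zero() * 1.4
}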
fn toggle_scene(
keys: Res<ButtonInput<KeyCode>>,
mut query: Query<(&mut Visibility, &SceneNumber)>,
mut current_scene: ResMut<CurrentScene>,
) {
let mut pressed = None;
if keys.just_pressed(KeyCode::KeyQ) {
pressed = Some(1);
} else if keys.just_pressed(KeyCode::KeyW) {
pressed = Some(2);
} else if keys.just_pressed(KeyCode::KeyE) {
pressed = Some(3);
}
if let Some(pressed) = pressed {
current_scene.0 = pressed;
for (mut visibility, scene) in query.iter_mut() {
if scene.0 == pressed {
*visibility = Visibility::Visible;
} else {
*visibility = Visibility::Hidden;
}
}
}
}
fn toggle_tonemapping_method(
keys: Res<ButtonInput<KeyCode>>,
mut tonemapping: Single<&mut Tonemapping>,
mut color_grading: Single<&mut ColorGrading>,
per_method_settings: Res<PerMethodSettings>,
) {
if keys.just_pressed(KeyCode::Digit1) {
**tonemapping = Tonemapping::None;
} else if keys.just_pressed(KeyCode::Digit2) {
**tonemapping = Tonemapping::Reinhard;
} else if keys.just_pressed(KeyCode::Digit3) {
**tonemapping = Tonemapping::ReinhardLuminance;
} else if keys.just_pressed(KeyCode::Digit4) {
**tonemapping = Tonemapping::AcesFitted;
} else if keys.just_pressed(KeyCode::Digit5) {
**tonemapping = Tonemapping::AgX;
} else if keys.just_pressed(KeyCode::Digit6) {
**tonemapping = Tonemapping::SomewhatBoringDisplayTransform;
} else if keys.just_pressed(KeyCode::Digit7) {
**tonemapping = Tonemapping::TonyMcMapface;
} else if keys.just_pressed(KeyCode::Digit8) {
**tonemapping = Tonemapping::BlenderFilmic;
}
**color_grading = (*per_method_settings
.settings
.get::<Tonemapping>(&tonemapping)
.as_ref()
.unwrap())
.clone();
}
#[derive(Resource)]
struct SelectedParameter {
value: i32,
max: i32,
}
impl SelectedParameter {
fn next(&mut self) {
self.value = (self.value + 1).rem_euclid(self.max);
}
fn prev(&mut self) {
self.value = (self.value - 1).rem_euclid(self.max);
}
}
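
// Note on `rem_euclid` above: unlike `%`, it wraps negative values back into `0..max`
// (for example, (-1i32).rem_euclid(4) == 3), which is what `prev()` relies on when stepping
// below zero. The helper below is purely illustrative.
#[allow(dead_code)]
fn wrap_param(value: i32, max: i32) -> i32 {
    value.rem_euclid(max)
}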
fn update_color_grading_settings(
keys: Res<ButtonInput<KeyCode>>,
time: Res<Time>,
mut per_method_settings: ResMut<PerMethodSettings>,
tonemapping: Single<&Tonemapping>,
current_scene: Res<CurrentScene>,
mut selected_parameter: ResMut<SelectedParameter>,
) {
let color_grading = per_method_settings.settings.get_mut(*tonemapping).unwrap();
let mut dt = time.delta_secs() * 0.25;
if keys.pressed(KeyCode::ArrowLeft) {
dt = -dt;
}
if keys.just_pressed(KeyCode::ArrowDown) {
selected_parameter.next();
}
if keys.just_pressed(KeyCode::ArrowUp) {
selected_parameter.prev();
}
if keys.pressed(KeyCode::ArrowLeft) || keys.pressed(KeyCode::ArrowRight) {
match selected_parameter.value {
0 => {
color_grading.global.exposure += dt;
}
1 => {
color_grading
.all_sections_mut()
.for_each(|section| section.gamma += dt);
}
2 => {
color_grading
.all_sections_mut()
.for_each(|section| section.saturation += dt);
}
3 => {
color_grading.global.post_saturation += dt;
}
_ => {}
}
}
if keys.just_pressed(KeyCode::Space) {
for (_, grading) in per_method_settings.settings.iter_mut() {
*grading = ColorGrading::default();
}
}
if keys.just_pressed(KeyCode::Enter) && current_scene.0 == 1 {
for (mapper, grading) in per_method_settings.settings.iter_mut() {
*grading = PerMethodSettings::basic_scene_recommendation(*mapper);
}
}
}
fn update_ui(
mut text_query: Single<&mut Text, Without<SceneNumber>>,
settings: Single<(&Tonemapping, &ColorGrading)>,
current_scene: Res<CurrentScene>,
selected_parameter: Res<SelectedParameter>,
mut hide_ui: Local<bool>,
keys: Res<ButtonInput<KeyCode>>,
) {
if keys.just_pressed(KeyCode::KeyH) {
*hide_ui = !*hide_ui;
}
if *hide_ui {
if !text_query.is_empty() {
// Mutable access always triggers change detection,
// so only clear the text if it isn't already empty.
text_query.clear();
}
return;
}
let (tonemapping, color_grading) = *settings;
let tonemapping = *tonemapping;
let mut text = String::with_capacity(text_query.len());
let scn = current_scene.0;
text.push_str("(H) Hide UI\n\n");
text.push_str("Test Scene: \n");
text.push_str(&format!(
"(Q) {} Basic Scene\n",
if scn == 1 { ">" } else { "" }
));
text.push_str(&format!(
"(W) {} Color Sweep\n",
if scn == 2 { ">" } else { "" }
));
text.push_str(&format!(
"(E) {} Image Viewer\n",
if scn == 3 { ">" } else { "" }
));
text.push_str("\n\nTonemapping Method:\n");
text.push_str(&format!(
"(1) {} Disabled\n",
if tonemapping == Tonemapping::None {
">"
} else {
""
}
));
text.push_str(&format!(
"(2) {} Reinhard\n",
if tonemapping == Tonemapping::Reinhard {
"> "
} else {
""
}
));
text.push_str(&format!(
"(3) {} Reinhard Luminance\n",
if tonemapping == Tonemapping::ReinhardLuminance {
">"
} else {
""
}
));
text.push_str(&format!(
"(4) {} ACES Fitted\n",
if tonemapping == Tonemapping::AcesFitted {
">"
} else {
""
}
));
text.push_str(&format!(
"(5) {} AgX\n",
if tonemapping == Tonemapping::AgX {
">"
} else {
""
}
));
text.push_str(&format!(
"(6) {} SomewhatBoringDisplayTransform\n",
if tonemapping == Tonemapping::SomewhatBoringDisplayTransform {
">"
} else {
""
}
));
text.push_str(&format!(
"(7) {} TonyMcMapface\n",
if tonemapping == Tonemapping::TonyMcMapface {
">"
} else {
""
}
));
text.push_str(&format!(
"(8) {} Blender Filmic\n",
if tonemapping == Tonemapping::BlenderFilmic {
">"
} else {
""
}
));
text.push_str("\n\nColor Grading:\n");
text.push_str("(arrow keys)\n");
if selected_parameter.value == 0 {
text.push_str("> ");
}
text.push_str(&format!("Exposure: {}\n", color_grading.global.exposure));
if selected_parameter.value == 1 {
text.push_str("> ");
}
text.push_str(&format!("Gamma: {}\n", color_grading.shadows.gamma));
if selected_parameter.value == 2 {
text.push_str("> ");
}
text.push_str(&format!(
"PreSaturation: {}\n",
color_grading.shadows.saturation
));
if selected_parameter.value == 3 {
text.push_str("> ");
}
text.push_str(&format!(
"PostSaturation: {}\n",
color_grading.global.post_saturation
));
text.push_str("(Space) Reset all to default\n");
if current_scene.0 == 1 {
text.push_str("(Enter) Reset all to scene recommendation\n");
}
if text != text_query.as_str() {
// Mutable access always triggers change detection,
// so only write if the text actually changed.
text_query.0 = text;
}
}
// ----------------------------------------------------------------------------
#[derive(Resource)]
struct PerMethodSettings {
settings: HashMap<Tonemapping, ColorGrading>,
}
impl PerMethodSettings {
fn basic_scene_recommendation(method: Tonemapping) -> ColorGrading {
match method {
Tonemapping::Reinhard | Tonemapping::ReinhardLuminance => ColorGrading {
global: ColorGradingGlobal {
exposure: 0.5,
..default()
},
..default()
},
Tonemapping::AcesFitted => ColorGrading {
global: ColorGradingGlobal {
exposure: 0.35,
..default()
},
..default()
},
Tonemapping::AgX => ColorGrading::with_identical_sections(
ColorGradingGlobal {
exposure: -0.2,
post_saturation: 1.1,
..default()
},
ColorGradingSection {
saturation: 1.1,
..default()
},
),
_ => ColorGrading::default(),
}
}
}
impl Default for PerMethodSettings {
fn default() -> Self {
let mut settings = <HashMap<_, _>>::default();
for method in [
Tonemapping::None,
Tonemapping::Reinhard,
Tonemapping::ReinhardLuminance,
Tonemapping::AcesFitted,
Tonemapping::AgX,
Tonemapping::SomewhatBoringDisplayTransform,
Tonemapping::TonyMcMapface,
Tonemapping::BlenderFilmic,
] {
settings.insert(
method,
PerMethodSettings::basic_scene_recommendation(method),
);
}
Self { settings }
}
}
impl Material for ColorGradientMaterial {
fn fragment_shader() -> ShaderRef {
SHADER_ASSET_PATH.into()
}
}
#[derive(Asset, TypePath, AsBindGroup, Debug, Clone)]
struct ColorGradientMaterial {}
#[derive(Resource)]
struct CameraTransform(Transform);
#[derive(Resource)]
struct CurrentScene(u32);
#[derive(Component)]
struct SceneNumber(u32);
#[derive(Component)]
struct HDRViewer;

616
vendor/bevy/examples/3d/transmission.rs vendored Normal file

@@ -0,0 +1,616 @@
//! This example showcases light transmission
//!
//! ## Controls
//!
//! | Key Binding | Action |
//! |:-------------------|:-----------------------------------------------------|
//! | `J`/`K`/`L`/`;` | Change Screen Space Transmission Quality |
//! | `O` / `P` | Decrease / Increase Screen Space Transmission Steps |
//! | `1` / `2` | Decrease / Increase Diffuse Transmission |
//! | `Q` / `W` | Decrease / Increase Specular Transmission |
//! | `A` / `S` | Decrease / Increase Thickness |
//! | `Z` / `X` | Decrease / Increase IOR |
//! | `E` / `R` | Decrease / Increase Perceptual Roughness |
//! | `U` / `I` | Decrease / Increase Reflectance |
//! | Arrow Keys | Control Camera |
//! | `C` | Randomize Colors |
//! | `H` | Toggle HDR + Bloom |
//! | `D` | Toggle Depth Prepass |
//! | `T` | Toggle TAA |
use std::f32::consts::PI;
use bevy::{
color::palettes::css::*,
core_pipeline::{
bloom::Bloom, core_3d::ScreenSpaceTransmissionQuality, prepass::DepthPrepass,
tonemapping::Tonemapping,
},
math::ops,
pbr::{NotShadowCaster, PointLightShadowMap, TransmittedShadowReceiver},
prelude::*,
render::{
camera::{Exposure, TemporalJitter},
view::{ColorGrading, ColorGradingGlobal},
},
};
#[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))]
use bevy::core_pipeline::experimental::taa::{TemporalAntiAliasPlugin, TemporalAntiAliasing};
use rand::random;
fn main() {
let mut app = App::new();
app.add_plugins(DefaultPlugins)
.insert_resource(ClearColor(Color::BLACK))
.insert_resource(PointLightShadowMap { size: 2048 })
.insert_resource(AmbientLight {
brightness: 0.0,
..default()
})
.add_systems(Startup, setup)
.add_systems(Update, (example_control_system, flicker_system));
// *Note:* TAA is not _required_ for specular transmission, but
// it _greatly enhances_ the look of the resulting blur effects.
// Sadly, it's not available under WebGL.
#[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))]
app.add_plugins(TemporalAntiAliasPlugin);
app.run();
}
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
asset_server: Res<AssetServer>,
) {
let icosphere_mesh = meshes.add(Sphere::new(0.9).mesh().ico(7).unwrap());
let cube_mesh = meshes.add(Cuboid::new(0.7, 0.7, 0.7));
let plane_mesh = meshes.add(Plane3d::default().mesh().size(2.0, 2.0));
let cylinder_mesh = meshes.add(Cylinder::new(0.5, 2.0).mesh().resolution(50));
// Cube #1
commands.spawn((
Mesh3d(cube_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial::default())),
Transform::from_xyz(0.25, 0.5, -2.0).with_rotation(Quat::from_euler(
EulerRot::XYZ,
1.4,
3.7,
21.3,
)),
ExampleControls {
color: true,
specular_transmission: false,
diffuse_transmission: false,
},
));
// Cube #2
commands.spawn((
Mesh3d(cube_mesh),
MeshMaterial3d(materials.add(StandardMaterial::default())),
Transform::from_xyz(-0.75, 0.7, -2.0).with_rotation(Quat::from_euler(
EulerRot::XYZ,
0.4,
2.3,
4.7,
)),
ExampleControls {
color: true,
specular_transmission: false,
diffuse_transmission: false,
},
));
// Candle
commands.spawn((
Mesh3d(cylinder_mesh),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::srgb(0.9, 0.2, 0.3),
diffuse_transmission: 0.7,
perceptual_roughness: 0.32,
thickness: 0.2,
..default()
})),
Transform::from_xyz(-1.0, 0.0, 0.0),
ExampleControls {
color: true,
specular_transmission: false,
diffuse_transmission: true,
},
));
// Candle Flame
let scaled_white = LinearRgba::from(ANTIQUE_WHITE) * 20.;
let scaled_orange = LinearRgba::from(ORANGE_RED) * 4.;
let emissive = LinearRgba {
red: scaled_white.red + scaled_orange.red,
green: scaled_white.green + scaled_orange.green,
blue: scaled_white.blue + scaled_orange.blue,
alpha: 1.0,
};
commands.spawn((
Mesh3d(icosphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
emissive,
diffuse_transmission: 1.0,
..default()
})),
Transform::from_xyz(-1.0, 1.15, 0.0).with_scale(Vec3::new(0.1, 0.2, 0.1)),
Flicker,
NotShadowCaster,
));
// Glass Sphere
commands.spawn((
Mesh3d(icosphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::WHITE,
specular_transmission: 0.9,
diffuse_transmission: 1.0,
thickness: 1.8,
ior: 1.5,
perceptual_roughness: 0.12,
..default()
})),
Transform::from_xyz(1.0, 0.0, 0.0),
ExampleControls {
color: true,
specular_transmission: true,
diffuse_transmission: false,
},
));
// R Sphere
commands.spawn((
Mesh3d(icosphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: RED.into(),
specular_transmission: 0.9,
diffuse_transmission: 1.0,
thickness: 1.8,
ior: 1.5,
perceptual_roughness: 0.12,
..default()
})),
Transform::from_xyz(1.0, -0.5, 2.0).with_scale(Vec3::splat(0.5)),
ExampleControls {
color: true,
specular_transmission: true,
diffuse_transmission: false,
},
));
// G Sphere
commands.spawn((
Mesh3d(icosphere_mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: LIME.into(),
specular_transmission: 0.9,
diffuse_transmission: 1.0,
thickness: 1.8,
ior: 1.5,
perceptual_roughness: 0.12,
..default()
})),
Transform::from_xyz(0.0, -0.5, 2.0).with_scale(Vec3::splat(0.5)),
ExampleControls {
color: true,
specular_transmission: true,
diffuse_transmission: false,
},
));
// B Sphere
commands.spawn((
Mesh3d(icosphere_mesh),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: BLUE.into(),
specular_transmission: 0.9,
diffuse_transmission: 1.0,
thickness: 1.8,
ior: 1.5,
perceptual_roughness: 0.12,
..default()
})),
Transform::from_xyz(-1.0, -0.5, 2.0).with_scale(Vec3::splat(0.5)),
ExampleControls {
color: true,
specular_transmission: true,
diffuse_transmission: false,
},
));
// Chessboard Plane
let black_material = materials.add(StandardMaterial {
base_color: Color::BLACK,
reflectance: 0.3,
perceptual_roughness: 0.8,
..default()
});
let white_material = materials.add(StandardMaterial {
base_color: Color::WHITE,
reflectance: 0.3,
perceptual_roughness: 0.8,
..default()
});
for x in -3..4 {
for z in -3..4 {
commands.spawn((
Mesh3d(plane_mesh.clone()),
MeshMaterial3d(if (x + z) % 2 == 0 {
black_material.clone()
} else {
white_material.clone()
}),
Transform::from_xyz(x as f32 * 2.0, -1.0, z as f32 * 2.0),
ExampleControls {
color: true,
specular_transmission: false,
diffuse_transmission: false,
},
));
}
}
// Paper
commands.spawn((
Mesh3d(plane_mesh),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::WHITE,
diffuse_transmission: 0.6,
perceptual_roughness: 0.8,
reflectance: 1.0,
double_sided: true,
cull_mode: None,
..default()
})),
Transform::from_xyz(0.0, 0.5, -3.0)
.with_scale(Vec3::new(2.0, 1.0, 1.0))
.with_rotation(Quat::from_euler(EulerRot::XYZ, PI / 2.0, 0.0, 0.0)),
TransmittedShadowReceiver,
ExampleControls {
specular_transmission: false,
color: false,
diffuse_transmission: true,
},
));
// Candle Light
commands.spawn((
Transform::from_xyz(-1.0, 1.7, 0.0),
PointLight {
color: Color::from(
LinearRgba::from(ANTIQUE_WHITE).mix(&LinearRgba::from(ORANGE_RED), 0.2),
),
intensity: 4_000.0,
radius: 0.2,
range: 5.0,
shadows_enabled: true,
..default()
},
Flicker,
));
// Camera
commands.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Transform::from_xyz(1.0, 1.8, 7.0).looking_at(Vec3::ZERO, Vec3::Y),
ColorGrading {
global: ColorGradingGlobal {
post_saturation: 1.2,
..default()
},
..default()
},
Tonemapping::TonyMcMapface,
Exposure { ev100: 6.0 },
#[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))]
Msaa::Off,
#[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))]
TemporalAntiAliasing::default(),
EnvironmentMapLight {
intensity: 25.0,
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
..default()
},
Bloom::default(),
));
// Controls Text
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
ExampleDisplay,
));
}
#[derive(Component)]
struct Flicker;
#[derive(Component)]
struct ExampleControls {
diffuse_transmission: bool,
specular_transmission: bool,
color: bool,
}
struct ExampleState {
diffuse_transmission: f32,
specular_transmission: f32,
thickness: f32,
ior: f32,
perceptual_roughness: f32,
reflectance: f32,
auto_camera: bool,
}
#[derive(Component)]
struct ExampleDisplay;
impl Default for ExampleState {
fn default() -> Self {
ExampleState {
diffuse_transmission: 0.5,
specular_transmission: 0.9,
thickness: 1.8,
ior: 1.5,
perceptual_roughness: 0.12,
reflectance: 0.5,
auto_camera: true,
}
}
}
fn example_control_system(
mut commands: Commands,
mut materials: ResMut<Assets<StandardMaterial>>,
controllable: Query<(&MeshMaterial3d<StandardMaterial>, &ExampleControls)>,
camera: Single<
(
Entity,
&mut Camera,
&mut Camera3d,
&mut Transform,
Option<&DepthPrepass>,
Option<&TemporalJitter>,
),
With<Camera3d>,
>,
mut display: Single<&mut Text, With<ExampleDisplay>>,
mut state: Local<ExampleState>,
time: Res<Time>,
input: Res<ButtonInput<KeyCode>>,
) {
if input.pressed(KeyCode::Digit2) {
state.diffuse_transmission = (state.diffuse_transmission + time.delta_secs()).min(1.0);
} else if input.pressed(KeyCode::Digit1) {
state.diffuse_transmission = (state.diffuse_transmission - time.delta_secs()).max(0.0);
}
if input.pressed(KeyCode::KeyW) {
state.specular_transmission = (state.specular_transmission + time.delta_secs()).min(1.0);
} else if input.pressed(KeyCode::KeyQ) {
state.specular_transmission = (state.specular_transmission - time.delta_secs()).max(0.0);
}
if input.pressed(KeyCode::KeyS) {
state.thickness = (state.thickness + time.delta_secs()).min(5.0);
} else if input.pressed(KeyCode::KeyA) {
state.thickness = (state.thickness - time.delta_secs()).max(0.0);
}
if input.pressed(KeyCode::KeyX) {
state.ior = (state.ior + time.delta_secs()).min(3.0);
} else if input.pressed(KeyCode::KeyZ) {
state.ior = (state.ior - time.delta_secs()).max(1.0);
}
if input.pressed(KeyCode::KeyI) {
state.reflectance = (state.reflectance + time.delta_secs()).min(1.0);
} else if input.pressed(KeyCode::KeyU) {
state.reflectance = (state.reflectance - time.delta_secs()).max(0.0);
}
if input.pressed(KeyCode::KeyR) {
state.perceptual_roughness = (state.perceptual_roughness + time.delta_secs()).min(1.0);
} else if input.pressed(KeyCode::KeyE) {
state.perceptual_roughness = (state.perceptual_roughness - time.delta_secs()).max(0.0);
}
let randomize_colors = input.just_pressed(KeyCode::KeyC);
for (material_handle, controls) in &controllable {
let material = materials.get_mut(material_handle).unwrap();
if controls.specular_transmission {
material.specular_transmission = state.specular_transmission;
material.thickness = state.thickness;
material.ior = state.ior;
material.perceptual_roughness = state.perceptual_roughness;
material.reflectance = state.reflectance;
}
if controls.diffuse_transmission {
material.diffuse_transmission = state.diffuse_transmission;
}
if controls.color && randomize_colors {
material.base_color =
Color::srgba(random(), random(), random(), material.base_color.alpha());
}
}
let (
camera_entity,
mut camera,
mut camera_3d,
mut camera_transform,
depth_prepass,
temporal_jitter,
) = camera.into_inner();
if input.just_pressed(KeyCode::KeyH) {
camera.hdr = !camera.hdr;
}
#[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))]
if input.just_pressed(KeyCode::KeyD) {
if depth_prepass.is_none() {
commands.entity(camera_entity).insert(DepthPrepass);
} else {
commands.entity(camera_entity).remove::<DepthPrepass>();
}
}
#[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))]
if input.just_pressed(KeyCode::KeyT) {
if temporal_jitter.is_none() {
commands
.entity(camera_entity)
.insert((TemporalJitter::default(), TemporalAntiAliasing::default()));
} else {
commands
.entity(camera_entity)
.remove::<(TemporalJitter, TemporalAntiAliasing)>();
}
}
if input.just_pressed(KeyCode::KeyO) && camera_3d.screen_space_specular_transmission_steps > 0 {
camera_3d.screen_space_specular_transmission_steps -= 1;
}
if input.just_pressed(KeyCode::KeyP) && camera_3d.screen_space_specular_transmission_steps < 4 {
camera_3d.screen_space_specular_transmission_steps += 1;
}
if input.just_pressed(KeyCode::KeyJ) {
camera_3d.screen_space_specular_transmission_quality = ScreenSpaceTransmissionQuality::Low;
}
if input.just_pressed(KeyCode::KeyK) {
camera_3d.screen_space_specular_transmission_quality =
ScreenSpaceTransmissionQuality::Medium;
}
if input.just_pressed(KeyCode::KeyL) {
camera_3d.screen_space_specular_transmission_quality = ScreenSpaceTransmissionQuality::High;
}
if input.just_pressed(KeyCode::Semicolon) {
camera_3d.screen_space_specular_transmission_quality =
ScreenSpaceTransmissionQuality::Ultra;
}
let rotation = if input.pressed(KeyCode::ArrowRight) {
state.auto_camera = false;
time.delta_secs()
} else if input.pressed(KeyCode::ArrowLeft) {
state.auto_camera = false;
-time.delta_secs()
} else if state.auto_camera {
time.delta_secs() * 0.25
} else {
0.0
};
let distance_change =
if input.pressed(KeyCode::ArrowDown) && camera_transform.translation.length() < 25.0 {
time.delta_secs()
} else if input.pressed(KeyCode::ArrowUp) && camera_transform.translation.length() > 2.0 {
-time.delta_secs()
} else {
0.0
};
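// Scaling the translation by exp(distance_change) zooms the camera in or out by a
// constant ratio per second of input, so the zoom feels the same whether the camera
// is close to or far from the scene.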
camera_transform.translation *= ops::exp(distance_change);
camera_transform.rotate_around(
Vec3::ZERO,
Quat::from_euler(EulerRot::XYZ, 0.0, rotation, 0.0),
);
display.0 = format!(
concat!(
" J / K / L / ; Screen Space Specular Transmissive Quality: {:?}\n",
" O / P Screen Space Specular Transmissive Steps: {}\n",
" 1 / 2 Diffuse Transmission: {:.2}\n",
" Q / W Specular Transmission: {:.2}\n",
" A / S Thickness: {:.2}\n",
" Z / X IOR: {:.2}\n",
" E / R Perceptual Roughness: {:.2}\n",
" U / I Reflectance: {:.2}\n",
" Arrow Keys Control Camera\n",
" C Randomize Colors\n",
" H HDR + Bloom: {}\n",
" D Depth Prepass: {}\n",
" T TAA: {}\n",
),
camera_3d.screen_space_specular_transmission_quality,
camera_3d.screen_space_specular_transmission_steps,
state.diffuse_transmission,
state.specular_transmission,
state.thickness,
state.ior,
state.perceptual_roughness,
state.reflectance,
if camera.hdr { "ON " } else { "OFF" },
if cfg!(any(feature = "webgpu", not(target_arch = "wasm32"))) {
if depth_prepass.is_some() {
"ON "
} else {
"OFF"
}
} else {
"N/A (WebGL)"
},
if cfg!(any(feature = "webgpu", not(target_arch = "wasm32"))) {
if temporal_jitter.is_some() {
if depth_prepass.is_some() {
"ON "
} else {
"N/A (Needs Depth Prepass)"
}
} else {
"OFF"
}
} else {
"N/A (WebGL)"
},
);
}
fn flicker_system(
mut flame: Single<&mut Transform, (With<Flicker>, With<Mesh3d>)>,
light: Single<(&mut PointLight, &mut Transform), (With<Flicker>, Without<Mesh3d>)>,
time: Res<Time>,
) {
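// Sum a few cosines of the elapsed time at different frequencies to build a cheap,
// non-repeating flicker signal; `a`, `b` and `c` drive the light intensity and the
// small positional offsets applied below.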
let s = time.elapsed_secs();
let a = ops::cos(s * 6.0) * 0.0125 + ops::cos(s * 4.0) * 0.025;
let b = ops::cos(s * 5.0) * 0.0125 + ops::cos(s * 3.0) * 0.025;
let c = ops::cos(s * 7.0) * 0.0125 + ops::cos(s * 2.0) * 0.025;
let (mut light, mut light_transform) = light.into_inner();
light.intensity = 4_000.0 + 3000.0 * (a + b + c);
flame.translation = Vec3::new(-1.0, 1.23, 0.0);
flame.look_at(Vec3::new(-1.0 - c, 1.7 - b, 0.0 - a), Vec3::X);
flame.rotate(Quat::from_euler(EulerRot::XYZ, 0.0, 0.0, PI / 2.0));
light_transform.translation = Vec3::new(-1.0 - c, 1.7, 0.0 - a);
flame.translation = Vec3::new(-1.0 - c, 1.23, 0.0 - a);
}

115
vendor/bevy/examples/3d/transparency_3d.rs vendored Normal file
View File

@@ -0,0 +1,115 @@
//! Demonstrates how to use transparency in 3D.
//! Shows the effects of different blend modes.
//! The `fade_transparency` system smoothly changes the transparency over time.
use bevy::{math::ops, prelude::*};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, fade_transparency)
.run();
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// Opaque plane, uses `alpha_mode: Opaque` by default
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(6.0, 6.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
));
// Transparent sphere, uses `alpha_mode: Mask(f32)`
commands.spawn((
Mesh3d(meshes.add(Sphere::new(0.5).mesh().ico(3).unwrap())),
MeshMaterial3d(materials.add(StandardMaterial {
// Alpha channel of the color controls transparency.
// We set it to 0.0 here, because it will be changed over time in the
// `fade_transparency` function.
// Note that the transparency has no effect on the object's shadow.
base_color: Color::srgba(0.2, 0.7, 0.1, 0.0),
// Mask sets a cutoff for transparency. Alpha values below are fully transparent,
// alpha values above are fully opaque.
alpha_mode: AlphaMode::Mask(0.5),
..default()
})),
Transform::from_xyz(1.0, 0.5, -1.5),
));
// Transparent unlit sphere, uses `alpha_mode: Mask(f32)`
commands.spawn((
Mesh3d(meshes.add(Sphere::new(0.5).mesh().ico(3).unwrap())),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::srgba(0.2, 0.7, 0.1, 0.0),
alpha_mode: AlphaMode::Mask(0.5),
unlit: true,
..default()
})),
Transform::from_xyz(-1.0, 0.5, -1.5),
));
// Transparent cube, uses `alpha_mode: Blend`
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
// Notice how there is no need to set the `alpha_mode` explicitly here.
// When converting a color to a material using `into()`, the alpha mode is
// automatically set to `Blend` if the alpha channel is anything lower than 1.0.
MeshMaterial3d(materials.add(Color::srgba(0.5, 0.5, 1.0, 0.0))),
Transform::from_xyz(0.0, 0.5, 0.0),
));
// Transparent cube, uses `alpha_mode: AlphaToCoverage`
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::srgba(0.5, 1.0, 0.5, 0.0),
alpha_mode: AlphaMode::AlphaToCoverage,
..default()
})),
Transform::from_xyz(-1.5, 0.5, 0.0),
));
// Opaque sphere
commands.spawn((
Mesh3d(meshes.add(Sphere::new(0.5).mesh().ico(3).unwrap())),
MeshMaterial3d(materials.add(Color::srgb(0.7, 0.2, 0.1))),
Transform::from_xyz(0.0, 0.5, -1.5),
));
// Light
commands.spawn((
PointLight {
shadows_enabled: true,
..default()
},
Transform::from_xyz(4.0, 8.0, 4.0),
));
// Camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-2.0, 3.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
));
}
/// Fades the alpha channel of all materials between 0 and 1 over time.
/// Each blend mode responds differently to this:
/// - [`Opaque`](AlphaMode::Opaque): Ignores the alpha channel altogether; these materials stay completely opaque.
/// - [`Mask(f32)`](AlphaMode::Mask): Object appears when the alpha value goes above the mask's threshold, disappears
/// when the alpha value goes back below the threshold.
/// - [`Blend`](AlphaMode::Blend): Object fades in and out smoothly.
/// - [`AlphaToCoverage`](AlphaMode::AlphaToCoverage): Object fades in and out
/// in steps corresponding to the number of multisample antialiasing (MSAA)
/// samples in use. For example, assuming 8xMSAA, the object will be
/// completely opaque, then will be 7/8 opaque (1/8 transparent), then will be
/// 6/8 opaque, then 5/8, etc.
pub fn fade_transparency(time: Res<Time>, mut materials: ResMut<Assets<StandardMaterial>>) {
let alpha = (ops::sin(time.elapsed_secs()) / 2.0) + 0.5;
for (_, material) in materials.iter_mut() {
material.base_color.set_alpha(alpha);
}
}

57
vendor/bevy/examples/3d/two_passes.rs vendored Normal file
View File

@@ -0,0 +1,57 @@
//! Renders two 3d passes to the same window from different perspectives.
use bevy::prelude::*;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
/// Set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// Plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
));
// Cube
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),
Transform::from_xyz(0.0, 0.5, 0.0),
));
// Light
commands.spawn((
PointLight {
shadows_enabled: true,
..default()
},
Transform::from_xyz(4.0, 8.0, 4.0),
));
// Camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
));
// camera
commands.spawn((
Camera3d::default(),
Camera {
// renders after / on top of the main camera
order: 1,
clear_color: ClearColorConfig::None,
..default()
},
Transform::from_xyz(10.0, 10., -5.0).looking_at(Vec3::ZERO, Vec3::Y),
));
}

76
vendor/bevy/examples/3d/update_gltf_scene.rs vendored Normal file
View File

@@ -0,0 +1,76 @@
//! Update a scene from a glTF file, either by spawning the scene as a child of another entity,
//! or by accessing the entities of the scene.
use bevy::{pbr::DirectionalLightShadowMap, prelude::*};
fn main() {
App::new()
.insert_resource(DirectionalLightShadowMap { size: 4096 })
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, move_scene_entities)
.run();
}
#[derive(Component)]
struct MovedScene;
fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
commands.spawn((
Transform::from_xyz(4.0, 25.0, 8.0).looking_at(Vec3::ZERO, Vec3::Y),
DirectionalLight {
shadows_enabled: true,
..default()
},
));
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-0.5, 0.9, 1.5).looking_at(Vec3::new(-0.5, 0.3, 0.0), Vec3::Y),
EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 150.0,
..default()
},
));
// Spawn the scene as a child of this entity at the given transform
commands.spawn((
Transform::from_xyz(-1.0, 0.0, 0.0),
SceneRoot(
asset_server
.load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")),
),
));
// Spawn a second scene, and add a tag component to be able to target it later
commands.spawn((
SceneRoot(
asset_server
.load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")),
),
MovedScene,
));
}
// This system will move all entities that are descendants of MovedScene (which will be all entities spawned in the scene)
fn move_scene_entities(
time: Res<Time>,
moved_scene: Query<Entity, With<MovedScene>>,
children: Query<&Children>,
mut transforms: Query<&mut Transform>,
) {
for moved_scene_entity in &moved_scene {
let mut offset = 0.;
for entity in children.iter_descendants(moved_scene_entity) {
if let Ok(mut transform) = transforms.get_mut(entity) {
transform.translation = Vec3::new(
offset * ops::sin(time.elapsed_secs()) / 20.,
0.,
ops::cos(time.elapsed_secs()) / 20.,
);
offset += 0.5;
}
}
}
}

58
vendor/bevy/examples/3d/vertex_colors.rs vendored Normal file
View File

@@ -0,0 +1,58 @@
//! Illustrates the use of vertex colors.
use bevy::{prelude::*, render::mesh::VertexAttributeValues};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.run();
}
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
));
// cube
// Assign vertex colors based on vertex positions
let mut colorful_cube = Mesh::from(Cuboid::default());
if let Some(VertexAttributeValues::Float32x3(positions)) =
colorful_cube.attribute(Mesh::ATTRIBUTE_POSITION)
{
let colors: Vec<[f32; 4]> = positions
.iter()
.map(|[r, g, b]| [(1. - *r) / 2., (1. - *g) / 2., (1. - *b) / 2., 1.])
.collect();
colorful_cube.insert_attribute(Mesh::ATTRIBUTE_COLOR, colors);
}
commands.spawn((
Mesh3d(meshes.add(colorful_cube)),
// This is the default color, but note that vertex colors are
// multiplied by the base color, so you'll likely want this to be
// white if using vertex colors.
MeshMaterial3d(materials.add(Color::srgb(1., 1., 1.))),
Transform::from_xyz(0.0, 0.5, 0.0),
));
// Light
commands.spawn((
PointLight {
shadows_enabled: true,
..default()
},
Transform::from_xyz(4.0, 5.0, 4.0).looking_at(Vec3::ZERO, Vec3::Y),
));
// Camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
));
}

360
vendor/bevy/examples/3d/visibility_range.rs vendored Normal file
View File

@@ -0,0 +1,360 @@
//! Demonstrates visibility ranges, also known as HLODs.
use std::f32::consts::PI;
use bevy::{
core_pipeline::prepass::{DepthPrepass, NormalPrepass},
input::mouse::MouseWheel,
math::vec3,
pbr::{light_consts::lux::FULL_DAYLIGHT, CascadeShadowConfigBuilder},
prelude::*,
render::view::VisibilityRange,
};
// Where the camera is focused.
const CAMERA_FOCAL_POINT: Vec3 = vec3(0.0, 0.3, 0.0);
// Speed in units per frame.
const CAMERA_KEYBOARD_ZOOM_SPEED: f32 = 0.05;
// Speed in radians per frame.
const CAMERA_KEYBOARD_PAN_SPEED: f32 = 0.01;
// Speed in units per frame.
const CAMERA_MOUSE_MOVEMENT_SPEED: f32 = 0.25;
// The minimum distance that the camera is allowed to be from the model.
const MIN_ZOOM_DISTANCE: f32 = 0.5;
// The visibility ranges for high-poly and low-poly models respectively, when
// both models are being shown.
static NORMAL_VISIBILITY_RANGE_HIGH_POLY: VisibilityRange = VisibilityRange {
start_margin: 0.0..0.0,
end_margin: 3.0..4.0,
use_aabb: false,
};
static NORMAL_VISIBILITY_RANGE_LOW_POLY: VisibilityRange = VisibilityRange {
start_margin: 3.0..4.0,
end_margin: 8.0..9.0,
use_aabb: false,
};
// A visibility range that we use to always show a model (until the camera is so
// far zoomed out that it's culled entirely).
static SINGLE_MODEL_VISIBILITY_RANGE: VisibilityRange = VisibilityRange {
start_margin: 0.0..0.0,
end_margin: 8.0..9.0,
use_aabb: false,
};
// A visibility range that we use to completely hide a model.
static INVISIBLE_VISIBILITY_RANGE: VisibilityRange = VisibilityRange {
start_margin: 0.0..0.0,
end_margin: 0.0..0.0,
use_aabb: false,
};
// Allows us to identify the main model.
#[derive(Component, Debug, Clone, Copy, PartialEq)]
enum MainModel {
// The high-poly version.
HighPoly,
// The low-poly version.
LowPoly,
}
// The current mode.
#[derive(Default, Resource)]
struct AppStatus {
// Whether to show only one model.
show_one_model_only: Option<MainModel>,
// Whether to enable the prepass.
prepass: bool,
}
// Sets up the app.
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Visibility Range Example".into(),
..default()
}),
..default()
}))
.init_resource::<AppStatus>()
.add_systems(Startup, setup)
.add_systems(
Update,
(
move_camera,
set_visibility_ranges,
update_help_text,
update_mode,
toggle_prepass,
),
)
.run();
}
// Set up a simple 3D scene. Load the two meshes.
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
asset_server: Res<AssetServer>,
app_status: Res<AppStatus>,
) {
// Spawn a plane.
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(50.0, 50.0))),
MeshMaterial3d(materials.add(Color::srgb(0.1, 0.2, 0.1))),
));
// Spawn the two HLODs.
commands.spawn((
SceneRoot(
asset_server
.load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")),
),
MainModel::HighPoly,
));
commands.spawn((
SceneRoot(
asset_server.load(
GltfAssetLabel::Scene(0)
.from_asset("models/FlightHelmetLowPoly/FlightHelmetLowPoly.gltf"),
),
),
MainModel::LowPoly,
));
// Spawn a light.
commands.spawn((
DirectionalLight {
illuminance: FULL_DAYLIGHT,
shadows_enabled: true,
..default()
},
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, PI * -0.15, PI * -0.15)),
CascadeShadowConfigBuilder {
maximum_distance: 30.0,
first_cascade_far_bound: 0.9,
..default()
}
.build(),
));
// Spawn a camera.
commands
.spawn((
Camera3d::default(),
Transform::from_xyz(0.7, 0.7, 1.0).looking_at(CAMERA_FOCAL_POINT, Vec3::Y),
))
.insert(EnvironmentMapLight {
diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"),
specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
intensity: 150.0,
..default()
});
// Create the text.
commands.spawn((
app_status.create_text(),
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
// We need to add the `VisibilityRange` components manually, as glTF currently
// has no way to specify visibility ranges. This system watches for new meshes,
// determines which `Scene` they're under, and adds the `VisibilityRange`
// component as appropriate.
fn set_visibility_ranges(
mut commands: Commands,
mut new_meshes: Query<Entity, Added<Mesh3d>>,
children: Query<(Option<&ChildOf>, Option<&MainModel>)>,
) {
// Loop over each newly-added mesh.
for new_mesh in new_meshes.iter_mut() {
// Search for the nearest ancestor `MainModel` component.
let (mut current, mut main_model) = (new_mesh, None);
while let Ok((child_of, maybe_main_model)) = children.get(current) {
if let Some(model) = maybe_main_model {
main_model = Some(model);
break;
}
match child_of {
Some(child_of) => current = child_of.parent(),
None => break,
}
}
// Add the `VisibilityRange` component.
match main_model {
Some(MainModel::HighPoly) => {
commands
.entity(new_mesh)
.insert(NORMAL_VISIBILITY_RANGE_HIGH_POLY.clone())
.insert(MainModel::HighPoly);
}
Some(MainModel::LowPoly) => {
commands
.entity(new_mesh)
.insert(NORMAL_VISIBILITY_RANGE_LOW_POLY.clone())
.insert(MainModel::LowPoly);
}
None => {}
}
}
}
// Process the movement controls.
fn move_camera(
keyboard_input: Res<ButtonInput<KeyCode>>,
mut mouse_wheel_events: EventReader<MouseWheel>,
mut cameras: Query<&mut Transform, With<Camera3d>>,
) {
let (mut zoom_delta, mut theta_delta) = (0.0, 0.0);
// Process zoom in and out via the keyboard.
if keyboard_input.pressed(KeyCode::KeyW) || keyboard_input.pressed(KeyCode::ArrowUp) {
zoom_delta -= CAMERA_KEYBOARD_ZOOM_SPEED;
} else if keyboard_input.pressed(KeyCode::KeyS) || keyboard_input.pressed(KeyCode::ArrowDown) {
zoom_delta += CAMERA_KEYBOARD_ZOOM_SPEED;
}
// Process left and right pan via the keyboard.
if keyboard_input.pressed(KeyCode::KeyA) || keyboard_input.pressed(KeyCode::ArrowLeft) {
theta_delta -= CAMERA_KEYBOARD_PAN_SPEED;
} else if keyboard_input.pressed(KeyCode::KeyD) || keyboard_input.pressed(KeyCode::ArrowRight) {
theta_delta += CAMERA_KEYBOARD_PAN_SPEED;
}
// Process zoom in and out via the mouse wheel.
for event in mouse_wheel_events.read() {
zoom_delta -= event.y * CAMERA_MOUSE_MOVEMENT_SPEED;
}
// Update the camera transform.
for transform in cameras.iter_mut() {
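// Decompose the camera position into a direction and a distance from the origin,
// rotate the direction around the Y axis by the pan amount, and re-apply the
// distance (clamped to the minimum zoom distance).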
let transform = transform.into_inner();
let direction = transform.translation.normalize_or_zero();
let magnitude = transform.translation.length();
let new_direction = Mat3::from_rotation_y(theta_delta) * direction;
let new_magnitude = (magnitude + zoom_delta).max(MIN_ZOOM_DISTANCE);
transform.translation = new_direction * new_magnitude;
transform.look_at(CAMERA_FOCAL_POINT, Vec3::Y);
}
}
// Toggles modes if the user requests.
fn update_mode(
mut meshes: Query<(&mut VisibilityRange, &MainModel)>,
keyboard_input: Res<ButtonInput<KeyCode>>,
mut app_status: ResMut<AppStatus>,
) {
// Toggle the mode as requested.
if keyboard_input.just_pressed(KeyCode::Digit1) || keyboard_input.just_pressed(KeyCode::Numpad1)
{
app_status.show_one_model_only = None;
} else if keyboard_input.just_pressed(KeyCode::Digit2)
|| keyboard_input.just_pressed(KeyCode::Numpad2)
{
app_status.show_one_model_only = Some(MainModel::HighPoly);
} else if keyboard_input.just_pressed(KeyCode::Digit3)
|| keyboard_input.just_pressed(KeyCode::Numpad3)
{
app_status.show_one_model_only = Some(MainModel::LowPoly);
} else {
return;
}
// Update the visibility ranges as appropriate.
for (mut visibility_range, main_model) in meshes.iter_mut() {
*visibility_range = match (main_model, app_status.show_one_model_only) {
(&MainModel::HighPoly, Some(MainModel::LowPoly))
| (&MainModel::LowPoly, Some(MainModel::HighPoly)) => {
INVISIBLE_VISIBILITY_RANGE.clone()
}
(&MainModel::HighPoly, Some(MainModel::HighPoly))
| (&MainModel::LowPoly, Some(MainModel::LowPoly)) => {
SINGLE_MODEL_VISIBILITY_RANGE.clone()
}
(&MainModel::HighPoly, None) => NORMAL_VISIBILITY_RANGE_HIGH_POLY.clone(),
(&MainModel::LowPoly, None) => NORMAL_VISIBILITY_RANGE_LOW_POLY.clone(),
}
}
}
// Toggles the prepass if the user requests.
fn toggle_prepass(
mut commands: Commands,
cameras: Query<Entity, With<Camera3d>>,
keyboard_input: Res<ButtonInput<KeyCode>>,
mut app_status: ResMut<AppStatus>,
) {
if !keyboard_input.just_pressed(KeyCode::Space) {
return;
}
app_status.prepass = !app_status.prepass;
for camera in cameras.iter() {
if app_status.prepass {
commands
.entity(camera)
.insert(DepthPrepass)
.insert(NormalPrepass);
} else {
commands
.entity(camera)
.remove::<DepthPrepass>()
.remove::<NormalPrepass>();
}
}
}
// A system that updates the help text.
fn update_help_text(mut text_query: Query<&mut Text>, app_status: Res<AppStatus>) {
for mut text in text_query.iter_mut() {
*text = app_status.create_text();
}
}
impl AppStatus {
// Creates and returns help text reflecting the app status.
fn create_text(&self) -> Text {
format!(
"\
{} (1) Switch from high-poly to low-poly based on camera distance
{} (2) Show only the high-poly model
{} (3) Show only the low-poly model
Press 1, 2, or 3 to switch which model is shown
Press WASD or use the mouse wheel to move the camera
Press Space to {} the prepass",
if self.show_one_model_only.is_none() {
'>'
} else {
' '
},
if self.show_one_model_only == Some(MainModel::HighPoly) {
'>'
} else {
' '
},
if self.show_one_model_only == Some(MainModel::LowPoly) {
'>'
} else {
' '
},
if self.prepass { "disable" } else { "enable" }
)
.into()
}
}

269
vendor/bevy/examples/3d/volumetric_fog.rs vendored Normal file
View File

@@ -0,0 +1,269 @@
//! Demonstrates volumetric fog and lighting (light shafts or god rays).
use bevy::{
color::palettes::css::RED,
core_pipeline::{bloom::Bloom, tonemapping::Tonemapping, Skybox},
math::vec3,
pbr::{FogVolume, VolumetricFog, VolumetricLight},
prelude::*,
};
const DIRECTIONAL_LIGHT_MOVEMENT_SPEED: f32 = 0.02;
/// The current settings that the user has chosen.
#[derive(Resource)]
struct AppSettings {
/// Whether volumetric spot light is on.
volumetric_spotlight: bool,
/// Whether volumetric point light is on.
volumetric_pointlight: bool,
}
impl Default for AppSettings {
fn default() -> Self {
Self {
volumetric_spotlight: true,
volumetric_pointlight: true,
}
}
}
// Define a struct to store parameters for the point light's movement.
#[derive(Component)]
struct MoveBackAndForthHorizontally {
min_x: f32,
max_x: f32,
speed: f32,
}
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.insert_resource(ClearColor(Color::Srgba(Srgba {
red: 0.02,
green: 0.02,
blue: 0.02,
alpha: 1.0,
})))
.insert_resource(AmbientLight::NONE)
.init_resource::<AppSettings>()
.add_systems(Startup, setup)
.add_systems(Update, tweak_scene)
.add_systems(Update, (move_directional_light, move_point_light))
.add_systems(Update, adjust_app_settings)
.run();
}
/// Initializes the scene.
fn setup(mut commands: Commands, asset_server: Res<AssetServer>, app_settings: Res<AppSettings>) {
// Spawn the glTF scene.
commands.spawn(SceneRoot(asset_server.load(
GltfAssetLabel::Scene(0).from_asset("models/VolumetricFogExample/VolumetricFogExample.glb"),
)));
// Spawn the camera.
commands
.spawn((
Camera3d::default(),
Camera {
hdr: true,
..default()
},
Transform::from_xyz(-1.7, 1.5, 4.5).looking_at(vec3(-1.5, 1.7, 3.5), Vec3::Y),
Tonemapping::TonyMcMapface,
Bloom::default(),
))
.insert(Skybox {
image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
brightness: 1000.0,
..default()
})
.insert(VolumetricFog {
// This value is explicitly set to 0 since we have no environment map light
ambient_intensity: 0.0,
..default()
});
// Add the point light
commands.spawn((
Transform::from_xyz(-0.4, 1.9, 1.0),
PointLight {
shadows_enabled: true,
range: 150.0,
color: RED.into(),
intensity: 1000.0,
..default()
},
VolumetricLight,
MoveBackAndForthHorizontally {
min_x: -1.93,
max_x: -0.4,
speed: -0.2,
},
));
// Add the spot light
commands.spawn((
Transform::from_xyz(-1.8, 3.9, -2.7).looking_at(Vec3::ZERO, Vec3::Y),
SpotLight {
intensity: 5000.0, // lumens
color: Color::WHITE,
shadows_enabled: true,
inner_angle: 0.76,
outer_angle: 0.94,
..default()
},
VolumetricLight,
));
// Add the fog volume.
commands.spawn((
FogVolume::default(),
Transform::from_scale(Vec3::splat(35.0)),
));
// Add the help text.
commands.spawn((
create_text(&app_settings),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
fn create_text(app_settings: &AppSettings) -> Text {
format!(
"{}\n{}\n{}",
"Press WASD or the arrow keys to change the direction of the directional light",
if app_settings.volumetric_pointlight {
"Press P to turn volumetric point light off"
} else {
"Press P to turn volumetric point light on"
},
if app_settings.volumetric_spotlight {
"Press L to turn volumetric spot light off"
} else {
"Press L to turn volumetric spot light on"
}
)
.into()
}
/// A system that makes directional lights in the glTF scene into volumetric
/// lights with shadows.
fn tweak_scene(
mut commands: Commands,
mut lights: Query<(Entity, &mut DirectionalLight), Changed<DirectionalLight>>,
) {
for (light, mut directional_light) in lights.iter_mut() {
// Shadows are needed for volumetric lights to work.
directional_light.shadows_enabled = true;
commands.entity(light).insert(VolumetricLight);
}
}
/// Processes user requests to move the directional light.
fn move_directional_light(
input: Res<ButtonInput<KeyCode>>,
mut directional_lights: Query<&mut Transform, With<DirectionalLight>>,
) {
let mut delta_theta = Vec2::ZERO;
if input.pressed(KeyCode::KeyW) || input.pressed(KeyCode::ArrowUp) {
delta_theta.y += DIRECTIONAL_LIGHT_MOVEMENT_SPEED;
}
if input.pressed(KeyCode::KeyS) || input.pressed(KeyCode::ArrowDown) {
delta_theta.y -= DIRECTIONAL_LIGHT_MOVEMENT_SPEED;
}
if input.pressed(KeyCode::KeyA) || input.pressed(KeyCode::ArrowLeft) {
delta_theta.x += DIRECTIONAL_LIGHT_MOVEMENT_SPEED;
}
if input.pressed(KeyCode::KeyD) || input.pressed(KeyCode::ArrowRight) {
delta_theta.x -= DIRECTIONAL_LIGHT_MOVEMENT_SPEED;
}
if delta_theta == Vec2::ZERO {
return;
}
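// Turn the accumulated input into a small rotation (pitch around X, yaw around Y)
// and apply it to every directional light.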
let delta_quat = Quat::from_euler(EulerRot::XZY, delta_theta.y, 0.0, delta_theta.x);
for mut transform in directional_lights.iter_mut() {
transform.rotate(delta_quat);
}
}
// Toggle point light movement between left and right.
fn move_point_light(
timer: Res<Time>,
mut objects: Query<(&mut Transform, &mut MoveBackAndForthHorizontally)>,
) {
for (mut transform, mut move_data) in objects.iter_mut() {
let mut translation = transform.translation;
let mut need_toggle = false;
translation.x += move_data.speed * timer.delta_secs();
if translation.x > move_data.max_x {
translation.x = move_data.max_x;
need_toggle = true;
} else if translation.x < move_data.min_x {
translation.x = move_data.min_x;
need_toggle = true;
}
if need_toggle {
move_data.speed = -move_data.speed;
}
transform.translation = translation;
}
}
// Adjusts app settings per user input.
fn adjust_app_settings(
mut commands: Commands,
keyboard_input: Res<ButtonInput<KeyCode>>,
mut app_settings: ResMut<AppSettings>,
mut point_lights: Query<Entity, With<PointLight>>,
mut spot_lights: Query<Entity, With<SpotLight>>,
mut text: Query<&mut Text>,
) {
// If there are no changes, we're going to bail for efficiency. Record that
// here.
let mut any_changes = false;
// If the user pressed P, toggle volumetric state of the point light.
if keyboard_input.just_pressed(KeyCode::KeyP) {
app_settings.volumetric_pointlight = !app_settings.volumetric_pointlight;
any_changes = true;
}
// If the user pressed L, toggle volumetric state of the spot light.
if keyboard_input.just_pressed(KeyCode::KeyL) {
app_settings.volumetric_spotlight = !app_settings.volumetric_spotlight;
any_changes = true;
}
// If there were no changes, bail out.
if !any_changes {
return;
}
// Update volumetric settings.
for point_light in point_lights.iter_mut() {
if app_settings.volumetric_pointlight {
commands.entity(point_light).insert(VolumetricLight);
} else {
commands.entity(point_light).remove::<VolumetricLight>();
}
}
for spot_light in spot_lights.iter_mut() {
if app_settings.volumetric_spotlight {
commands.entity(spot_light).insert(VolumetricLight);
} else {
commands.entity(spot_light).remove::<VolumetricLight>();
}
}
// Update the help text.
for mut text in text.iter_mut() {
*text = create_text(&app_settings);
}
}

158
vendor/bevy/examples/3d/wireframe.rs vendored Normal file
View File

@@ -0,0 +1,158 @@
//! Showcases wireframe rendering.
//!
//! Wireframes currently do not work when using webgl or webgpu.
//! Supported platforms:
//! - DX12
//! - Vulkan
//! - Metal
//!
//! This is a native only feature.
use bevy::{
color::palettes::css::*,
pbr::wireframe::{NoWireframe, Wireframe, WireframeColor, WireframeConfig, WireframePlugin},
prelude::*,
render::{
render_resource::WgpuFeatures,
settings::{RenderCreation, WgpuSettings},
RenderPlugin,
},
};
fn main() {
App::new()
.add_plugins((
DefaultPlugins.set(RenderPlugin {
render_creation: RenderCreation::Automatic(WgpuSettings {
// WARN this is a native only feature. It will not work with webgl or webgpu
features: WgpuFeatures::POLYGON_MODE_LINE,
..default()
}),
..default()
}),
// You need to add this plugin to enable wireframe rendering
WireframePlugin::default(),
))
// Wireframes can be configured with this resource. This can be changed at runtime.
.insert_resource(WireframeConfig {
// The global wireframe config enables drawing of wireframes on every mesh,
// except those with `NoWireframe`. Meshes with `Wireframe` will always have a wireframe,
// regardless of the global configuration.
global: true,
// Controls the default color of all wireframes. Used as the default color for global wireframes.
// Can be changed per mesh using the `WireframeColor` component.
default_color: WHITE.into(),
})
.add_systems(Startup, setup)
.add_systems(Update, update_colors)
.run();
}
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// Red cube: Never renders a wireframe
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(Color::from(RED))),
Transform::from_xyz(-1.0, 0.5, -1.0),
NoWireframe,
));
// Orange cube: Follows global wireframe setting
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(Color::from(ORANGE))),
Transform::from_xyz(0.0, 0.5, 0.0),
));
// Green cube: Always renders a wireframe
commands.spawn((
Mesh3d(meshes.add(Cuboid::default())),
MeshMaterial3d(materials.add(Color::from(LIME))),
Transform::from_xyz(1.0, 0.5, 1.0),
Wireframe,
// This lets you configure the wireframe color of this entity.
// If not set, this will use the color in `WireframeConfig`
WireframeColor { color: LIME.into() },
));
// plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))),
MeshMaterial3d(materials.add(Color::from(BLUE))),
// You can insert this component without the `Wireframe` component
// to override the color of the global wireframe for this mesh
WireframeColor {
color: BLACK.into(),
},
));
// light
commands.spawn((PointLight::default(), Transform::from_xyz(2.0, 4.0, 2.0)));
// camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
));
// Text used to show controls
commands.spawn((
Text::default(),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
/// This system lets you toggle various wireframe settings
fn update_colors(
keyboard_input: Res<ButtonInput<KeyCode>>,
mut config: ResMut<WireframeConfig>,
mut wireframe_colors: Query<&mut WireframeColor, With<Wireframe>>,
mut text: Single<&mut Text>,
) {
text.0 = format!(
"Controls
---------------
Z - Toggle global
X - Change global color
C - Change color of the green cube wireframe
WireframeConfig
-------------
Global: {}
Color: {:?}",
config.global, config.default_color,
);
// Toggle showing a wireframe on all meshes
if keyboard_input.just_pressed(KeyCode::KeyZ) {
config.global = !config.global;
}
// Toggle the global wireframe color
if keyboard_input.just_pressed(KeyCode::KeyX) {
config.default_color = if config.default_color == WHITE.into() {
DEEP_PINK.into()
} else {
WHITE.into()
};
}
// Toggle the color of a wireframe using WireframeColor and not the global color
if keyboard_input.just_pressed(KeyCode::KeyC) {
for mut color in &mut wireframe_colors {
color.color = if color.color == LIME.into() {
RED.into()
} else {
LIME.into()
};
}
}
}

827
vendor/bevy/examples/README.md vendored Normal file
View File

@@ -0,0 +1,827 @@
<!-- MD024 - The Headers from the Platform-Specific Examples should be identical -->
<!-- Use 'cargo run -p build-templated-pages -- build-example-page' to generate the final example README.md -->
<!-- markdownlint-disable-file MD024 -->
# Examples
These examples demonstrate the main features of Bevy and how to use them.
To run an example, use the command `cargo run --example <Example>`, and add the option `--features x11` or `--features wayland` to force the example to run on a specific window compositor, e.g.
```sh
cargo run --features wayland --example hello_world
```
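Any other example listed in the tables below follows the same pattern, e.g. the `3d_scene` example from the 3D Rendering section:
```sh
cargo run --features x11 --example 3d_scene
```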
**⚠️ Note: for users of releases on crates.io!**
There are often large differences and incompatible API changes between the latest [crates.io](https://crates.io/crates/bevy) release and the development version of Bevy in the git main branch!
If you are using a released version of Bevy, you need to make sure you are viewing the correct version of the examples!
- Latest release: [https://github.com/bevyengine/bevy/tree/latest/examples](https://github.com/bevyengine/bevy/tree/latest/examples)
- Specific version, such as `0.4`: [https://github.com/bevyengine/bevy/tree/v0.4.0/examples](https://github.com/bevyengine/bevy/tree/v0.4.0/examples)
When you clone the repo locally to run the examples, use `git checkout` to get the correct version:
```bash
# `latest` always points to the newest release
git checkout latest
# or use a specific version
git checkout v0.4.0
```
---
## Table of Contents
- [Examples](#examples)
- [Table of Contents](#table-of-contents)
- [The Bare Minimum](#the-bare-minimum)
- [Hello, World!](#hello-world)
- [Cross-Platform Examples](#cross-platform-examples)
- [2D Rendering](#2d-rendering)
- [3D Rendering](#3d-rendering)
- [Animation](#animation)
- [Application](#application)
- [Assets](#assets)
- [Async Tasks](#async-tasks)
- [Audio](#audio)
- [Camera](#camera)
- [Dev tools](#dev-tools)
- [Diagnostics](#diagnostics)
- [ECS (Entity Component System)](#ecs-entity-component-system)
- [Embedded](#embedded)
- [Games](#games)
- [Gizmos](#gizmos)
- [Helpers](#helpers)
- [Input](#input)
- [Math](#math)
- [Movement](#movement)
- [Picking](#picking)
- [Reflection](#reflection)
- [Remote Protocol](#remote-protocol)
- [Scene](#scene)
- [Shaders](#shaders)
- [State](#state)
- [Stress Tests](#stress-tests)
- [Time](#time)
- [Tools](#tools)
- [Transforms](#transforms)
- [UI (User Interface)](#ui-user-interface)
- [Window](#window)
- [Tests](#tests)
- [Platform-Specific Examples](#platform-specific-examples)
- [Android](#android)
- [Setup](#setup)
- [Build & Run](#build--run)
- [About `libc++_shared.so`](#about-libc_sharedso)
- [Old phones](#old-phones)
- [About `cargo-apk`](#about-cargo-apk)
- [iOS](#ios)
- [Setup](#setup-1)
- [Build & Run](#build--run-1)
- [Wasm](#wasm)
- [Setup](#setup-2)
- [Build & Run](#build--run-2)
- [WebGL2 and WebGPU](#webgl2-and-webgpu)
- [Audio in the browsers](#audio-in-the-browsers)
- [Optimizing](#optimizing)
- [Loading Assets](#loading-assets)
# The Bare Minimum
<!-- MD026 - Hello, World! looks better with the ! -->
<!-- markdownlint-disable-next-line MD026 -->
## Hello, World!
Example | Description
--- | ---
[`hello_world.rs`](./hello_world.rs) | Runs a minimal example that outputs "hello world"
# Cross-Platform Examples
## 2D Rendering
Example | Description
--- | ---
[2D Bloom](../examples/2d/bloom_2d.rs) | Illustrates bloom post-processing in 2d
[2D Rotation](../examples/2d/rotation.rs) | Demonstrates rotating entities in 2D with quaternions
[2D Shapes](../examples/2d/2d_shapes.rs) | Renders simple 2D primitive shapes like circles and polygons
[2D Viewport To World](../examples/2d/2d_viewport_to_world.rs) | Demonstrates how to use the `Camera::viewport_to_world_2d` method with a dynamic viewport and camera.
[2D Wireframe](../examples/2d/wireframe_2d.rs) | Showcases wireframes for 2d meshes
[Arc 2D Meshes](../examples/2d/mesh2d_arcs.rs) | Demonstrates UV-mapping of the circular segment and sector primitives
[CPU Drawing](../examples/2d/cpu_draw.rs) | Manually read/write the pixels of a texture
[Custom glTF vertex attribute 2D](../examples/2d/custom_gltf_vertex_attribute.rs) | Renders a glTF mesh in 2D with a custom vertex attribute
[Manual Mesh 2D](../examples/2d/mesh2d_manual.rs) | Renders a custom mesh "manually" with "mid-level" renderer apis
[Mesh 2D](../examples/2d/mesh2d.rs) | Renders a 2d mesh
[Mesh 2D With Vertex Colors](../examples/2d/mesh2d_vertex_color_texture.rs) | Renders a 2d mesh with vertex color attributes
[Mesh2d Alpha Mode](../examples/2d/mesh2d_alpha_mode.rs) | Used to test alpha modes with mesh2d
[Mesh2d Repeated Texture](../examples/2d/mesh2d_repeated_texture.rs) | Showcase of using `uv_transform` on the `ColorMaterial` of a `Mesh2d`
[Move Sprite](../examples/2d/move_sprite.rs) | Changes the transform of a sprite
[Pixel Grid Snapping](../examples/2d/pixel_grid_snap.rs) | Shows how to create graphics that snap to the pixel grid by rendering to a texture in 2D
[Sprite](../examples/2d/sprite.rs) | Renders a sprite
[Sprite Animation](../examples/2d/sprite_animation.rs) | Animates a sprite in response to an event
[Sprite Flipping](../examples/2d/sprite_flipping.rs) | Renders a sprite flipped along an axis
[Sprite Scale](../examples/2d/sprite_scale.rs) | Shows how a sprite can be scaled into a rectangle while keeping the aspect ratio
[Sprite Sheet](../examples/2d/sprite_sheet.rs) | Renders an animated sprite
[Sprite Slice](../examples/2d/sprite_slice.rs) | Showcases slicing sprites into sections that can be scaled independently via the 9-patch technique
[Sprite Tile](../examples/2d/sprite_tile.rs) | Renders a sprite tiled in a grid
[Text 2D](../examples/2d/text2d.rs) | Generates text in 2D
[Texture Atlas](../examples/2d/texture_atlas.rs) | Generates a texture atlas (sprite sheet) from individual sprites
[Transparency in 2D](../examples/2d/transparency_2d.rs) | Demonstrates transparency in 2d
## 3D Rendering
Example | Description
--- | ---
[3D Bloom](../examples/3d/bloom_3d.rs) | Illustrates bloom configuration using HDR and emissive materials
[3D Scene](../examples/3d/3d_scene.rs) | Simple 3D scene with basic shapes and lighting
[3D Shapes](../examples/3d/3d_shapes.rs) | A scene showcasing the built-in 3D shapes
[3D Viewport To World](../examples/3d/3d_viewport_to_world.rs) | Demonstrates how to use the `Camera::viewport_to_world` method
[Animated Material](../examples/3d/animated_material.rs) | Shows how to animate material properties
[Anisotropy](../examples/3d/anisotropy.rs) | Displays an example model with anisotropy
[Anti-aliasing](../examples/3d/anti_aliasing.rs) | Compares different anti-aliasing methods
[Atmosphere](../examples/3d/atmosphere.rs) | A scene showcasing pbr atmospheric scattering
[Atmospheric Fog](../examples/3d/atmospheric_fog.rs) | A scene showcasing the atmospheric fog effect
[Auto Exposure](../examples/3d/auto_exposure.rs) | A scene showcasing auto exposure
[Blend Modes](../examples/3d/blend_modes.rs) | Showcases different blend modes
[Built-in postprocessing](../examples/3d/post_processing.rs) | Demonstrates the built-in postprocessing features
[Camera sub view](../examples/3d/camera_sub_view.rs) | Demonstrates using different sub view effects on a camera
[Clearcoat](../examples/3d/clearcoat.rs) | Demonstrates the clearcoat PBR feature
[Clustered Decals](../examples/3d/clustered_decals.rs) | Demonstrates clustered decals
[Color grading](../examples/3d/color_grading.rs) | Demonstrates color grading
[Decal](../examples/3d/decal.rs) | Decal rendering
[Deferred Rendering](../examples/3d/deferred_rendering.rs) | Renders meshes with both forward and deferred pipelines
[Depth of field](../examples/3d/depth_of_field.rs) | Demonstrates depth of field
[Edit Gltf Material](../examples/3d/edit_material_on_gltf.rs) | Showcases changing materials of a Gltf after Scene spawn
[Fog](../examples/3d/fog.rs) | A scene showcasing the distance fog effect
[Fog volumes](../examples/3d/fog_volumes.rs) | Demonstrates fog volumes
[Generate Custom Mesh](../examples/3d/generate_custom_mesh.rs) | Simple showcase of how to generate a custom mesh with a custom texture
[Irradiance Volumes](../examples/3d/irradiance_volumes.rs) | Demonstrates irradiance volumes
[Lighting](../examples/3d/lighting.rs) | Illustrates various lighting options in a simple scene
[Lightmaps](../examples/3d/lightmaps.rs) | Rendering a scene with baked lightmaps
[Lines](../examples/3d/lines.rs) | Create a custom material to draw 3d lines
[Load glTF](../examples/3d/load_gltf.rs) | Loads and renders a glTF file as a scene
[Load glTF extras](../examples/3d/load_gltf_extras.rs) | Loads and renders a glTF file as a scene, including the gltf extras
[Mesh Ray Cast](../examples/3d/mesh_ray_cast.rs) | Demonstrates ray casting with the `MeshRayCast` system parameter
[Meshlet](../examples/3d/meshlet.rs) | Meshlet rendering for dense high-poly scenes (experimental)
[Mixed lighting](../examples/3d/mixed_lighting.rs) | Demonstrates how to combine baked and dynamic lighting
[Motion Blur](../examples/3d/motion_blur.rs) | Demonstrates per-pixel motion blur
[Occlusion Culling](../examples/3d/occlusion_culling.rs) | Demonstration of Occlusion Culling
[Order Independent Transparency](../examples/3d/order_independent_transparency.rs) | Demonstrates how to use OIT
[Orthographic View](../examples/3d/orthographic.rs) | Shows how to create a 3D orthographic view (for an isometric look in games or CAD applications)
[Parallax Mapping](../examples/3d/parallax_mapping.rs) | Demonstrates use of a normal map and depth map for parallax mapping
[Parenting](../examples/3d/parenting.rs) | Demonstrates parent->child relationships and relative transformations
[Percentage-closer soft shadows](../examples/3d/pcss.rs) | Demonstrates percentage-closer soft shadows (PCSS)
[Physically Based Rendering](../examples/3d/pbr.rs) | Demonstrates use of Physically Based Rendering (PBR) properties
[Query glTF primitives](../examples/3d/query_gltf_primitives.rs) | Query primitives in a glTF scene
[Reflection Probes](../examples/3d/reflection_probes.rs) | Demonstrates reflection probes
[Render to Texture](../examples/3d/render_to_texture.rs) | Shows how to render to a texture, useful for mirrors, UI, or exporting images
[Rotate Environment Map](../examples/3d/rotate_environment_map.rs) | Demonstrates how to rotate the skybox and the environment map simultaneously
[Screen Space Ambient Occlusion](../examples/3d/ssao.rs) | A scene showcasing screen space ambient occlusion
[Screen Space Reflections](../examples/3d/ssr.rs) | Demonstrates screen space reflections with water ripples
[Scrolling fog](../examples/3d/scrolling_fog.rs) | Demonstrates how to create the effect of fog moving in the wind
[Shadow Biases](../examples/3d/shadow_biases.rs) | Demonstrates how shadow biases affect shadows in a 3d scene
[Shadow Caster and Receiver](../examples/3d/shadow_caster_receiver.rs) | Demonstrates how to prevent meshes from casting/receiving shadows in a 3d scene
[Skybox](../examples/3d/skybox.rs) | Load a cubemap texture onto a cube like a skybox and cycle through different compressed texture formats.
[Specular Tint](../examples/3d/specular_tint.rs) | Demonstrates specular tints and maps
[Spherical Area Lights](../examples/3d/spherical_area_lights.rs) | Demonstrates how point light radius values affect light behavior
[Split Screen](../examples/3d/split_screen.rs) | Demonstrates how to render two cameras to the same window to accomplish "split screen"
[Spotlight](../examples/3d/spotlight.rs) | Illustrates spot lights
[Texture](../examples/3d/texture.rs) | Shows configuration of texture materials
[Tonemapping](../examples/3d/tonemapping.rs) | Compares tonemapping options
[Transmission](../examples/3d/transmission.rs) | Showcases light transmission in the PBR material
[Transparency in 3D](../examples/3d/transparency_3d.rs) | Demonstrates transparency in 3d
[Two Passes](../examples/3d/two_passes.rs) | Renders two 3d passes to the same window from different perspectives
[Update glTF Scene](../examples/3d/update_gltf_scene.rs) | Update a scene from a glTF file, either by spawning the scene as a child of another entity, or by accessing the entities of the scene
[Vertex Colors](../examples/3d/vertex_colors.rs) | Shows the use of vertex colors
[Visibility range](../examples/3d/visibility_range.rs) | Demonstrates visibility ranges
[Volumetric fog](../examples/3d/volumetric_fog.rs) | Demonstrates volumetric fog and lighting
[Wireframe](../examples/3d/wireframe.rs) | Showcases wireframe rendering
## Animation
Example | Description
--- | ---
[Animated Mesh](../examples/animation/animated_mesh.rs) | Plays an animation on a skinned glTF model of a fox
[Animated Mesh Control](../examples/animation/animated_mesh_control.rs) | Plays an animation from a skinned glTF with keyboard controls
[Animated Mesh Events](../examples/animation/animated_mesh_events.rs) | Plays an animation from a skinned glTF with events
[Animated Transform](../examples/animation/animated_transform.rs) | Create and play an animation defined by code that operates on the `Transform` component
[Animated UI](../examples/animation/animated_ui.rs) | Shows how to use animation clips to animate UI properties
[Animation Events](../examples/animation/animation_events.rs) | Demonstrate how to use animation events
[Animation Graph](../examples/animation/animation_graph.rs) | Blends multiple animations together with a graph
[Animation Masks](../examples/animation/animation_masks.rs) | Demonstrates animation masks
[Color animation](../examples/animation/color_animation.rs) | Demonstrates how to animate colors using mixing and splines in different color spaces
[Custom Skinned Mesh](../examples/animation/custom_skinned_mesh.rs) | Skinned mesh example with mesh and joints data defined in code
[Eased Motion](../examples/animation/eased_motion.rs) | Demonstrates the application of easing curves to animate an object
[Easing Functions](../examples/animation/easing_functions.rs) | Showcases the built-in easing functions
[Morph Targets](../examples/animation/morph_targets.rs) | Plays an animation from a glTF file with meshes with morph targets
[glTF Skinned Mesh](../examples/animation/gltf_skinned_mesh.rs) | Skinned mesh example with mesh and joints data loaded from a glTF file
## Application
Example | Description
--- | ---
[Advanced log layers](../examples/app/log_layers_ecs.rs) | Illustrate how to transfer data between log layers and Bevy's ECS
[Custom Loop](../examples/app/custom_loop.rs) | Demonstrates how to create a custom runner (to update an app manually)
[Drag and Drop](../examples/app/drag_and_drop.rs) | An example that shows how to handle drag and drop in an app
[Empty](../examples/app/empty.rs) | An empty application (does nothing)
[Empty with Defaults](../examples/app/empty_defaults.rs) | An empty application with default plugins
[Headless](../examples/app/headless.rs) | An application that runs without default plugins
[Headless Renderer](../examples/app/headless_renderer.rs) | An application that runs with no window, but renders into image file
[Log layers](../examples/app/log_layers.rs) | Illustrate how to add custom log layers
[Logs](../examples/app/logs.rs) | Illustrate how to generate log output
[No Renderer](../examples/app/no_renderer.rs) | An application that runs with default plugins and displays an empty window, but without an actual renderer
[Plugin](../examples/app/plugin.rs) | Demonstrates the creation and registration of a custom plugin
[Plugin Group](../examples/app/plugin_group.rs) | Demonstrates the creation and registration of a custom plugin group
[Return after Run](../examples/app/return_after_run.rs) | Show how to return to main after the Bevy app has exited
[Thread Pool Resources](../examples/app/thread_pool_resources.rs) | Creates and customizes the internal thread pool
[Without Winit](../examples/app/without_winit.rs) | Create an application without winit (runs single time, no event loop)
## Assets
Example | Description
--- | ---
[Alter Mesh](../examples/asset/alter_mesh.rs) | Shows how to modify the underlying asset of a Mesh after spawning.
[Alter Sprite](../examples/asset/alter_sprite.rs) | Shows how to modify texture assets after spawning.
[Asset Decompression](../examples/asset/asset_decompression.rs) | Demonstrates loading a compressed asset
[Asset Loading](../examples/asset/asset_loading.rs) | Demonstrates various methods to load assets
[Asset Processing](../examples/asset/processing/asset_processing.rs) | Demonstrates how to process and load custom assets
[Asset Settings](../examples/asset/asset_settings.rs) | Demonstrates various methods of applying settings when loading an asset
[Custom Asset](../examples/asset/custom_asset.rs) | Implements a custom asset loader
[Custom Asset IO](../examples/asset/custom_asset_reader.rs) | Implements a custom AssetReader
[Embedded Asset](../examples/asset/embedded_asset.rs) | Embed an asset in the application binary and load it
[Extra asset source](../examples/asset/extra_source.rs) | Load an asset from a non-standard asset source
[Hot Reloading of Assets](../examples/asset/hot_asset_reloading.rs) | Demonstrates automatic reloading of assets when modified on disk
[Multi-asset synchronization](../examples/asset/multi_asset_sync.rs) | Demonstrates how to wait for multiple assets to be loaded.
[Repeated texture configuration](../examples/asset/repeated_texture.rs) | How to configure the texture to repeat instead of the default clamp to edges
## Async Tasks
Example | Description
--- | ---
[Async Compute](../examples/async_tasks/async_compute.rs) | How to use `AsyncComputeTaskPool` to complete longer running tasks
[External Source of Data on an External Thread](../examples/async_tasks/external_source_external_thread.rs) | How to use an external thread to run an infinite task and communicate with a channel
## Audio
Example | Description
--- | ---
[Audio](../examples/audio/audio.rs) | Shows how to load and play an audio file
[Audio Control](../examples/audio/audio_control.rs) | Shows how to load and play an audio file, and control how it's played
[Decodable](../examples/audio/decodable.rs) | Shows how to create and register a custom audio source by implementing the `Decodable` type.
[Pitch](../examples/audio/pitch.rs) | Shows how to directly play a simple pitch
[Soundtrack](../examples/audio/soundtrack.rs) | Shows how to play different soundtracks based on game state
[Spatial Audio 2D](../examples/audio/spatial_audio_2d.rs) | Shows how to play spatial audio, and moving the emitter in 2D
[Spatial Audio 3D](../examples/audio/spatial_audio_3d.rs) | Shows how to play spatial audio, and moving the emitter in 3D
## Camera
Example | Description
--- | ---
[2D top-down camera](../examples/camera/2d_top_down_camera.rs) | A 2D top-down camera smoothly following player movements
[Camera Orbit](../examples/camera/camera_orbit.rs) | Shows how to orbit a static scene using pitch, yaw, and roll.
[Custom Projection](../examples/camera/custom_projection.rs) | Shows how to create custom camera projections.
[First person view model](../examples/camera/first_person_view_model.rs) | A first-person camera that uses a world model and a view model with different field of views (FOV)
[Projection Zoom](../examples/camera/projection_zoom.rs) | Shows how to zoom orthographic and perspective projection cameras.
[Screen Shake](../examples/camera/2d_screen_shake.rs) | A simple 2D screen shake effect
## Dev tools
Example | Description
--- | ---
[FPS overlay](../examples/dev_tools/fps_overlay.rs) | Demonstrates FPS overlay
## Diagnostics
Example | Description
--- | ---
[Custom Diagnostic](../examples/diagnostics/custom_diagnostic.rs) | Shows how to create a custom diagnostic
[Enabling/disabling diagnostic](../examples/diagnostics/enabling_disabling_diagnostic.rs) | Shows how to disable/re-enable a Diagnostic during runtime
[Log Diagnostics](../examples/diagnostics/log_diagnostics.rs) | Add a plugin that logs diagnostics, like frames per second (FPS), to the console
## ECS (Entity Component System)
Example | Description
--- | ---
[Change Detection](../examples/ecs/change_detection.rs) | Change detection on components and resources
[Component Hooks](../examples/ecs/component_hooks.rs) | Define component hooks to manage component lifecycle events
[Custom Query Parameters](../examples/ecs/custom_query_param.rs) | Groups commonly used compound queries and query filters into a single type
[Custom Schedule](../examples/ecs/custom_schedule.rs) | Demonstrates how to add custom schedules
[Dynamic ECS](../examples/ecs/dynamic.rs) | Dynamically create components, spawn entities with those components and query those components
[ECS Guide](../examples/ecs/ecs_guide.rs) | Full guide to Bevy's ECS
[Entity disabling](../examples/ecs/entity_disabling.rs) | Demonstrates how to hide entities from the ECS without deleting them
[Error handling](../examples/ecs/error_handling.rs) | How to return and handle errors across the ECS
[Event](../examples/ecs/event.rs) | Illustrates event creation, activation, and reception
[Fallible System Parameters](../examples/ecs/fallible_params.rs) | Systems are skipped if their parameters cannot be acquired
[Fixed Timestep](../examples/ecs/fixed_timestep.rs) | Shows how to create systems that run every fixed timestep, rather than every tick
[Generic System](../examples/ecs/generic_system.rs) | Shows how to create systems that can be reused with different types
[Hierarchy](../examples/ecs/hierarchy.rs) | Creates a hierarchy of parents and children entities
[Immutable Components](../examples/ecs/immutable_components.rs) | Demonstrates the creation and utility of immutable components
[Iter Combinations](../examples/ecs/iter_combinations.rs) | Shows how to iterate over combinations of query results
[Nondeterministic System Order](../examples/ecs/nondeterministic_system_order.rs) | Systems run in parallel, but their order isn't always deterministic. Here's how to detect and fix this.
[Observer Propagation](../examples/ecs/observer_propagation.rs) | Demonstrates event propagation with observers
[Observers](../examples/ecs/observers.rs) | Demonstrates observers that react to events (both built-in life-cycle events and custom events)
[One Shot Systems](../examples/ecs/one_shot_systems.rs) | Shows how to flexibly run systems without scheduling them
[Parallel Query](../examples/ecs/parallel_query.rs) | Illustrates parallel queries with `ParallelIterator`
[Relationships](../examples/ecs/relationships.rs) | Define and work with custom relationships between entities
[Removal Detection](../examples/ecs/removal_detection.rs) | Query for entities that had a specific component removed earlier in the current frame
[Run Conditions](../examples/ecs/run_conditions.rs) | Run systems only when one or multiple conditions are met
[Send and receive events](../examples/ecs/send_and_receive_events.rs) | Demonstrates how to send and receive events of the same type in a single system
[Startup System](../examples/ecs/startup_system.rs) | Demonstrates a startup system (one that runs once when the app starts up)
[System Closure](../examples/ecs/system_closure.rs) | Show how to use closures as systems, and how to configure `Local` variables by capturing external state
[System Parameter](../examples/ecs/system_param.rs) | Illustrates creating custom system parameters with `SystemParam`
[System Piping](../examples/ecs/system_piping.rs) | Pipe the output of one system into a second, allowing you to handle any errors gracefully
[System Stepping](../examples/ecs/system_stepping.rs) | Demonstrate stepping through systems in order of execution.
## Embedded
Example | Description
--- | ---
[`no_std` Compatible Library](../examples/no_std/library/src/lib.rs) | Example library compatible with `std` and `no_std` targets
## Games
Example | Description
--- | ---
[Alien Cake Addict](../examples/games/alien_cake_addict.rs) | Eat the cakes. Eat them all. An example 3D game
[Breakout](../examples/games/breakout.rs) | An implementation of the classic game "Breakout".
[Contributors](../examples/games/contributors.rs) | Displays each contributor as a bouncy bevy-ball!
[Desk Toy](../examples/games/desk_toy.rs) | Bevy logo as a desk toy using transparent windows! Now with Googly Eyes!
[Game Menu](../examples/games/game_menu.rs) | A simple game menu
[Loading Screen](../examples/games/loading_screen.rs) | Demonstrates how to create a loading screen that waits for all assets to be loaded and render pipelines to be compiled.
## Gizmos
Example | Description
--- | ---
[2D Gizmos](../examples/gizmos/2d_gizmos.rs) | A scene showcasing 2D gizmos
[3D Gizmos](../examples/gizmos/3d_gizmos.rs) | A scene showcasing 3D gizmos
[Axes](../examples/gizmos/axes.rs) | Demonstrates the function of axes gizmos
[Light Gizmos](../examples/gizmos/light_gizmos.rs) | A scene showcasing light gizmos
## Helpers
Example | Description
--- | ---
[Camera Controller](../examples/helpers/camera_controller.rs) | Example Free-Cam Styled Camera Controller
[Widgets](../examples/helpers/widgets.rs) | Example UI Widgets
## Input
Example | Description
--- | ---
[Char Input Events](../examples/input/char_input_events.rs) | Prints out all chars as they are inputted
[Gamepad Input](../examples/input/gamepad_input.rs) | Shows handling of gamepad input, connections, and disconnections
[Gamepad Input Events](../examples/input/gamepad_input_events.rs) | Iterates and prints gamepad input and connection events
[Gamepad Rumble](../examples/input/gamepad_rumble.rs) | Shows how to rumble a gamepad using force feedback
[Keyboard Input](../examples/input/keyboard_input.rs) | Demonstrates handling a key press/release
[Keyboard Input Events](../examples/input/keyboard_input_events.rs) | Prints out all keyboard events
[Keyboard Modifiers](../examples/input/keyboard_modifiers.rs) | Demonstrates using key modifiers (ctrl, shift)
[Mouse Grab](../examples/input/mouse_grab.rs) | Demonstrates how to grab the mouse, locking the cursor to the app's screen
[Mouse Input](../examples/input/mouse_input.rs) | Demonstrates handling a mouse button press/release
[Mouse Input Events](../examples/input/mouse_input_events.rs) | Prints out all mouse events (buttons, movement, etc.)
[Text Input](../examples/input/text_input.rs) | Simple text input with IME support
[Touch Input](../examples/input/touch_input.rs) | Displays touch presses, releases, and cancels
[Touch Input Events](../examples/input/touch_input_events.rs) | Prints out all touch inputs
## Math
Example | Description
--- | ---
[Bounding Volume Intersections (2D)](../examples/math/bounding_2d.rs) | Showcases bounding volumes and intersection tests
[Cubic Splines](../examples/math/cubic_splines.rs) | Exhibits different modes of constructing cubic curves using splines
[Custom Primitives](../examples/math/custom_primitives.rs) | Demonstrates how to add custom primitives and useful traits for them.
[Random Sampling](../examples/math/random_sampling.rs) | Demonstrates how to sample random points from mathematical primitives
[Rendering Primitives](../examples/math/render_primitives.rs) | Shows off rendering for all math primitives as both Meshes and Gizmos
[Sampling Primitives](../examples/math/sampling_primitives.rs) | Demonstrates all the primitives which can be sampled.
[Smooth Follow](../examples/movement/smooth_follow.rs) | Demonstrates how to make an entity smoothly follow another using interpolation
## Movement
Example | Description
--- | ---
[Run physics in a fixed timestep](../examples/movement/physics_in_fixed_timestep.rs) | Handles input, physics, and rendering in an industry-standard way by using a fixed timestep
## Picking
Example | Description
--- | ---
[Mesh Picking](../examples/picking/mesh_picking.rs) | Demonstrates picking meshes
[Picking Debug Tools](../examples/picking/debug_picking.rs) | Demonstrates picking debug overlay
[Showcases simple picking events and usage](../examples/picking/simple_picking.rs) | Demonstrates how to use picking events to spawn simple objects
[Sprite Picking](../examples/picking/sprite_picking.rs) | Demonstrates picking sprites and sprite atlases
## Reflection
Example | Description
--- | ---
[Custom Attributes](../examples/reflection/custom_attributes.rs) | Registering and accessing custom attributes on reflected types
[Dynamic Types](../examples/reflection/dynamic_types.rs) | How dynamic types are used with reflection
[Function Reflection](../examples/reflection/function_reflection.rs) | Demonstrates how functions can be called dynamically using reflection
[Generic Reflection](../examples/reflection/generic_reflection.rs) | Registers concrete instances of generic types that may be used with reflection
[Reflection](../examples/reflection/reflection.rs) | Demonstrates how reflection in Bevy provides a way to dynamically interact with Rust types
[Reflection Types](../examples/reflection/reflection_types.rs) | Illustrates the various reflection types available
[Type Data](../examples/reflection/type_data.rs) | Demonstrates how to create and use type data
## Remote Protocol
Example | Description
--- | ---
[client](../examples/remote/client.rs) | A simple command line client that can control Bevy apps via the BRP
[server](../examples/remote/server.rs) | A Bevy app that you can connect to with the BRP and edit
## Scene
Example | Description
--- | ---
[Scene](../examples/scene/scene.rs) | Demonstrates loading from and saving scenes to files
## Shaders
These examples demonstrate how to implement different shaders in user code.
A shader, in its most common usage, is a small program that the GPU runs per vertex of a mesh (a vertex shader) or per affected screen fragment (a fragment shader). The GPU executes these programs in a highly parallel way.
There are also compute shaders, which are used for more general processing that leverages the GPU's parallelism.
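To make the idea concrete, here is a minimal sketch (not one of the examples listed below) of how a custom fragment shader is typically wired up in user code: a struct that implements `Material` points at a WGSL file. The struct name, uniform layout, and shader path are placeholders, and the exact API may differ slightly between Bevy versions.
```rust
use bevy::{
    prelude::*,
    render::render_resource::{AsBindGroup, ShaderRef},
};

// A hypothetical material with a single color uniform.
#[derive(Asset, TypePath, AsBindGroup, Debug, Clone)]
struct MyMaterial {
    #[uniform(0)]
    color: LinearRgba,
}

impl Material for MyMaterial {
    fn fragment_shader() -> ShaderRef {
        // Placeholder path, loaded from the assets folder; the vertex stage
        // falls back to the default mesh vertex shader.
        "shaders/my_material.wgsl".into()
    }
}

fn main() {
    App::new()
        // Registering the material plugin makes meshes with a
        // `MeshMaterial3d<MyMaterial>` component render with the custom shader.
        .add_plugins((DefaultPlugins, MaterialPlugin::<MyMaterial>::default()))
        .run();
}
```
The shader examples below cover the maintained versions of this pattern for 2D meshes, GLSL, bindless textures, post-processing, and more.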
Example | Description
--- | ---
[Animated](../examples/shader/animate_shader.rs) | A shader that uses dynamic data like the time since startup
[Array Texture](../examples/shader/array_texture.rs) | A shader that shows how to reuse the core bevy PBR shading functionality in a custom material that obtains the base color from an array texture.
[Compute - Game of Life](../examples/shader/compute_shader_game_of_life.rs) | A compute shader that simulates Conway's Game of Life
[Custom Render Phase](../examples/shader/custom_render_phase.rs) | Shows how to make a complete render phase
[Custom Vertex Attribute](../examples/shader/custom_vertex_attribute.rs) | A shader that reads a mesh's custom vertex attribute
[Custom phase item](../examples/shader/custom_phase_item.rs) | Demonstrates how to enqueue custom draw commands in a render phase
[Extended Bindless Material](../examples/shader/extended_material_bindless.rs) | Demonstrates bindless `ExtendedMaterial`
[Extended Material](../examples/shader/extended_material.rs) | A custom shader that builds on the standard material
[GPU readback](../examples/shader/gpu_readback.rs) | A very simple compute shader that writes to a buffer that is read by the cpu
[Instancing](../examples/shader/custom_shader_instancing.rs) | A shader that renders a mesh multiple times in one draw call using low level rendering api
[Instancing](../examples/shader/automatic_instancing.rs) | Shows that multiple instances of a cube are automatically instanced in one draw call
[Material](../examples/shader/shader_material.rs) | A shader and a material that uses it
[Material](../examples/shader/shader_material_2d.rs) | A shader and a material that uses it on a 2d mesh
[Material - Bindless](../examples/shader/shader_material_bindless.rs) | Demonstrates how to make materials that use bindless textures
[Material - GLSL](../examples/shader/shader_material_glsl.rs) | A shader that uses the GLSL shading language
[Material - Screenspace Texture](../examples/shader/shader_material_screenspace_texture.rs) | A shader that samples a texture with view-independent UV coordinates
[Material - WESL](../examples/shader/shader_material_wesl.rs) | A shader that uses WESL
[Material Prepass](../examples/shader/shader_prepass.rs) | A shader that uses the various textures generated by the prepass
[Post Processing - Custom Render Pass](../examples/shader/custom_post_processing.rs) | A custom post processing effect, using a custom render pass that runs after the main pass
[Shader Defs](../examples/shader/shader_defs.rs) | A shader that uses "shaders defs" (a bevy tool to selectively toggle parts of a shader)
[Specialized Mesh Pipeline](../examples/shader/specialized_mesh_pipeline.rs) | Demonstrates how to write a specialized mesh pipeline
[Storage Buffer](../examples/shader/storage_buffer.rs) | A shader that shows how to bind a storage buffer using a custom material.
[Texture Binding Array (Bindless Textures)](../examples/shader/texture_binding_array.rs) | A shader that shows how to bind and sample multiple textures as a binding array (a.k.a. bindless textures).
## State
Example | Description
--- | ---
[Computed States](../examples/state/computed_states.rs) | Advanced state patterns using Computed States.
[Custom State Transition Behavior](../examples/state/custom_transitions.rs) | Creating and working with custom state transition schedules.
[States](../examples/state/states.rs) | Illustrates how to use States to control transitioning from a Menu state to an InGame state.
[Sub States](../examples/state/sub_states.rs) | Using Sub States for hierarchical state handling.
## Stress Tests
These examples are used to test the performance and stability of various parts of the engine in an isolated way.
Due to the focus on performance it's recommended to run the stress tests in release mode:
```sh
cargo run --release --example <example name>
```
Example | Description
--- | ---
[Bevymark](../examples/stress_tests/bevymark.rs) | A heavy sprite rendering workload to benchmark your system with Bevy
[Many Animated Materials](../examples/stress_tests/many_materials.rs) | Benchmark to test rendering many animated materials
[Many Animated Sprites](../examples/stress_tests/many_animated_sprites.rs) | Displays many animated sprites in a grid arrangement with slight offsets to their animation timers. Used for performance testing.
[Many Buttons](../examples/stress_tests/many_buttons.rs) | Test rendering of many UI elements
[Many Cameras & Lights](../examples/stress_tests/many_cameras_lights.rs) | Test rendering of many cameras and lights
[Many Components (and Entities and Systems)](../examples/stress_tests/many_components.rs) | Test large ECS systems
[Many Cubes](../examples/stress_tests/many_cubes.rs) | Simple benchmark to test per-entity draw overhead. Run with the `sphere` argument to test frustum culling
[Many Foxes](../examples/stress_tests/many_foxes.rs) | Loads an animated fox model and spawns lots of them. Good for testing skinned mesh performance. Takes an unsigned integer argument for the number of foxes to spawn. Defaults to 1000
[Many Gizmos](../examples/stress_tests/many_gizmos.rs) | Test rendering of many gizmos
[Many Glyphs](../examples/stress_tests/many_glyphs.rs) | Simple benchmark to test text rendering.
[Many Lights](../examples/stress_tests/many_lights.rs) | Simple benchmark to test rendering many point lights. Run with `WGPU_SETTINGS_PRIO=webgl2` to restrict to uniform buffers and max 256 lights
[Many Sprites](../examples/stress_tests/many_sprites.rs) | Displays many sprites in a grid arrangement! Used for performance testing. Use `--colored` to enable color tinted sprites.
[Many Text2d](../examples/stress_tests/many_text2d.rs) | Displays many Text2d! Used for performance testing.
[Text Pipeline](../examples/stress_tests/text_pipeline.rs) | Text Pipeline benchmark
[Transform Hierarchy](../examples/stress_tests/transform_hierarchy.rs) | Various test cases for hierarchy and transform propagation performance
## Time
Example | Description
--- | ---
[Time handling](../examples/time/time.rs) | Explains how Time is handled in ECS
[Timers](../examples/time/timers.rs) | Illustrates ticking `Timer` resources inside systems and handling their state
[Virtual time](../examples/time/virtual_time.rs) | Shows how `Time<Virtual>` can be used to pause, resume, slow down and speed up a game.
## Tools
Example | Description
--- | ---
[Gamepad Viewer](../examples/tools/gamepad_viewer.rs) | Shows a visualization of gamepad buttons, sticks, and triggers
[Scene Viewer](../examples/tools/scene_viewer/main.rs) | A simple way to view glTF models with Bevy. Just run `cargo run --release --example scene_viewer /path/to/model.gltf#Scene0`, replacing the path as appropriate. With no arguments it will load the FlightHelmet glTF model from the repository assets subdirectory
## Transforms
Example | Description
--- | ---
[3D Rotation](../examples/transforms/3d_rotation.rs) | Illustrates how to (constantly) rotate an object around an axis
[Alignment](../examples/transforms/align.rs) | A demonstration of Transform's axis-alignment feature
[Scale](../examples/transforms/scale.rs) | Illustrates how to scale an object in each direction
[Transform](../examples/transforms/transform.rs) | Shows multiple transformations of objects
[Translation](../examples/transforms/translation.rs) | Illustrates how to move an object along an axis
## UI (User Interface)
Example | Description
--- | ---
[Borders](../examples/ui/borders.rs) | Demonstrates how to create a node with a border
[Box Shadow](../examples/ui/box_shadow.rs) | Demonstrates how to create a node with a shadow
[Button](../examples/ui/button.rs) | Illustrates creating and updating a button
[CSS Grid](../examples/ui/grid.rs) | An example for CSS Grid layout
[Directional Navigation](../examples/ui/directional_navigation.rs) | Demonstration of Directional Navigation between UI elements
[Display and Visibility](../examples/ui/display_and_visibility.rs) | Demonstrates how Display and Visibility work in the UI.
[Flex Layout](../examples/ui/flex_layout.rs) | Demonstrates how the AlignItems and JustifyContent properties can be composed to layout nodes and position text
[Font Atlas Debug](../examples/ui/font_atlas_debug.rs) | Illustrates how FontAtlases are populated (used to optimize text rendering internally)
[Ghost Nodes](../examples/ui/ghost_nodes.rs) | Demonstrates the use of Ghost Nodes to skip entities in the UI layout hierarchy
[Overflow](../examples/ui/overflow.rs) | Simple example demonstrating overflow behavior
[Overflow Clip Margin](../examples/ui/overflow_clip_margin.rs) | Simple example demonstrating the OverflowClipMargin style property
[Overflow and Clipping Debug](../examples/ui/overflow_debug.rs) | An example to debug overflow and clipping behavior
[Relative Cursor Position](../examples/ui/relative_cursor_position.rs) | Showcases the RelativeCursorPosition component
[Render UI to Texture](../examples/ui/render_ui_to_texture.rs) | An example of rendering UI as a part of a 3D world
[Scroll](../examples/ui/scroll.rs) | Demonstrates scrolling UI containers
[Size Constraints](../examples/ui/size_constraints.rs) | Demonstrates how to use size constraints to control the size of a UI node.
[Tab Navigation](../examples/ui/tab_navigation.rs) | Demonstration of Tab Navigation between UI elements
[Text](../examples/ui/text.rs) | Illustrates creating and updating text
[Text Debug](../examples/ui/text_debug.rs) | An example for debugging text layout
[Text Wrap Debug](../examples/ui/text_wrap_debug.rs) | Demonstrates text wrapping
[Transparency UI](../examples/ui/transparency_ui.rs) | Demonstrates transparency for UI
[UI Material](../examples/ui/ui_material.rs) | Demonstrates creating and using custom Ui materials
[UI Scaling](../examples/ui/ui_scaling.rs) | Illustrates how to scale the UI
[UI Texture Atlas](../examples/ui/ui_texture_atlas.rs) | Illustrates how to use TextureAtlases in UI
[UI Texture Atlas Slice](../examples/ui/ui_texture_atlas_slice.rs) | Illustrates how to use 9 Slicing for TextureAtlases in UI
[UI Texture Slice](../examples/ui/ui_texture_slice.rs) | Illustrates how to use 9 Slicing in UI
[UI Texture Slice Flipping and Tiling](../examples/ui/ui_texture_slice_flip_and_tile.rs) | Illustrates how to flip and tile images with 9 Slicing in UI
[UI Z-Index](../examples/ui/z_index.rs) | Demonstrates how to control the relative depth (z-position) of UI elements
[Viewport Debug](../examples/ui/viewport_debug.rs) | An example for debugging viewport coordinates
[Window Fallthrough](../examples/ui/window_fallthrough.rs) | Illustrates how to access `winit::window::Window`'s `hittest` functionality.
## Window
Example | Description
--- | ---
[Clear Color](../examples/window/clear_color.rs) | Creates a solid color window
[Custom Cursor Image](../examples/window/custom_cursor_image.rs) | Demonstrates creating an animated custom cursor from an image
[Custom User Event](../examples/window/custom_user_event.rs) | Handles custom user events within the event loop
[Low Power](../examples/window/low_power.rs) | Demonstrates settings to reduce power use for bevy applications
[Monitor info](../examples/window/monitor_info.rs) | Displays information about available monitors (displays).
[Multiple Windows](../examples/window/multiple_windows.rs) | Demonstrates creating multiple windows, and rendering to them
[Scale Factor Override](../examples/window/scale_factor_override.rs) | Illustrates how to customize the default window settings
[Screenshot](../examples/window/screenshot.rs) | Shows how to save screenshots to disk
[Transparent Window](../examples/window/transparent_window.rs) | Illustrates making the window transparent and hiding the window decoration
[Window Drag Move](../examples/window/window_drag_move.rs) | Demonstrates drag move and drag resize without window decoration
[Window Resizing](../examples/window/window_resizing.rs) | Demonstrates resizing and responding to resizing a window
[Window Settings](../examples/window/window_settings.rs) | Demonstrates customizing default window settings
# Tests
Example | Description
--- | ---
[How to Test Apps](../tests/how_to_test_apps.rs) | How to test apps (simple integration testing)
[How to Test Systems](../tests/how_to_test_systems.rs) | How to test systems with commands, queries or resources
# Platform-Specific Examples
## Android
### Setup
```sh
rustup target add aarch64-linux-android
cargo install cargo-ndk
```
The Android SDK must be installed, and the environment variable `ANDROID_SDK_ROOT` set to the root Android `sdk` folder.
When using `NDK (Side by side)`, the environment variable `ANDROID_NDK_ROOT` must also be set to one of the NDKs in `sdk\ndk\[NDK number]`.
Alternatively, you can install Android Studio.
### Build & Run
To build an Android app, you first need to build shared object files for the target architecture with `cargo-ndk`:
```sh
cargo ndk -t <target_name> -o <project_name>/app/src/main/jniLibs build
```
For example, to compile to a 64-bit ARM platform:
```sh
cargo ndk -t arm64-v8a -o android_example/app/src/main/jniLibs build
```
Setting the output path ensures the shared object files end up in the target-specific directories under `jniLibs`, where the JNI expects to find them.
See the `cargo-ndk` [README](https://crates.io/crates/cargo-ndk) for other options.
After this you can build it with `gradlew`:
```sh
./gradlew build
```
Or build it with Android Studio.
Then you can test it in your Android project.
#### About `libc++_shared.so`
Bevy may require `libc++_shared.so` to run on Android, as it is needed by the `oboe` crate, but typically `cargo-ndk` does not copy this file automatically.
To include it, you can manually obtain it from NDK source or use a `build.rs` script for automation, as described in the `cargo-ndk` [README](https://github.com/bbqsrc/cargo-ndk?tab=readme-ov-file#linking-against-and-copying-libc_sharedso-into-the-relevant-places-in-the-output-directory).
Alternatively, you can modify project files to include it when building an APK. To understand the specific steps taken in this project, please refer to the comments within the project files for detailed instructions (`app/CMakeList.txt`, `app/build.gradle`, `app/src/main/cpp/dummy.cpp`).
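As a rough illustration of the `build.rs` approach, the sketch below copies `libc++_shared.so` out of the NDK into the `jniLibs` folder. It is only an assumption-laden outline: the `ANDROID_NDK_ROOT` variable, the `linux-x86_64` host prebuilt directory, the `aarch64-linux-android` target, and the `jniLibs` output path are placeholders you will need to adapt to your own setup.
```rust
// build.rs (sketch): copy libc++_shared.so next to the other shared objects.
use std::{env, fs, path::PathBuf};

fn main() {
    // Only relevant when cross-compiling for Android.
    if env::var("CARGO_CFG_TARGET_OS").map(|os| os != "android").unwrap_or(true) {
        return;
    }

    // Assumed env var and NDK layout; recent NDKs keep the library under the llvm sysroot.
    let ndk = PathBuf::from(
        env::var("ANDROID_NDK_ROOT").expect("ANDROID_NDK_ROOT must point at the NDK"),
    );
    let lib = ndk
        .join("toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib")
        .join("aarch64-linux-android/libc++_shared.so");

    // Assumed output folder matching the Gradle project used in this example.
    let dest = PathBuf::from("android_example/app/src/main/jniLibs/arm64-v8a");
    fs::create_dir_all(&dest).expect("failed to create jniLibs directory");
    fs::copy(&lib, dest.join("libc++_shared.so")).expect("failed to copy libc++_shared.so");

    println!("cargo:rerun-if-env-changed=ANDROID_NDK_ROOT");
}
```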
### Debugging
You can view the logs with the following command:
```sh
adb logcat | grep 'RustStdoutStderr\|bevy\|wgpu'
```
If you get an error acquiring or setting up a GPU, try setting the log level of `wgpu_hal` to `DEBUG` to get more information.
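You can also raise the verbosity from inside the app instead of (or in addition to) filtering `adb logcat`. A minimal sketch, assuming you configure Bevy's `LogPlugin` when adding `DefaultPlugins` (the filter string is just an example):
```rust
use bevy::log::{Level, LogPlugin};
use bevy::prelude::*;

fn main() {
    App::new()
        .add_plugins(DefaultPlugins.set(LogPlugin {
            // Keep the default level for everything else, but ask wgpu_hal for
            // detailed logs while debugging GPU selection and setup.
            filter: "info,wgpu_hal=debug".into(),
            level: Level::INFO,
            ..default()
        }))
        .run();
}
```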
Sometimes, running the app complains about an unknown activity. This may be fixed by uninstalling the application:
```sh
adb uninstall org.bevyengine.example
```
### Old phones
In its examples, Bevy targets the minimum Android API that Play Store <!-- markdown-link-check-disable -->
[requires](https://developer.android.com/distribute/best-practices/develop/target-sdk) to upload and update apps. <!-- markdown-link-check-enable -->
Users of older phones may want to use an older API when testing. By default, Bevy uses [`GameActivity`](https://developer.android.com/games/agdk/game-activity), which only works on Android API level 31 and higher, so if you want to target an older API you need to switch to `NativeActivity`.
To use `NativeActivity`, edit your `Cargo.toml` manually like this:
```toml
bevy = { version = "0.14", default-features = false, features = ["android-native-activity", ...] }
```
Then build it as described in the [Build & Run](#build--run) section above.
#### About `cargo-apk`
You can also build an APK with `cargo-apk`, a simpler but deprecated tool which doesn't support `GameActivity`. If you want to use this, there is a [folder](./mobile/android_basic) inside the mobile example with instructions.
Example | File | Description
--- | --- | ---
`android` | [`mobile/src/lib.rs`](./mobile/src/lib.rs) | A 3d Scene with a button and playing sound
## iOS
### Setup
You need to install the correct Rust targets:
- `aarch64-apple-ios`: iOS devices
- `x86_64-apple-ios`: iOS simulator on x86 processors
- `aarch64-apple-ios-sim`: iOS simulator on Apple processors
```sh
rustup target add aarch64-apple-ios x86_64-apple-ios aarch64-apple-ios-sim
```
### Build & Run
Using bash:
```sh
cd examples/mobile
make run
```
In an ideal world, this will boot up, install and run the app for the first
iOS simulator in your `xcrun simctl list devices`. If this fails, you can
specify the simulator device UUID via:
```sh
DEVICE_ID=${YOUR_DEVICE_ID} make run
```
If you'd like to see xcode do stuff, you can run
```sh
open bevy_mobile_example.xcodeproj/
```
which will open xcode. You then must push the zoom zoom play button and wait
for the magic.
Example | File | Description
--- | --- | ---
`ios` | [`mobile/src/lib.rs`](./mobile/src/lib.rs) | A 3d Scene with a button and playing sound
## Wasm
### Setup
```sh
rustup target add wasm32-unknown-unknown
cargo install wasm-bindgen-cli
```
### Build & Run
The following shows how to build and run the `lighting` example; for other examples, replace `lighting` in the
following commands.
```sh
cargo build --release --example lighting --target wasm32-unknown-unknown
wasm-bindgen --out-name wasm_example \
--out-dir examples/wasm/target \
--target web target/wasm32-unknown-unknown/release/examples/lighting.wasm
```
The first command builds the example for the Wasm target, creating a binary. Then,
[wasm-bindgen-cli](https://rustwasm.github.io/wasm-bindgen/reference/cli.html) is used to create
JavaScript bindings to this Wasm file in the output file `examples/wasm/target/wasm_example.js`, which can be loaded using this
[example HTML file](./wasm/index.html).
Then serve the `examples/wasm` directory to a browser, e.g.:
```sh
# cargo install basic-http-server
basic-http-server examples/wasm
# with python
python3 -m http.server --directory examples/wasm
# with ruby
ruby -run -ehttpd examples/wasm
```
#### WebGL2 and WebGPU
Bevy support for WebGPU is being worked on, but is currently experimental.
To build for WebGPU, you'll need to enable the `webgpu` feature. This will override the `webgl2` feature, and builds with the `webgpu` feature enabled won't be able to run on browsers that don't support WebGPU.
Bevy has a helper to build its examples:
- Build for WebGL2: `cargo run -p build-wasm-example -- --api webgl2 load_gltf`
- Build for WebGPU: `cargo run -p build-wasm-example -- --api webgpu load_gltf`
This helper will log the command used to build the examples.
### Audio in the browsers
For the moment, everything is single-threaded; this can lead to stuttering when playing audio in browsers. Not all browsers react the same way for all games, so you will have to experiment for your game.
In browsers, audio is not allowed to start without being triggered by a user interaction. This is to avoid multiple tabs all starting to autoplay sounds. You can find more context and explanation for this on the [Google Chrome blog](https://developer.chrome.com/blog/web-audio-autoplay/). That page also describes a JS workaround to resume audio as soon as the user interacts with your game.
### Optimizing
On the web, it's useful to reduce the size of the files that are distributed.
With Rust, there are many ways to reduce your executable size, starting with
the steps described in [the quick-start guide](https://bevyengine.org/learn/quick-start/getting-started/setup/#compile-with-performance-optimizations).
Now, when building the executable, use `--profile wasm-release` instead of `--release`:
```sh
cargo build --profile wasm-release --example lighting --target wasm32-unknown-unknown
```
To apply `wasm-opt`, first locate the `.wasm` file generated in the `--out-dir` of the
earlier `wasm-bindgen-cli` command (the filename should end with `_bg.wasm`), then run:
```sh
wasm-opt -Oz --output optimized.wasm examples/wasm/target/lighting_bg.wasm
mv optimized.wasm examples/wasm/target/lighting_bg.wasm
```
Make sure your final executable size is actually smaller. Some optimizations
may not be worth keeping due to compilation time increases.
For a small project with a basic 3d model and two lights,
the generated file sizes are, as of July 2022, as follows:
profile | wasm-opt | no wasm-opt
----------------------------------|----------|-------------
Default | 8.5M | 13.0M
opt-level = "z" | 6.1M | 12.7M
"z" + lto = "thin" | 5.9M | 12M
"z" + lto = "fat" | 5.1M | 9.4M
"z" + "thin" + codegen-units = 1 | 5.3M | 11M
"z" + "fat" + codegen-units = 1 | 4.8M | 8.5M
### Loading Assets
To load assets, they need to be available in the `examples/wasm/assets` folder. Cloning this
repository will set it up as a symlink on Linux and macOS, but you will need to manually move
the assets on Windows.
129
vendor/bevy/examples/animation/animated_mesh.rs vendored Normal File
View File
@@ -0,0 +1,129 @@
//! Plays an animation on a skinned glTF model of a fox.
use std::f32::consts::PI;
use bevy::{pbr::CascadeShadowConfigBuilder, prelude::*, scene::SceneInstanceReady};
// An example asset that contains a mesh and animation.
const GLTF_PATH: &str = "models/animated/Fox.glb";
fn main() {
App::new()
.insert_resource(AmbientLight {
color: Color::WHITE,
brightness: 2000.,
..default()
})
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup_mesh_and_animation)
.add_systems(Startup, setup_camera_and_environment)
.run();
}
// A component that stores a reference to an animation we want to play. This is
// created when we start loading the mesh (see `setup_mesh_and_animation`) and
// read when the mesh has spawned (see `play_animation_once_loaded`).
#[derive(Component)]
struct AnimationToPlay {
graph_handle: Handle<AnimationGraph>,
index: AnimationNodeIndex,
}
fn setup_mesh_and_animation(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut graphs: ResMut<Assets<AnimationGraph>>,
) {
// Create an animation graph containing a single animation. We want the "run"
// animation from our example asset, which has an index of two.
let (graph, index) = AnimationGraph::from_clip(
asset_server.load(GltfAssetLabel::Animation(2).from_asset(GLTF_PATH)),
);
// Store the animation graph as an asset.
let graph_handle = graphs.add(graph);
// Create a component that stores a reference to our animation.
let animation_to_play = AnimationToPlay {
graph_handle,
index,
};
// Start loading the asset as a scene and store a reference to it in a
// SceneRoot component. This component will automatically spawn a scene
// containing our mesh once it has loaded.
let mesh_scene = SceneRoot(asset_server.load(GltfAssetLabel::Scene(0).from_asset(GLTF_PATH)));
// Spawn an entity with our components, and connect it to an observer that
// will trigger when the scene is loaded and spawned.
commands
.spawn((animation_to_play, mesh_scene))
.observe(play_animation_when_ready);
}
fn play_animation_when_ready(
trigger: Trigger<SceneInstanceReady>,
mut commands: Commands,
children: Query<&Children>,
animations_to_play: Query<&AnimationToPlay>,
mut players: Query<&mut AnimationPlayer>,
) {
// The entity we spawned in `setup_mesh_and_animation` is the trigger's target.
// Start by finding the AnimationToPlay component we added to that entity.
if let Ok(animation_to_play) = animations_to_play.get(trigger.target()) {
// The SceneRoot component will have spawned the scene as a hierarchy
// of entities parented to our entity. Since the asset contained a skinned
// mesh and animations, it will also have spawned an animation player
// component. Search our entity's descendants to find the animation player.
for child in children.iter_descendants(trigger.target()) {
if let Ok(mut player) = players.get_mut(child) {
// Tell the animation player to start the animation and keep
// repeating it.
//
// If you want to try stopping and switching animations, see the
// `animated_mesh_control.rs` example.
player.play(animation_to_play.index).repeat();
// Add the animation graph. This only needs to be done once to
// connect the animation player to the mesh.
commands
.entity(child)
.insert(AnimationGraphHandle(animation_to_play.graph_handle.clone()));
}
}
}
}
// Spawn a camera and a simple environment with a ground plane and light.
fn setup_camera_and_environment(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// Camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(100.0, 100.0, 150.0).looking_at(Vec3::new(0.0, 20.0, 0.0), Vec3::Y),
));
// Plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(500000.0, 500000.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
));
// Light
commands.spawn((
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, 1.0, -PI / 4.)),
DirectionalLight {
shadows_enabled: true,
..default()
},
CascadeShadowConfigBuilder {
first_cascade_far_bound: 200.0,
maximum_distance: 400.0,
..default()
}
.build(),
));
}
210
vendor/bevy/examples/animation/animated_mesh_control.rs vendored Normal File
View File
@@ -0,0 +1,210 @@
//! Plays animations from a skinned glTF.
use std::{f32::consts::PI, time::Duration};
use bevy::{animation::RepeatAnimation, pbr::CascadeShadowConfigBuilder, prelude::*};
const FOX_PATH: &str = "models/animated/Fox.glb";
fn main() {
App::new()
.insert_resource(AmbientLight {
color: Color::WHITE,
brightness: 2000.,
..default()
})
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(Update, setup_scene_once_loaded)
.add_systems(Update, keyboard_control)
.run();
}
#[derive(Resource)]
struct Animations {
animations: Vec<AnimationNodeIndex>,
graph_handle: Handle<AnimationGraph>,
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut graphs: ResMut<Assets<AnimationGraph>>,
) {
// Build the animation graph
let (graph, node_indices) = AnimationGraph::from_clips([
asset_server.load(GltfAssetLabel::Animation(2).from_asset(FOX_PATH)),
asset_server.load(GltfAssetLabel::Animation(1).from_asset(FOX_PATH)),
asset_server.load(GltfAssetLabel::Animation(0).from_asset(FOX_PATH)),
]);
// Keep our animation graph in a Resource so that it can be inserted onto
// the correct entity once the scene actually loads.
let graph_handle = graphs.add(graph);
commands.insert_resource(Animations {
animations: node_indices,
graph_handle,
});
// Camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(100.0, 100.0, 150.0).looking_at(Vec3::new(0.0, 20.0, 0.0), Vec3::Y),
));
// Plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(500000.0, 500000.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
));
// Light
commands.spawn((
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, 1.0, -PI / 4.)),
DirectionalLight {
shadows_enabled: true,
..default()
},
CascadeShadowConfigBuilder {
first_cascade_far_bound: 200.0,
maximum_distance: 400.0,
..default()
}
.build(),
));
// Fox
commands.spawn(SceneRoot(
asset_server.load(GltfAssetLabel::Scene(0).from_asset(FOX_PATH)),
));
// Instructions
commands.spawn((
Text::new(concat!(
"space: play / pause\n",
"up / down: playback speed\n",
"left / right: seek\n",
"1-3: play N times\n",
"L: loop forever\n",
"return: change animation\n",
)),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
// An `AnimationPlayer` is automatically added to the scene when it's ready.
// When the player is added, start the animation.
fn setup_scene_once_loaded(
mut commands: Commands,
animations: Res<Animations>,
mut players: Query<(Entity, &mut AnimationPlayer), Added<AnimationPlayer>>,
) {
for (entity, mut player) in &mut players {
let mut transitions = AnimationTransitions::new();
// Make sure to start the animation via the `AnimationTransitions`
// component. The `AnimationTransitions` component wants to manage all
// the animations and will get confused if the animations are started
// directly via the `AnimationPlayer`.
transitions
.play(&mut player, animations.animations[0], Duration::ZERO)
.repeat();
commands
.entity(entity)
.insert(AnimationGraphHandle(animations.graph_handle.clone()))
.insert(transitions);
}
}
fn keyboard_control(
keyboard_input: Res<ButtonInput<KeyCode>>,
mut animation_players: Query<(&mut AnimationPlayer, &mut AnimationTransitions)>,
animations: Res<Animations>,
mut current_animation: Local<usize>,
) {
for (mut player, mut transitions) in &mut animation_players {
let Some((&playing_animation_index, _)) = player.playing_animations().next() else {
continue;
};
if keyboard_input.just_pressed(KeyCode::Space) {
let playing_animation = player.animation_mut(playing_animation_index).unwrap();
if playing_animation.is_paused() {
playing_animation.resume();
} else {
playing_animation.pause();
}
}
if keyboard_input.just_pressed(KeyCode::ArrowUp) {
let playing_animation = player.animation_mut(playing_animation_index).unwrap();
let speed = playing_animation.speed();
playing_animation.set_speed(speed * 1.2);
}
if keyboard_input.just_pressed(KeyCode::ArrowDown) {
let playing_animation = player.animation_mut(playing_animation_index).unwrap();
let speed = playing_animation.speed();
playing_animation.set_speed(speed * 0.8);
}
if keyboard_input.just_pressed(KeyCode::ArrowLeft) {
let playing_animation = player.animation_mut(playing_animation_index).unwrap();
let elapsed = playing_animation.seek_time();
playing_animation.seek_to(elapsed - 0.1);
}
if keyboard_input.just_pressed(KeyCode::ArrowRight) {
let playing_animation = player.animation_mut(playing_animation_index).unwrap();
let elapsed = playing_animation.seek_time();
playing_animation.seek_to(elapsed + 0.1);
}
if keyboard_input.just_pressed(KeyCode::Enter) {
*current_animation = (*current_animation + 1) % animations.animations.len();
transitions
.play(
&mut player,
animations.animations[*current_animation],
Duration::from_millis(250),
)
.repeat();
}
if keyboard_input.just_pressed(KeyCode::Digit1) {
let playing_animation = player.animation_mut(playing_animation_index).unwrap();
playing_animation
.set_repeat(RepeatAnimation::Count(1))
.replay();
}
if keyboard_input.just_pressed(KeyCode::Digit2) {
let playing_animation = player.animation_mut(playing_animation_index).unwrap();
playing_animation
.set_repeat(RepeatAnimation::Count(2))
.replay();
}
if keyboard_input.just_pressed(KeyCode::Digit3) {
let playing_animation = player.animation_mut(playing_animation_index).unwrap();
playing_animation
.set_repeat(RepeatAnimation::Count(3))
.replay();
}
if keyboard_input.just_pressed(KeyCode::KeyL) {
let playing_animation = player.animation_mut(playing_animation_index).unwrap();
playing_animation.set_repeat(RepeatAnimation::Forever);
}
}
}
292
vendor/bevy/examples/animation/animated_mesh_events.rs vendored Normal File
View File
@@ -0,0 +1,292 @@
//! Plays animations from a skinned glTF.
use std::{f32::consts::PI, time::Duration};
use bevy::{
animation::AnimationTargetId, color::palettes::css::WHITE, pbr::CascadeShadowConfigBuilder,
prelude::*,
};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
const FOX_PATH: &str = "models/animated/Fox.glb";
fn main() {
App::new()
.insert_resource(AmbientLight {
color: Color::WHITE,
brightness: 2000.,
..default()
})
.add_plugins(DefaultPlugins)
.init_resource::<ParticleAssets>()
.init_resource::<FoxFeetTargets>()
.add_systems(Startup, setup)
.add_systems(Update, setup_scene_once_loaded)
.add_systems(Update, simulate_particles)
.add_observer(observe_on_step)
.run();
}
#[derive(Resource)]
struct SeededRng(ChaCha8Rng);
#[derive(Resource)]
struct Animations {
index: AnimationNodeIndex,
graph_handle: Handle<AnimationGraph>,
}
#[derive(Event, Reflect, Clone)]
struct OnStep;
fn observe_on_step(
trigger: Trigger<OnStep>,
particle: Res<ParticleAssets>,
mut commands: Commands,
transforms: Query<&GlobalTransform>,
mut seeded_rng: ResMut<SeededRng>,
) {
let translation = transforms.get(trigger.target()).unwrap().translation();
// Spawn a bunch of particles.
for _ in 0..14 {
let horizontal = seeded_rng.0.r#gen::<Dir2>() * seeded_rng.0.gen_range(8.0..12.0);
let vertical = seeded_rng.0.gen_range(0.0..4.0);
let size = seeded_rng.0.gen_range(0.2..1.0);
commands.spawn((
Particle {
lifetime_timer: Timer::from_seconds(
seeded_rng.0.gen_range(0.2..0.6),
TimerMode::Once,
),
size,
velocity: Vec3::new(horizontal.x, vertical, horizontal.y) * 10.0,
},
Mesh3d(particle.mesh.clone()),
MeshMaterial3d(particle.material.clone()),
Transform {
translation,
scale: Vec3::splat(size),
..Default::default()
},
));
}
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut graphs: ResMut<Assets<AnimationGraph>>,
) {
// Build the animation graph
let (graph, index) = AnimationGraph::from_clip(
// We specifically want the "run" animation, which is the third one.
asset_server.load(GltfAssetLabel::Animation(2).from_asset(FOX_PATH)),
);
// Insert a resource with the current scene information
let graph_handle = graphs.add(graph);
commands.insert_resource(Animations {
index,
graph_handle,
});
// Camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(100.0, 100.0, 150.0).looking_at(Vec3::new(0.0, 20.0, 0.0), Vec3::Y),
));
// Plane
commands.spawn((
Mesh3d(meshes.add(Plane3d::default().mesh().size(500000.0, 500000.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
));
// Light
commands.spawn((
Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, 1.0, -PI / 4.)),
DirectionalLight {
shadows_enabled: true,
..default()
},
CascadeShadowConfigBuilder {
first_cascade_far_bound: 200.0,
maximum_distance: 400.0,
..default()
}
.build(),
));
// Fox
commands.spawn(SceneRoot(
asset_server.load(GltfAssetLabel::Scene(0).from_asset(FOX_PATH)),
));
// We're seeding the PRNG here to make this example deterministic for testing purposes.
// This isn't strictly required in practical use unless you need your app to be deterministic.
let seeded_rng = ChaCha8Rng::seed_from_u64(19878367467712);
commands.insert_resource(SeededRng(seeded_rng));
}
// An `AnimationPlayer` is automatically added to the scene when it's ready.
// When the player is added, start the animation.
fn setup_scene_once_loaded(
mut commands: Commands,
animations: Res<Animations>,
feet: Res<FoxFeetTargets>,
graphs: Res<Assets<AnimationGraph>>,
mut clips: ResMut<Assets<AnimationClip>>,
mut players: Query<(Entity, &mut AnimationPlayer), Added<AnimationPlayer>>,
) {
fn get_clip<'a>(
node: AnimationNodeIndex,
graph: &AnimationGraph,
clips: &'a mut Assets<AnimationClip>,
) -> &'a mut AnimationClip {
let node = graph.get(node).unwrap();
let clip = match &node.node_type {
AnimationNodeType::Clip(handle) => clips.get_mut(handle),
_ => unreachable!(),
};
clip.unwrap()
}
for (entity, mut player) in &mut players {
// Send `OnStep` events once the fox feet hit the ground in the running animation.
let graph = graphs.get(&animations.graph_handle).unwrap();
let running_animation = get_clip(animations.index, graph, &mut clips);
// You can determine the time an event should trigger if you know which frame it occurs on and
// the frame rate of the animation. For example, to trigger an event at frame 15 of an
// animation running at 24 fps: time = 15 / 24 = 0.625.
running_animation.add_event_to_target(feet.front_left, 0.625, OnStep);
running_animation.add_event_to_target(feet.front_right, 0.5, OnStep);
running_animation.add_event_to_target(feet.back_left, 0.0, OnStep);
running_animation.add_event_to_target(feet.back_right, 0.125, OnStep);
// Start the animation
let mut transitions = AnimationTransitions::new();
// Make sure to start the animation via the `AnimationTransitions`
// component. The `AnimationTransitions` component wants to manage all
// the animations and will get confused if the animations are started
// directly via the `AnimationPlayer`.
transitions
.play(&mut player, animations.index, Duration::ZERO)
.repeat();
commands
.entity(entity)
.insert(AnimationGraphHandle(animations.graph_handle.clone()))
.insert(transitions);
}
}
fn simulate_particles(
mut commands: Commands,
mut query: Query<(Entity, &mut Transform, &mut Particle)>,
time: Res<Time>,
) {
for (entity, mut transform, mut particle) in &mut query {
if particle.lifetime_timer.tick(time.delta()).just_finished() {
commands.entity(entity).despawn();
return;
}
transform.translation += particle.velocity * time.delta_secs();
transform.scale = Vec3::splat(particle.size.lerp(0.0, particle.lifetime_timer.fraction()));
particle
.velocity
.smooth_nudge(&Vec3::ZERO, 4.0, time.delta_secs());
}
}
#[derive(Component)]
struct Particle {
lifetime_timer: Timer,
size: f32,
velocity: Vec3,
}
#[derive(Resource)]
struct ParticleAssets {
mesh: Handle<Mesh>,
material: Handle<StandardMaterial>,
}
impl FromWorld for ParticleAssets {
fn from_world(world: &mut World) -> Self {
Self {
mesh: world.add_asset::<Mesh>(Sphere::new(10.0)),
material: world.add_asset::<StandardMaterial>(StandardMaterial {
base_color: WHITE.into(),
..Default::default()
}),
}
}
}
/// Stores the `AnimationTargetId`s of the fox's feet
#[derive(Resource)]
struct FoxFeetTargets {
front_right: AnimationTargetId,
front_left: AnimationTargetId,
back_left: AnimationTargetId,
back_right: AnimationTargetId,
}
impl Default for FoxFeetTargets {
fn default() -> Self {
let hip_node = ["root", "_rootJoint", "b_Root_00", "b_Hip_01"];
let front_left_foot = hip_node.iter().chain(
[
"b_Spine01_02",
"b_Spine02_03",
"b_LeftUpperArm_09",
"b_LeftForeArm_010",
"b_LeftHand_011",
]
.iter(),
);
let front_right_foot = hip_node.iter().chain(
[
"b_Spine01_02",
"b_Spine02_03",
"b_RightUpperArm_06",
"b_RightForeArm_07",
"b_RightHand_08",
]
.iter(),
);
let back_left_foot = hip_node.iter().chain(
[
"b_LeftLeg01_015",
"b_LeftLeg02_016",
"b_LeftFoot01_017",
"b_LeftFoot02_018",
]
.iter(),
);
let back_right_foot = hip_node.iter().chain(
[
"b_RightLeg01_019",
"b_RightLeg02_020",
"b_RightFoot01_021",
"b_RightFoot02_022",
]
.iter(),
);
Self {
front_left: AnimationTargetId::from_iter(front_left_foot),
front_right: AnimationTargetId::from_iter(front_right_foot),
back_left: AnimationTargetId::from_iter(back_left_foot),
back_right: AnimationTargetId::from_iter(back_right_foot),
}
}
}
185
vendor/bevy/examples/animation/animated_transform.rs vendored Normal File
View File
@@ -0,0 +1,185 @@
//! Create and play an animation defined by code that operates on the [`Transform`] component.
use std::f32::consts::PI;
use bevy::{
animation::{animated_field, AnimationTarget, AnimationTargetId},
prelude::*,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.insert_resource(AmbientLight {
color: Color::WHITE,
brightness: 150.0,
..default()
})
.add_systems(Startup, setup)
.run();
}
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut animations: ResMut<Assets<AnimationClip>>,
mut graphs: ResMut<Assets<AnimationGraph>>,
) {
// Camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
));
// Light
commands.spawn((
PointLight {
intensity: 500_000.0,
..default()
},
Transform::from_xyz(0.0, 2.5, 0.0),
));
// Let's use the `Name` component to target entities. We can use anything we
// like, but names are convenient.
let planet = Name::new("planet");
let orbit_controller = Name::new("orbit_controller");
let satellite = Name::new("satellite");
// Creating the animation
let mut animation = AnimationClip::default();
// A curve can modify a single part of a transform: here, the translation.
let planet_animation_target_id = AnimationTargetId::from_name(&planet);
animation.add_curve_to_target(
planet_animation_target_id,
AnimatableCurve::new(
animated_field!(Transform::translation),
UnevenSampleAutoCurve::new([0.0, 1.0, 2.0, 3.0, 4.0].into_iter().zip([
Vec3::new(1.0, 0.0, 1.0),
Vec3::new(-1.0, 0.0, 1.0),
Vec3::new(-1.0, 0.0, -1.0),
Vec3::new(1.0, 0.0, -1.0),
// in case seamless looping is wanted, the last keyframe should
// be the same as the first one
Vec3::new(1.0, 0.0, 1.0),
]))
.expect("should be able to build translation curve because we pass in valid samples"),
),
);
// Or it can modify the rotation of the transform.
// To find the entity to modify, the hierarchy will be traversed looking for
// an entity with the right name at each level.
let orbit_controller_animation_target_id =
AnimationTargetId::from_names([planet.clone(), orbit_controller.clone()].iter());
animation.add_curve_to_target(
orbit_controller_animation_target_id,
AnimatableCurve::new(
animated_field!(Transform::rotation),
UnevenSampleAutoCurve::new([0.0, 1.0, 2.0, 3.0, 4.0].into_iter().zip([
Quat::IDENTITY,
Quat::from_axis_angle(Vec3::Y, PI / 2.),
Quat::from_axis_angle(Vec3::Y, PI / 2. * 2.),
Quat::from_axis_angle(Vec3::Y, PI / 2. * 3.),
Quat::IDENTITY,
]))
.expect("Failed to build rotation curve"),
),
);
// If a curve in an animation is shorter than the others, it will not repeat
// until all other curves are finished. In that case, another animation should
// be created for each part that would have a different duration / period.
let satellite_animation_target_id = AnimationTargetId::from_names(
[planet.clone(), orbit_controller.clone(), satellite.clone()].iter(),
);
animation.add_curve_to_target(
satellite_animation_target_id,
AnimatableCurve::new(
animated_field!(Transform::scale),
UnevenSampleAutoCurve::new(
[0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0]
.into_iter()
.zip([
Vec3::splat(0.8),
Vec3::splat(1.2),
Vec3::splat(0.8),
Vec3::splat(1.2),
Vec3::splat(0.8),
Vec3::splat(1.2),
Vec3::splat(0.8),
Vec3::splat(1.2),
Vec3::splat(0.8),
]),
)
.expect("Failed to build scale curve"),
),
);
// There can be more than one curve targeting the same entity path.
animation.add_curve_to_target(
AnimationTargetId::from_names(
[planet.clone(), orbit_controller.clone(), satellite.clone()].iter(),
),
AnimatableCurve::new(
animated_field!(Transform::rotation),
UnevenSampleAutoCurve::new([0.0, 1.0, 2.0, 3.0, 4.0].into_iter().zip([
Quat::IDENTITY,
Quat::from_axis_angle(Vec3::Y, PI / 2.),
Quat::from_axis_angle(Vec3::Y, PI / 2. * 2.),
Quat::from_axis_angle(Vec3::Y, PI / 2. * 3.),
Quat::IDENTITY,
]))
.expect("should be able to build translation curve because we pass in valid samples"),
),
);
// Create the animation graph
let (graph, animation_index) = AnimationGraph::from_clip(animations.add(animation));
// Create the animation player, and set it to repeat
let mut player = AnimationPlayer::default();
player.play(animation_index).repeat();
// Create the scene that will be animated
// First entity is the planet
let planet_entity = commands
.spawn((
Mesh3d(meshes.add(Sphere::default())),
MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),
// Add the animation graph and player
planet,
AnimationGraphHandle(graphs.add(graph)),
player,
))
.id();
commands
.entity(planet_entity)
.insert(AnimationTarget {
id: planet_animation_target_id,
player: planet_entity,
})
.with_children(|p| {
// This entity is just used for animation, but doesn't display anything
p.spawn((
Transform::default(),
Visibility::default(),
orbit_controller,
AnimationTarget {
id: orbit_controller_animation_target_id,
player: planet_entity,
},
))
.with_children(|p| {
// The satellite, placed at a distance of the planet
p.spawn((
Mesh3d(meshes.add(Cuboid::new(0.5, 0.5, 0.5))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.9, 0.3))),
Transform::from_xyz(1.5, 0.0, 0.0),
AnimationTarget {
id: satellite_animation_target_id,
player: planet_entity,
},
satellite,
));
});
});
}
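// A minimal sketch, not part of the original example (the target id is just an assumed
// parameter): the same `add_curve_to_target` pattern works with any curve that produces
// the animated field's type, e.g. an `AnimatableKeyframeCurve` that interpolates
// `Transform::scale` between keyframes.
#[allow(dead_code)]
fn build_pulse_clip(target_id: AnimationTargetId) -> AnimationClip {
    let mut clip = AnimationClip::default();
    clip.add_curve_to_target(
        target_id,
        AnimatableCurve::new(
            animated_field!(Transform::scale),
            AnimatableKeyframeCurve::new(
                // Keyframe times zipped with the scale to reach at each time.
                [0.0, 1.0, 2.0]
                    .into_iter()
                    .zip([Vec3::ONE, Vec3::splat(1.5), Vec3::ONE]),
            )
            .expect("keyframe times are finite and strictly increasing"),
        ),
    );
    clip
}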

View File

@@ -0,0 +1,197 @@
//! Shows how to use animation clips to animate UI properties.
use bevy::{
animation::{
animated_field, AnimationEntityMut, AnimationEvaluationError, AnimationTarget,
AnimationTargetId,
},
prelude::*,
};
use std::any::TypeId;
// Holds information about the animation we programmatically create.
struct AnimationInfo {
// The name of the animation target (in this case, the text).
target_name: Name,
// The ID of the animation target, derived from the name.
target_id: AnimationTargetId,
// The animation graph asset.
graph: Handle<AnimationGraph>,
// The index of the node within that graph.
node_index: AnimationNodeIndex,
}
// The entry point.
fn main() {
App::new()
.add_plugins(DefaultPlugins)
// Note that we don't need any systems other than the setup system,
// because Bevy automatically updates animations every frame.
.add_systems(Startup, setup)
.run();
}
impl AnimationInfo {
// Programmatically creates the UI animation.
fn create(
animation_graphs: &mut Assets<AnimationGraph>,
animation_clips: &mut Assets<AnimationClip>,
) -> AnimationInfo {
// Create an ID that identifies the text node we're going to animate.
let animation_target_name = Name::new("Text");
let animation_target_id = AnimationTargetId::from_name(&animation_target_name);
// Allocate an animation clip.
let mut animation_clip = AnimationClip::default();
// Create a curve that animates font size.
animation_clip.add_curve_to_target(
animation_target_id,
AnimatableCurve::new(
animated_field!(TextFont::font_size),
AnimatableKeyframeCurve::new(
[0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0]
.into_iter()
.zip([24.0, 80.0, 24.0, 80.0, 24.0, 80.0, 24.0]),
)
.expect(
"should be able to build translation curve because we pass in valid samples",
),
),
);
// Create a curve that animates font color. Note that this should have
// the same time duration as the previous curve.
//
// This time we use a "custom property", which in this case animates TextColor under the assumption
// that it is in the "srgba" format.
animation_clip.add_curve_to_target(
animation_target_id,
AnimatableCurve::new(
TextColorProperty,
AnimatableKeyframeCurve::new([0.0, 1.0, 2.0, 3.0].into_iter().zip([
Srgba::RED,
Srgba::GREEN,
Srgba::BLUE,
Srgba::RED,
]))
.expect(
"should be able to build translation curve because we pass in valid samples",
),
),
);
// Save our animation clip as an asset.
let animation_clip_handle = animation_clips.add(animation_clip);
// Create an animation graph with that clip.
let (animation_graph, animation_node_index) =
AnimationGraph::from_clip(animation_clip_handle);
let animation_graph_handle = animation_graphs.add(animation_graph);
AnimationInfo {
target_name: animation_target_name,
target_id: animation_target_id,
graph: animation_graph_handle,
node_index: animation_node_index,
}
}
}
// Creates all the entities in the scene.
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut animation_graphs: ResMut<Assets<AnimationGraph>>,
mut animation_clips: ResMut<Assets<AnimationClip>>,
) {
// Create the animation.
let AnimationInfo {
target_name: animation_target_name,
target_id: animation_target_id,
graph: animation_graph,
node_index: animation_node_index,
} = AnimationInfo::create(&mut animation_graphs, &mut animation_clips);
// Build an animation player that automatically plays the UI animation.
let mut animation_player = AnimationPlayer::default();
animation_player.play(animation_node_index).repeat();
// Add a camera.
commands.spawn(Camera2d);
// Build the UI. We have a parent node that covers the whole screen and
// contains the `AnimationPlayer`, as well as a child node that contains the
// text to be animated.
commands
.spawn((
// Cover the whole screen, and center contents.
Node {
position_type: PositionType::Absolute,
top: Val::Px(0.0),
left: Val::Px(0.0),
right: Val::Px(0.0),
bottom: Val::Px(0.0),
justify_content: JustifyContent::Center,
align_items: AlignItems::Center,
..default()
},
animation_player,
AnimationGraphHandle(animation_graph),
))
.with_children(|builder| {
// Build the text node.
let player = builder.target_entity();
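            // `target_entity()` is the parent entity spawned just above (the full-screen
            // node that owns the `AnimationPlayer`), so the `AnimationTarget` inserted
            // below can point back at it.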
builder
.spawn((
Text::new("Bevy"),
TextFont {
font: asset_server.load("fonts/FiraSans-Bold.ttf"),
font_size: 24.0,
..default()
},
TextColor(Color::Srgba(Srgba::RED)),
TextLayout::new_with_justify(JustifyText::Center),
))
// Mark as an animation target.
.insert(AnimationTarget {
id: animation_target_id,
player,
})
.insert(animation_target_name);
});
}
// A type that represents the color of the first text section.
//
// We implement `AnimatableProperty` on this to define custom property accessor logic
#[derive(Clone)]
struct TextColorProperty;
impl AnimatableProperty for TextColorProperty {
type Property = Srgba;
fn evaluator_id(&self) -> EvaluatorId {
EvaluatorId::Type(TypeId::of::<Self>())
}
fn get_mut<'a>(
&self,
entity: &'a mut AnimationEntityMut,
) -> Result<&'a mut Self::Property, AnimationEvaluationError> {
let text_color = entity
.get_mut::<TextColor>()
.ok_or(AnimationEvaluationError::ComponentNotPresent(TypeId::of::<
TextColor,
>(
)))?
.into_inner();
match text_color.0 {
Color::Srgba(ref mut color) => Ok(color),
_ => Err(AnimationEvaluationError::PropertyNotPresent(TypeId::of::<
Srgba,
>(
))),
}
}
}
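// A hypothetical variation, not part of the original example: the same
// `AnimatableProperty` pattern can expose other color-bearing components. This sketch
// mirrors `TextColorProperty` above and assumes a `BackgroundColor` stored as
// `Color::Srgba`.
#[derive(Clone)]
#[allow(dead_code)]
struct BackgroundSrgbaProperty;
impl AnimatableProperty for BackgroundSrgbaProperty {
    type Property = Srgba;
    fn evaluator_id(&self) -> EvaluatorId {
        EvaluatorId::Type(TypeId::of::<Self>())
    }
    fn get_mut<'a>(
        &self,
        entity: &'a mut AnimationEntityMut,
    ) -> Result<&'a mut Self::Property, AnimationEvaluationError> {
        let background = entity
            .get_mut::<BackgroundColor>()
            .ok_or(AnimationEvaluationError::ComponentNotPresent(TypeId::of::<
                BackgroundColor,
            >(
            )))?
            .into_inner();
        match background.0 {
            // Hand out the underlying `Srgba` so the animation system can write to it.
            Color::Srgba(ref mut color) => Ok(color),
            _ => Err(AnimationEvaluationError::PropertyNotPresent(TypeId::of::<
                Srgba,
            >(
            ))),
        }
    }
}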

View File

@@ -0,0 +1,104 @@
//! Demonstrate how to use animation events.
use bevy::{
color::palettes::css::{ALICE_BLUE, BLACK, CRIMSON},
core_pipeline::bloom::Bloom,
prelude::*,
};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_event::<MessageEvent>()
.add_systems(Startup, setup)
.add_systems(Update, animate_text_opacity)
.add_observer(edit_message)
.run();
}
#[derive(Component)]
struct MessageText;
#[derive(Event, Clone)]
struct MessageEvent {
value: String,
color: Color,
}
fn edit_message(
trigger: Trigger<MessageEvent>,
text: Single<(&mut Text2d, &mut TextColor), With<MessageText>>,
) {
let (mut text, mut color) = text.into_inner();
text.0 = trigger.event().value.clone();
color.0 = trigger.event().color;
}
fn setup(
mut commands: Commands,
mut animations: ResMut<Assets<AnimationClip>>,
mut graphs: ResMut<Assets<AnimationGraph>>,
) {
// Camera
commands.spawn((
Camera2d,
Camera {
clear_color: ClearColorConfig::Custom(BLACK.into()),
hdr: true,
..Default::default()
},
Bloom {
intensity: 0.4,
..Bloom::NATURAL
},
));
// The text that will be changed by animation events.
commands.spawn((
MessageText,
Text2d::default(),
TextFont {
font_size: 119.0,
..default()
},
TextColor(Color::NONE),
));
// Create a new animation clip.
let mut animation = AnimationClip::default();
// This is only necessary if you want the duration of the
// animation to be longer than the last event in the clip.
animation.set_duration(2.0);
// Add events at the specified time.
animation.add_event(
0.0,
MessageEvent {
value: "HELLO".into(),
color: ALICE_BLUE.into(),
},
);
animation.add_event(
1.0,
MessageEvent {
value: "BYE".into(),
color: CRIMSON.into(),
},
);
// Create the animation graph.
let (graph, animation_index) = AnimationGraph::from_clip(animations.add(animation));
let mut player = AnimationPlayer::default();
player.play(animation_index).repeat();
commands.spawn((AnimationGraphHandle(graphs.add(graph)), player));
}
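// A hedged sketch, not part of the original example: events can also be scoped to a
// single animation target rather than the whole clip via `add_event_to_target`. The
// `target_id` parameter here is hypothetical; in a real scene it would be derived from
// the names of the bones (or other entities) the event should be associated with.
#[allow(dead_code)]
fn add_targeted_event(
    animation: &mut AnimationClip,
    target_id: bevy::animation::AnimationTargetId,
) {
    animation.add_event_to_target(
        target_id,
        0.5,
        MessageEvent {
            value: "TARGETED".into(),
            color: Color::WHITE,
        },
    );
}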
// Slowly fade out the text opacity.
fn animate_text_opacity(mut colors: Query<&mut TextColor>, time: Res<Time>) {
for mut color in &mut colors {
let a = color.0.alpha();
color.0.set_alpha(a - time.delta_secs());
}
}

View File

@@ -0,0 +1,557 @@
//! Demonstrates animation blending with animation graphs.
//!
//! The animation graph is shown on screen. You can change the weights of the
//! playing animations by clicking and dragging left or right within the nodes.
use bevy::{
color::palettes::{
basic::WHITE,
css::{ANTIQUE_WHITE, DARK_GREEN},
},
prelude::*,
ui::RelativeCursorPosition,
};
use argh::FromArgs;
#[cfg(not(target_arch = "wasm32"))]
use {
bevy::{asset::io::file::FileAssetReader, tasks::IoTaskPool},
ron::ser::PrettyConfig,
std::{fs::File, path::Path},
};
/// Where to find the serialized animation graph.
static ANIMATION_GRAPH_PATH: &str = "animation_graphs/Fox.animgraph.ron";
/// The indices of the nodes containing animation clips in the graph.
static CLIP_NODE_INDICES: [u32; 3] = [2, 3, 4];
/// The help text in the upper left corner.
static HELP_TEXT: &str = "Click and drag an animation clip node to change its weight";
/// The node widgets in the UI.
static NODE_TYPES: [NodeType; 5] = [
NodeType::Clip(ClipNode::new("Idle", 0)),
NodeType::Clip(ClipNode::new("Walk", 1)),
NodeType::Blend("Root"),
NodeType::Blend("Blend\n0.5"),
NodeType::Clip(ClipNode::new("Run", 2)),
];
/// The positions of the node widgets in the UI.
///
/// These are in the same order as [`NODE_TYPES`] above.
static NODE_RECTS: [NodeRect; 5] = [
NodeRect::new(10.00, 10.00, 97.64, 48.41),
NodeRect::new(10.00, 78.41, 97.64, 48.41),
NodeRect::new(286.08, 78.41, 97.64, 48.41),
NodeRect::new(148.04, 112.61, 97.64, 48.41), // was 44.20
NodeRect::new(10.00, 146.82, 97.64, 48.41),
];
/// The positions of the horizontal lines in the UI.
static HORIZONTAL_LINES: [Line; 6] = [
Line::new(107.64, 34.21, 158.24),
Line::new(107.64, 102.61, 20.20),
Line::new(107.64, 171.02, 20.20),
Line::new(127.84, 136.82, 20.20),
Line::new(245.68, 136.82, 20.20),
Line::new(265.88, 102.61, 20.20),
];
/// The positions of the vertical lines in the UI.
static VERTICAL_LINES: [Line; 2] = [
Line::new(127.83, 102.61, 68.40),
Line::new(265.88, 34.21, 102.61),
];
/// Initializes the app.
fn main() {
#[cfg(not(target_arch = "wasm32"))]
let args: Args = argh::from_env();
#[cfg(target_arch = "wasm32")]
let args = Args::from_args(&[], &[]).unwrap();
App::new()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Animation Graph Example".into(),
..default()
}),
..default()
}))
.add_systems(Startup, (setup_assets, setup_scene, setup_ui))
.add_systems(Update, init_animations)
.add_systems(
Update,
(handle_weight_drag, update_ui, sync_weights).chain(),
)
.insert_resource(args)
.insert_resource(AmbientLight {
color: WHITE.into(),
brightness: 100.0,
..default()
})
.run();
}
/// Demonstrates animation blending with animation graphs
#[derive(FromArgs, Resource)]
struct Args {
/// disables loading of the animation graph asset from disk
#[argh(switch)]
no_load: bool,
/// regenerates the asset file; implies `--no-load`
#[argh(switch)]
save: bool,
}
/// The [`AnimationGraph`] asset, which specifies how the animations are to
/// be blended together.
#[derive(Clone, Resource)]
struct ExampleAnimationGraph(Handle<AnimationGraph>);
/// The current weights of the three playing animations.
#[derive(Component)]
struct ExampleAnimationWeights {
/// The weights of the three playing animations.
weights: [f32; 3],
}
/// Initializes the scene.
fn setup_assets(
mut commands: Commands,
mut asset_server: ResMut<AssetServer>,
mut animation_graphs: ResMut<Assets<AnimationGraph>>,
args: Res<Args>,
) {
// Create or load the assets.
if args.no_load || args.save {
setup_assets_programmatically(
&mut commands,
&mut asset_server,
&mut animation_graphs,
args.save,
);
} else {
setup_assets_via_serialized_animation_graph(&mut commands, &mut asset_server);
}
}
fn setup_ui(mut commands: Commands) {
setup_help_text(&mut commands);
setup_node_rects(&mut commands);
setup_node_lines(&mut commands);
}
/// Creates the assets programmatically, including the animation graph.
/// Optionally saves them to disk if `save` is present (corresponding to the
/// `--save` option).
fn setup_assets_programmatically(
commands: &mut Commands,
asset_server: &mut AssetServer,
animation_graphs: &mut Assets<AnimationGraph>,
_save: bool,
) {
// Create the nodes.
let mut animation_graph = AnimationGraph::new();
let blend_node = animation_graph.add_blend(0.5, animation_graph.root);
animation_graph.add_clip(
asset_server.load(GltfAssetLabel::Animation(0).from_asset("models/animated/Fox.glb")),
1.0,
animation_graph.root,
);
animation_graph.add_clip(
asset_server.load(GltfAssetLabel::Animation(1).from_asset("models/animated/Fox.glb")),
1.0,
blend_node,
);
animation_graph.add_clip(
asset_server.load(GltfAssetLabel::Animation(2).from_asset("models/animated/Fox.glb")),
1.0,
blend_node,
);
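    // A sketch of the resulting graph (for orientation only, not generated output):
    //
    //   Root
    //   +-- clip 0 ("Idle"), weight 1.0
    //   +-- Blend (weight 0.5)
    //        +-- clip 1 ("Walk"), weight 1.0
    //        +-- clip 2 ("Run"),  weight 1.0
    //
    // so Walk and Run are blended with each other first, and that result is then
    // combined with Idle at the root.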
// If asked to save, do so.
#[cfg(not(target_arch = "wasm32"))]
if _save {
let animation_graph = animation_graph.clone();
IoTaskPool::get()
.spawn(async move {
let mut animation_graph_writer = File::create(Path::join(
&FileAssetReader::get_base_path(),
Path::join(Path::new("assets"), Path::new(ANIMATION_GRAPH_PATH)),
))
.expect("Failed to open the animation graph asset");
ron::ser::to_writer_pretty(
&mut animation_graph_writer,
&animation_graph,
PrettyConfig::default(),
)
.expect("Failed to serialize the animation graph");
})
.detach();
}
// Add the graph.
let handle = animation_graphs.add(animation_graph);
// Save the assets in a resource.
commands.insert_resource(ExampleAnimationGraph(handle));
}
fn setup_assets_via_serialized_animation_graph(
commands: &mut Commands,
asset_server: &mut AssetServer,
) {
commands.insert_resource(ExampleAnimationGraph(
asset_server.load(ANIMATION_GRAPH_PATH),
));
}
/// Spawns the animated fox.
fn setup_scene(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-10.0, 5.0, 13.0).looking_at(Vec3::new(0., 1., 0.), Vec3::Y),
));
commands.spawn((
PointLight {
intensity: 10_000_000.0,
shadows_enabled: true,
..default()
},
Transform::from_xyz(-4.0, 8.0, 13.0),
));
commands.spawn((
SceneRoot(
asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/animated/Fox.glb")),
),
Transform::from_scale(Vec3::splat(0.07)),
));
// Ground
commands.spawn((
Mesh3d(meshes.add(Circle::new(7.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
Transform::from_rotation(Quat::from_rotation_x(-std::f32::consts::FRAC_PI_2)),
));
}
/// Places the help text at the top left of the window.
fn setup_help_text(commands: &mut Commands) {
commands.spawn((
Text::new(HELP_TEXT),
Node {
position_type: PositionType::Absolute,
top: Val::Px(12.0),
left: Val::Px(12.0),
..default()
},
));
}
/// Initializes the node UI widgets.
fn setup_node_rects(commands: &mut Commands) {
for (node_rect, node_type) in NODE_RECTS.iter().zip(NODE_TYPES.iter()) {
let node_string = match *node_type {
NodeType::Clip(ref clip) => clip.text,
NodeType::Blend(text) => text,
};
let text = commands
.spawn((
Text::new(node_string),
TextFont {
font_size: 16.0,
..default()
},
TextColor(ANTIQUE_WHITE.into()),
TextLayout::new_with_justify(JustifyText::Center),
))
.id();
let container = {
let mut container = commands.spawn((
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(node_rect.bottom),
left: Val::Px(node_rect.left),
height: Val::Px(node_rect.height),
width: Val::Px(node_rect.width),
align_items: AlignItems::Center,
justify_items: JustifyItems::Center,
align_content: AlignContent::Center,
justify_content: JustifyContent::Center,
..default()
},
BorderColor(WHITE.into()),
Outline::new(Val::Px(1.), Val::ZERO, Color::WHITE),
));
if let NodeType::Clip(clip) = node_type {
container.insert((
Interaction::None,
RelativeCursorPosition::default(),
(*clip).clone(),
));
}
container.id()
};
// Create the background color.
if let NodeType::Clip(_) = node_type {
let background = commands
.spawn((
Node {
position_type: PositionType::Absolute,
top: Val::Px(0.),
left: Val::Px(0.),
height: Val::Px(node_rect.height),
width: Val::Px(node_rect.width),
..default()
},
BackgroundColor(DARK_GREEN.into()),
))
.id();
commands.entity(container).add_child(background);
}
commands.entity(container).add_child(text);
}
}
/// Creates boxes for the horizontal and vertical lines.
///
/// This is a bit hacky: it uses 1-pixel-wide and 1-pixel-high boxes to draw
/// vertical and horizontal lines, respectively.
fn setup_node_lines(commands: &mut Commands) {
for line in &HORIZONTAL_LINES {
commands.spawn((
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(line.bottom),
left: Val::Px(line.left),
height: Val::Px(0.0),
width: Val::Px(line.length),
border: UiRect::bottom(Val::Px(1.0)),
..default()
},
BorderColor(WHITE.into()),
));
}
for line in &VERTICAL_LINES {
commands.spawn((
Node {
position_type: PositionType::Absolute,
bottom: Val::Px(line.bottom),
left: Val::Px(line.left),
height: Val::Px(line.length),
width: Val::Px(0.0),
border: UiRect::left(Val::Px(1.0)),
..default()
},
BorderColor(WHITE.into()),
));
}
}
/// Attaches the animation graph to the scene, and plays all three animations.
fn init_animations(
mut commands: Commands,
mut query: Query<(Entity, &mut AnimationPlayer)>,
animation_graph: Res<ExampleAnimationGraph>,
mut done: Local<bool>,
) {
if *done {
return;
}
for (entity, mut player) in query.iter_mut() {
commands.entity(entity).insert((
AnimationGraphHandle(animation_graph.0.clone()),
ExampleAnimationWeights::default(),
));
for &node_index in &CLIP_NODE_INDICES {
player.play(node_index.into()).repeat();
}
*done = true;
}
}
/// Read cursor position relative to clip nodes, allowing the user to change weights
/// when dragging the node UI widgets.
fn handle_weight_drag(
mut interaction_query: Query<(&Interaction, &RelativeCursorPosition, &ClipNode)>,
mut animation_weights_query: Query<&mut ExampleAnimationWeights>,
) {
for (interaction, relative_cursor, clip_node) in &mut interaction_query {
if !matches!(*interaction, Interaction::Pressed) {
continue;
}
let Some(pos) = relative_cursor.normalized else {
continue;
};
for mut animation_weights in animation_weights_query.iter_mut() {
animation_weights.weights[clip_node.index] = pos.x.clamp(0., 1.);
}
}
}
// Updates the UI based on the weights that the user has chosen.
fn update_ui(
mut text_query: Query<&mut Text>,
mut background_query: Query<&mut Node, Without<Text>>,
container_query: Query<(&Children, &ClipNode)>,
animation_weights_query: Query<&ExampleAnimationWeights, Changed<ExampleAnimationWeights>>,
) {
for animation_weights in animation_weights_query.iter() {
for (children, clip_node) in &container_query {
// Draw the green background color to visually indicate the weight.
let mut bg_iter = background_query.iter_many_mut(children);
if let Some(mut node) = bg_iter.fetch_next() {
// All nodes are the same width, so `NODE_RECTS[0]` is as good as any other.
node.width =
Val::Px(NODE_RECTS[0].width * animation_weights.weights[clip_node.index]);
}
// Update the node labels with the current weights.
let mut text_iter = text_query.iter_many_mut(children);
if let Some(mut text) = text_iter.fetch_next() {
**text = format!(
"{}\n{:.2}",
clip_node.text, animation_weights.weights[clip_node.index]
);
}
}
}
}
/// Takes the weights that were set in the UI and assigns them to the actual
/// playing animation.
fn sync_weights(mut query: Query<(&mut AnimationPlayer, &ExampleAnimationWeights)>) {
for (mut animation_player, animation_weights) in query.iter_mut() {
for (&animation_node_index, &animation_weight) in CLIP_NODE_INDICES
.iter()
.zip(animation_weights.weights.iter())
{
// If the animation happens to be no longer active, restart it.
if !animation_player.is_playing_animation(animation_node_index.into()) {
animation_player.play(animation_node_index.into());
}
// Set the weight.
if let Some(active_animation) =
animation_player.animation_mut(animation_node_index.into())
{
active_animation.set_weight(animation_weight);
}
}
}
}
/// An on-screen representation of a node.
#[derive(Debug)]
struct NodeRect {
/// The number of pixels that this rectangle is from the left edge of the
/// window.
left: f32,
/// The number of pixels that this rectangle is from the bottom edge of the
/// window.
bottom: f32,
/// The width of this rectangle in pixels.
width: f32,
/// The height of this rectangle in pixels.
height: f32,
}
/// Either a straight horizontal or a straight vertical line on screen.
///
/// The line starts at (`left`, `bottom`) and goes either right (if the line is
/// horizontal) or down (if the line is vertical).
struct Line {
/// The number of pixels that the start of this line is from the left edge
/// of the screen.
left: f32,
/// The number of pixels that the start of this line is from the bottom edge
/// of the screen.
bottom: f32,
/// The length of the line.
length: f32,
}
/// The type of each node in the UI: either a clip node or a blend node.
enum NodeType {
/// A clip node, which specifies an animation.
Clip(ClipNode),
/// A blend node with no animation and a string label.
Blend(&'static str),
}
/// The label for the UI representation of a clip node.
#[derive(Clone, Component)]
struct ClipNode {
/// The string label of the node.
text: &'static str,
/// Which of the three animations this UI widget represents.
index: usize,
}
impl Default for ExampleAnimationWeights {
fn default() -> Self {
Self { weights: [1.0; 3] }
}
}
impl ClipNode {
    /// Creates a new [`ClipNode`] from a label and the animation index.
const fn new(text: &'static str, index: usize) -> Self {
Self { text, index }
}
}
impl NodeRect {
/// Creates a new [`NodeRect`] from the lower-left corner and size.
///
/// Note that node rectangles are anchored in the *lower*-left corner. The
/// `bottom` parameter specifies vertical distance from the *bottom* of the
/// window.
const fn new(left: f32, bottom: f32, width: f32, height: f32) -> NodeRect {
NodeRect {
left,
bottom,
width,
height,
}
}
}
impl Line {
/// Creates a new [`Line`], either horizontal or vertical.
///
/// Note that the line's start point is anchored in the lower-*left* corner,
/// and that the `length` extends either to the right or downward.
const fn new(left: f32, bottom: f32, length: f32) -> Self {
Self {
left,
bottom,
length,
}
}
}
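// Small illustrative helper, not part of the original file: with the lower-left
// anchoring convention documented on `NodeRect`, the center of a node rectangle
// (for example, where a connector line attaches) can be computed like this.
impl NodeRect {
    #[allow(dead_code)]
    fn center(&self) -> Vec2 {
        Vec2::new(
            self.left + self.width * 0.5,
            self.bottom + self.height * 0.5,
        )
    }
}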

View File

@@ -0,0 +1,496 @@
//! Demonstrates how to use masks to limit the scope of animations.
use bevy::{
animation::{AnimationTarget, AnimationTargetId},
color::palettes::css::{LIGHT_GRAY, WHITE},
prelude::*,
};
use std::collections::HashSet;
// IDs of the mask groups we define for the running fox model.
//
// Each mask group defines a set of bones for which animations can be toggled on
// and off.
const MASK_GROUP_HEAD: u32 = 0;
const MASK_GROUP_LEFT_FRONT_LEG: u32 = 1;
const MASK_GROUP_RIGHT_FRONT_LEG: u32 = 2;
const MASK_GROUP_LEFT_HIND_LEG: u32 = 3;
const MASK_GROUP_RIGHT_HIND_LEG: u32 = 4;
const MASK_GROUP_TAIL: u32 = 5;
// The width in pixels of the small buttons that allow the user to toggle a mask
// group on or off.
const MASK_GROUP_BUTTON_WIDTH: f32 = 250.0;
// The names of the bones that each mask group consists of. Each mask group is
// defined as a (prefix, suffix) tuple. The mask group consists of a single
// bone chain rooted at the prefix. For example, if the chain's prefix is
// "A/B/C" and the suffix is "D/E", then the bones that will be included in the
// mask group are "A/B/C", "A/B/C/D", and "A/B/C/D/E".
//
// The fact that our mask groups are single chains of bones isn't an engine
// requirement; it just so happens to be the case for the model we're using. A
// mask group can consist of any set of animation targets, regardless of whether
// they form a single chain.
const MASK_GROUP_PATHS: [(&str, &str); 6] = [
// Head
(
"root/_rootJoint/b_Root_00/b_Hip_01/b_Spine01_02/b_Spine02_03",
"b_Neck_04/b_Head_05",
),
// Left front leg
(
"root/_rootJoint/b_Root_00/b_Hip_01/b_Spine01_02/b_Spine02_03/b_LeftUpperArm_09",
"b_LeftForeArm_010/b_LeftHand_011",
),
// Right front leg
(
"root/_rootJoint/b_Root_00/b_Hip_01/b_Spine01_02/b_Spine02_03/b_RightUpperArm_06",
"b_RightForeArm_07/b_RightHand_08",
),
// Left hind leg
(
"root/_rootJoint/b_Root_00/b_Hip_01/b_LeftLeg01_015",
"b_LeftLeg02_016/b_LeftFoot01_017/b_LeftFoot02_018",
),
// Right hind leg
(
"root/_rootJoint/b_Root_00/b_Hip_01/b_RightLeg01_019",
"b_RightLeg02_020/b_RightFoot01_021/b_RightFoot02_022",
),
// Tail
(
"root/_rootJoint/b_Root_00/b_Hip_01/b_Tail01_012",
"b_Tail02_013/b_Tail03_014",
),
];
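// Illustrative sketch, not part of the original example: how one (prefix, suffix) pair
// above expands into the per-bone `AnimationTargetId`s that get added to a mask group.
// It mirrors the loop in `setup_animation_graph_once_loaded` below.
#[allow(dead_code)]
fn mask_group_target_ids(prefix: &'static str, suffix: &'static str) -> Vec<AnimationTargetId> {
    let prefix: Vec<Name> = prefix.split('/').map(Name::new).collect();
    let suffix: Vec<Name> = suffix.split('/').map(Name::new).collect();
    // One id for the prefix chain alone, then one more for each added suffix bone.
    (0..=suffix.len())
        .map(|chain_length| {
            AnimationTargetId::from_names(prefix.iter().chain(suffix[0..chain_length].iter()))
        })
        .collect()
}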
#[derive(Clone, Copy, Component)]
struct AnimationControl {
// The ID of the mask group that this button controls.
group_id: u32,
label: AnimationLabel,
}
#[derive(Clone, Copy, Component, PartialEq, Debug)]
enum AnimationLabel {
Idle = 0,
Walk = 1,
Run = 2,
Off = 3,
}
#[derive(Clone, Debug, Resource)]
struct AnimationNodes([AnimationNodeIndex; 3]);
#[derive(Clone, Copy, Debug, Resource)]
struct AppState([MaskGroupState; 6]);
#[derive(Clone, Copy, Debug)]
struct MaskGroupState {
clip: u8,
}
// The application entry point.
fn main() {
App::new()
.add_plugins(DefaultPlugins.set(WindowPlugin {
primary_window: Some(Window {
title: "Bevy Animation Masks Example".into(),
..default()
}),
..default()
}))
.add_systems(Startup, (setup_scene, setup_ui))
.add_systems(Update, setup_animation_graph_once_loaded)
.add_systems(Update, handle_button_toggles)
.add_systems(Update, update_ui)
.insert_resource(AmbientLight {
color: WHITE.into(),
brightness: 100.0,
..default()
})
.init_resource::<AppState>()
.run();
}
// Spawns the 3D objects in the scene, and loads the fox animation from the glTF
// file.
fn setup_scene(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// Spawn the camera.
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-15.0, 10.0, 20.0).looking_at(Vec3::new(0., 1., 0.), Vec3::Y),
));
// Spawn the light.
commands.spawn((
PointLight {
intensity: 10_000_000.0,
shadows_enabled: true,
..default()
},
Transform::from_xyz(-4.0, 8.0, 13.0),
));
// Spawn the fox.
commands.spawn((
SceneRoot(
asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/animated/Fox.glb")),
),
Transform::from_scale(Vec3::splat(0.07)),
));
// Spawn the ground.
commands.spawn((
Mesh3d(meshes.add(Circle::new(7.0))),
MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))),
Transform::from_rotation(Quat::from_rotation_x(-std::f32::consts::FRAC_PI_2)),
));
}
// Creates the UI.
fn setup_ui(mut commands: Commands) {
// Add help text.
commands.spawn((
Text::new("Click on a button to toggle animations for its associated bones"),
Node {
position_type: PositionType::Absolute,
left: Val::Px(12.0),
top: Val::Px(12.0),
..default()
},
));
// Add the buttons that allow the user to toggle mask groups on and off.
commands
.spawn(Node {
flex_direction: FlexDirection::Column,
position_type: PositionType::Absolute,
row_gap: Val::Px(6.0),
left: Val::Px(12.0),
bottom: Val::Px(12.0),
..default()
})
.with_children(|parent| {
let row_node = Node {
flex_direction: FlexDirection::Row,
column_gap: Val::Px(6.0),
..default()
};
add_mask_group_control(parent, "Head", Val::Auto, MASK_GROUP_HEAD);
parent.spawn(row_node.clone()).with_children(|parent| {
add_mask_group_control(
parent,
"Left Front Leg",
Val::Px(MASK_GROUP_BUTTON_WIDTH),
MASK_GROUP_LEFT_FRONT_LEG,
);
add_mask_group_control(
parent,
"Right Front Leg",
Val::Px(MASK_GROUP_BUTTON_WIDTH),
MASK_GROUP_RIGHT_FRONT_LEG,
);
});
parent.spawn(row_node).with_children(|parent| {
add_mask_group_control(
parent,
"Left Hind Leg",
Val::Px(MASK_GROUP_BUTTON_WIDTH),
MASK_GROUP_LEFT_HIND_LEG,
);
add_mask_group_control(
parent,
"Right Hind Leg",
Val::Px(MASK_GROUP_BUTTON_WIDTH),
MASK_GROUP_RIGHT_HIND_LEG,
);
});
add_mask_group_control(parent, "Tail", Val::Auto, MASK_GROUP_TAIL);
});
}
// Adds a button that allows the user to toggle a mask group on and off.
//
// The button will automatically become a child of the parent that owns the
// given `ChildSpawnerCommands`.
fn add_mask_group_control(
parent: &mut ChildSpawnerCommands,
label: &str,
width: Val,
mask_group_id: u32,
) {
let button_text_style = (
TextFont {
font_size: 14.0,
..default()
},
TextColor::WHITE,
);
let selected_button_text_style = (button_text_style.0.clone(), TextColor::BLACK);
let label_text_style = (
button_text_style.0.clone(),
TextColor(Color::Srgba(LIGHT_GRAY)),
);
parent
.spawn((
Node {
border: UiRect::all(Val::Px(1.0)),
width,
flex_direction: FlexDirection::Column,
justify_content: JustifyContent::Center,
align_items: AlignItems::Center,
padding: UiRect::ZERO,
margin: UiRect::ZERO,
..default()
},
BorderColor(Color::WHITE),
BorderRadius::all(Val::Px(3.0)),
BackgroundColor(Color::BLACK),
))
.with_children(|builder| {
builder
.spawn((
Node {
border: UiRect::ZERO,
width: Val::Percent(100.0),
justify_content: JustifyContent::Center,
align_items: AlignItems::Center,
padding: UiRect::ZERO,
margin: UiRect::ZERO,
..default()
},
BackgroundColor(Color::BLACK),
))
.with_child((
Text::new(label),
label_text_style.clone(),
Node {
margin: UiRect::vertical(Val::Px(3.0)),
..default()
},
));
builder
.spawn((
Node {
width: Val::Percent(100.0),
flex_direction: FlexDirection::Row,
justify_content: JustifyContent::Center,
align_items: AlignItems::Center,
border: UiRect::top(Val::Px(1.0)),
..default()
},
BorderColor(Color::WHITE),
))
.with_children(|builder| {
for (index, label) in [
AnimationLabel::Run,
AnimationLabel::Walk,
AnimationLabel::Idle,
AnimationLabel::Off,
]
.iter()
.enumerate()
{
builder
.spawn((
Button,
BackgroundColor(if index > 0 {
Color::BLACK
} else {
Color::WHITE
}),
Node {
flex_grow: 1.0,
border: if index > 0 {
UiRect::left(Val::Px(1.0))
} else {
UiRect::ZERO
},
..default()
},
BorderColor(Color::WHITE),
AnimationControl {
group_id: mask_group_id,
label: *label,
},
))
.with_child((
Text(format!("{:?}", label)),
if index > 0 {
button_text_style.clone()
} else {
selected_button_text_style.clone()
},
TextLayout::new_with_justify(JustifyText::Center),
Node {
flex_grow: 1.0,
margin: UiRect::vertical(Val::Px(3.0)),
..default()
},
));
}
});
});
}
// Builds up the animation graph, including the mask groups, and adds it to the
// entity with the `AnimationPlayer` that the glTF loader created.
fn setup_animation_graph_once_loaded(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut animation_graphs: ResMut<Assets<AnimationGraph>>,
mut players: Query<(Entity, &mut AnimationPlayer), Added<AnimationPlayer>>,
targets: Query<(Entity, &AnimationTarget)>,
) {
for (entity, mut player) in &mut players {
// Load the animation clip from the glTF file.
let mut animation_graph = AnimationGraph::new();
let blend_node = animation_graph.add_additive_blend(1.0, animation_graph.root);
let animation_graph_nodes: [AnimationNodeIndex; 3] =
std::array::from_fn(|animation_index| {
let handle = asset_server.load(
GltfAssetLabel::Animation(animation_index)
.from_asset("models/animated/Fox.glb"),
);
let mask = if animation_index == 0 { 0 } else { 0x3f };
animation_graph.add_clip_with_mask(handle, mask, 1.0, blend_node)
});
// Create each mask group.
let mut all_animation_target_ids = HashSet::new();
for (mask_group_index, (mask_group_prefix, mask_group_suffix)) in
MASK_GROUP_PATHS.iter().enumerate()
{
// Split up the prefix and suffix, and convert them into `Name`s.
let prefix: Vec<_> = mask_group_prefix.split('/').map(Name::new).collect();
let suffix: Vec<_> = mask_group_suffix.split('/').map(Name::new).collect();
// Add each bone in the chain to the appropriate mask group.
for chain_length in 0..=suffix.len() {
let animation_target_id = AnimationTargetId::from_names(
prefix.iter().chain(suffix[0..chain_length].iter()),
);
animation_graph
.add_target_to_mask_group(animation_target_id, mask_group_index as u32);
all_animation_target_ids.insert(animation_target_id);
}
}
        // We're done constructing the animation graph. Add it as an asset.
let animation_graph = animation_graphs.add(animation_graph);
commands
.entity(entity)
.insert(AnimationGraphHandle(animation_graph));
// Remove animation targets that aren't in any of the mask groups. If we
// don't do that, those bones will play all animations at once, which is
// ugly.
for (target_entity, target) in &targets {
if !all_animation_target_ids.contains(&target.id) {
commands.entity(target_entity).remove::<AnimationTarget>();
}
}
// Play the animation.
for animation_graph_node in animation_graph_nodes {
player.play(animation_graph_node).repeat();
}
// Record the graph nodes.
commands.insert_resource(AnimationNodes(animation_graph_nodes));
}
}
// A system that handles requests from the user to toggle mask groups on and
// off.
fn handle_button_toggles(
mut interactions: Query<(&Interaction, &mut AnimationControl), Changed<Interaction>>,
mut animation_players: Query<&AnimationGraphHandle, With<AnimationPlayer>>,
mut animation_graphs: ResMut<Assets<AnimationGraph>>,
mut animation_nodes: Option<ResMut<AnimationNodes>>,
mut app_state: ResMut<AppState>,
) {
let Some(ref mut animation_nodes) = animation_nodes else {
return;
};
for (interaction, animation_control) in interactions.iter_mut() {
// We only care about press events.
if *interaction != Interaction::Pressed {
continue;
}
// Toggle the state of the clip.
app_state.0[animation_control.group_id as usize].clip = animation_control.label as u8;
// Now grab the animation player. (There's only one in our case, but we
// iterate just for clarity's sake.)
for animation_graph_handle in animation_players.iter_mut() {
// The animation graph needs to have loaded.
let Some(animation_graph) = animation_graphs.get_mut(animation_graph_handle) else {
continue;
};
for (clip_index, &animation_node_index) in animation_nodes.0.iter().enumerate() {
let Some(animation_node) = animation_graph.get_mut(animation_node_index) else {
continue;
};
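                // In an `AnimationGraph`, a set bit in `mask` means the node is
                // masked *out* for that group. Clear the group's bit on the clip
                // the user selected so it drives those bones, and set it on the
                // other clips so they don't.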
if animation_control.label as usize == clip_index {
animation_node.mask &= !(1 << animation_control.group_id);
} else {
animation_node.mask |= 1 << animation_control.group_id;
}
}
}
}
}
// A system that updates the UI based on the current app state.
fn update_ui(
mut animation_controls: Query<(&AnimationControl, &mut BackgroundColor, &Children)>,
texts: Query<Entity, With<Text>>,
mut writer: TextUiWriter,
app_state: Res<AppState>,
) {
for (animation_control, mut background_color, kids) in animation_controls.iter_mut() {
let enabled =
app_state.0[animation_control.group_id as usize].clip == animation_control.label as u8;
*background_color = if enabled {
BackgroundColor(Color::WHITE)
} else {
BackgroundColor(Color::BLACK)
};
for &kid in kids {
let Ok(text) = texts.get(kid) else {
continue;
};
writer.for_each_color(text, |mut color| {
color.0 = if enabled { Color::BLACK } else { Color::WHITE };
});
}
}
}
impl Default for AppState {
fn default() -> Self {
AppState([MaskGroupState { clip: 0 }; 6])
}
}

View File

@@ -0,0 +1,124 @@
//! Demonstrates how to animate colors in different color spaces using mixing and splines.
use bevy::{math::VectorSpace, prelude::*};
// We define this trait so we can reuse the same code for any color type that can be animated along a curve.
trait CurveColor: VectorSpace + Into<Color> + Send + Sync + 'static {}
impl<T: VectorSpace + Into<Color> + Send + Sync + 'static> CurveColor for T {}
// We define this trait so we can reuse the same code for any color type that can be animated by mixing.
trait MixedColor: Mix + Into<Color> + Send + Sync + 'static {}
impl<T: Mix + Into<Color> + Send + Sync + 'static> MixedColor for T {}
#[derive(Debug, Component)]
struct Curve<T: CurveColor>(CubicCurve<T>);
#[derive(Debug, Component)]
struct Mixed<T: MixedColor>([T; 4]);
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.add_systems(Startup, setup)
.add_systems(
Update,
(
animate_curve::<LinearRgba>,
animate_curve::<Oklaba>,
animate_curve::<Xyza>,
animate_mixed::<Hsla>,
animate_mixed::<Srgba>,
animate_mixed::<Oklcha>,
),
)
.run();
}
fn setup(mut commands: Commands) {
commands.spawn(Camera2d);
    // The color spaces `Oklaba`, `Laba`, `LinearRgba` and `Xyza` are all either perceptually or physically linear.
    // This property allows us to define curves, e.g. Bezier curves, through these spaces.
// Define the control points for the curve.
// For more information, please see the cubic curve example.
let colors = [
LinearRgba::WHITE,
LinearRgba::rgb(1., 1., 0.), // Yellow
LinearRgba::RED,
LinearRgba::BLACK,
];
// Spawn a sprite using the provided colors as control points.
spawn_curve_sprite(&mut commands, 275., colors);
// Spawn another sprite using the provided colors as control points after converting them to the `Xyza` color space.
spawn_curve_sprite(&mut commands, 175., colors.map(Xyza::from));
spawn_curve_sprite(&mut commands, 75., colors.map(Oklaba::from));
// Other color spaces like `Srgba` or `Hsva` are neither perceptually nor physically linear.
// As such, we cannot use curves in these spaces.
// However, we can still mix these colors and animate that way. In fact, mixing colors works in any color space.
// Spawn a sprite using the provided colors for mixing.
spawn_mixed_sprite(&mut commands, -75., colors.map(Hsla::from));
spawn_mixed_sprite(&mut commands, -175., colors.map(Srgba::from));
spawn_mixed_sprite(&mut commands, -275., colors.map(Oklcha::from));
}
fn spawn_curve_sprite<T: CurveColor>(commands: &mut Commands, y: f32, points: [T; 4]) {
commands.spawn((
Sprite::sized(Vec2::new(75., 75.)),
Transform::from_xyz(0., y, 0.),
Curve(CubicBezier::new([points]).to_curve().unwrap()),
));
}
fn spawn_mixed_sprite<T: MixedColor>(commands: &mut Commands, y: f32, colors: [T; 4]) {
commands.spawn((
Transform::from_xyz(0., y, 0.),
Sprite::sized(Vec2::new(75., 75.)),
Mixed(colors),
));
}
fn animate_curve<T: CurveColor>(
time: Res<Time>,
mut query: Query<(&mut Transform, &mut Sprite, &Curve<T>)>,
) {
let t = (ops::sin(time.elapsed_secs()) + 1.) / 2.;
for (mut transform, mut sprite, cubic_curve) in &mut query {
// position takes a point from the curve where 0 is the initial point
// and 1 is the last point
sprite.color = cubic_curve.0.position(t).into();
transform.translation.x = 600. * (t - 0.5);
}
}
fn animate_mixed<T: MixedColor>(
time: Res<Time>,
mut query: Query<(&mut Transform, &mut Sprite, &Mixed<T>)>,
) {
let t = (ops::sin(time.elapsed_secs()) + 1.) / 2.;
for (mut transform, mut sprite, mixed) in &mut query {
sprite.color = {
            // First, we determine the number of intervals between colors.
            // For four colors, there are three intervals between those colors.
let intervals = (mixed.0.len() - 1) as f32;
// Next we determine the index of the first of the two colors to mix.
let start_i = (t * intervals).floor().min(intervals - 1.);
// Lastly we determine the 'local' value of t in this interval.
let local_t = (t * intervals) - start_i;
let color = mixed.0[start_i as usize].mix(&mixed.0[start_i as usize + 1], local_t);
color.into()
};
transform.translation.x = 600. * (t - 0.5);
}
}
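// A minimal standalone sketch, not part of the original example, of the interval math
// above, using plain `f32` samples so the arithmetic is easy to follow: with four
// samples there are three intervals, `start_i` selects the left endpoint of the
// interval `t` falls in, and `local_t` is the position within that interval.
#[allow(dead_code)]
fn sample_piecewise_linear(samples: &[f32; 4], t: f32) -> f32 {
    let intervals = (samples.len() - 1) as f32;
    let start_i = (t * intervals).floor().min(intervals - 1.);
    let local_t = (t * intervals) - start_i;
    let a = samples[start_i as usize];
    let b = samples[start_i as usize + 1];
    // Plain linear interpolation stands in for `Mix::mix` here.
    a + (b - a) * local_t
}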

View File

@@ -0,0 +1,235 @@
//! Skinned mesh example with mesh and joints data defined in code.
//! Example taken from <https://github.com/KhronosGroup/glTF-Tutorials/blob/master/gltfTutorial/gltfTutorial_019_SimpleSkin.md>
use std::f32::consts::*;
use bevy::{
math::ops,
prelude::*,
render::{
mesh::{
skinning::{SkinnedMesh, SkinnedMeshInverseBindposes},
Indices, PrimitiveTopology, VertexAttributeValues,
},
render_asset::RenderAssetUsages,
},
};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.insert_resource(AmbientLight {
brightness: 3000.0,
..default()
})
.add_systems(Startup, setup)
.add_systems(Update, joint_animation)
.run();
}
/// Used to mark a joint to be animated in the [`joint_animation`] system.
#[derive(Component)]
struct AnimatedJoint(isize);
/// Construct a mesh and a skeleton with 2 joints for that mesh,
/// and mark the second joint to be animated.
/// It is similar to the scene defined in `models/SimpleSkin/SimpleSkin.gltf`
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut skinned_mesh_inverse_bindposes_assets: ResMut<Assets<SkinnedMeshInverseBindposes>>,
) {
// Create a camera
commands.spawn((
Camera3d::default(),
Transform::from_xyz(2.5, 2.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y),
));
    // Create inverse bindpose matrices for a skeleton consisting of 2 joints
let inverse_bindposes = skinned_mesh_inverse_bindposes_assets.add(vec![
Mat4::from_translation(Vec3::new(-0.5, -1.0, 0.0)),
Mat4::from_translation(Vec3::new(-0.5, -1.0, 0.0)),
]);
// Create a mesh
let mesh = Mesh::new(
PrimitiveTopology::TriangleList,
RenderAssetUsages::RENDER_WORLD,
)
// Set mesh vertex positions
.with_inserted_attribute(
Mesh::ATTRIBUTE_POSITION,
vec![
[0.0, 0.0, 0.0],
[1.0, 0.0, 0.0],
[0.0, 0.5, 0.0],
[1.0, 0.5, 0.0],
[0.0, 1.0, 0.0],
[1.0, 1.0, 0.0],
[0.0, 1.5, 0.0],
[1.0, 1.5, 0.0],
[0.0, 2.0, 0.0],
[1.0, 2.0, 0.0],
],
)
    // Add UV coordinates that map the left half of the texture since it's a 1 x
    // 2 rectangle.
.with_inserted_attribute(
Mesh::ATTRIBUTE_UV_0,
vec![
[0.0, 0.00],
[0.5, 0.00],
[0.0, 0.25],
[0.5, 0.25],
[0.0, 0.50],
[0.5, 0.50],
[0.0, 0.75],
[0.5, 0.75],
[0.0, 1.00],
[0.5, 1.00],
],
)
// Set mesh vertex normals
.with_inserted_attribute(Mesh::ATTRIBUTE_NORMAL, vec![[0.0, 0.0, 1.0]; 10])
// Set mesh vertex joint indices for mesh skinning.
// Each vertex gets 4 indices used to address the `JointTransforms` array in the vertex shader
// as well as `SkinnedMeshJoint` array in the `SkinnedMesh` component.
// This means that a maximum of 4 joints can affect a single vertex.
.with_inserted_attribute(
Mesh::ATTRIBUTE_JOINT_INDEX,
// Need to be explicit here as [u16; 4] could be either Uint16x4 or Unorm16x4.
VertexAttributeValues::Uint16x4(vec![
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0],
]),
)
// Set mesh vertex joint weights for mesh skinning.
// Each vertex gets 4 joint weights corresponding to the 4 joint indices assigned to it.
    // The sum of these weights should equal 1.
.with_inserted_attribute(
Mesh::ATTRIBUTE_JOINT_WEIGHT,
vec![
[1.00, 0.00, 0.0, 0.0],
[1.00, 0.00, 0.0, 0.0],
[0.75, 0.25, 0.0, 0.0],
[0.75, 0.25, 0.0, 0.0],
[0.50, 0.50, 0.0, 0.0],
[0.50, 0.50, 0.0, 0.0],
[0.25, 0.75, 0.0, 0.0],
[0.25, 0.75, 0.0, 0.0],
[0.00, 1.00, 0.0, 0.0],
[0.00, 1.00, 0.0, 0.0],
],
)
// Tell bevy to construct triangles from a list of vertex indices,
// where each 3 vertex indices form a triangle.
.with_inserted_indices(Indices::U16(vec![
0, 1, 3, 0, 3, 2, 2, 3, 5, 2, 5, 4, 4, 5, 7, 4, 7, 6, 6, 7, 9, 6, 9, 8,
]));
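    // (The index list above stitches the ten vertices into four stacked quads, two
    // triangles per quad, giving eight triangles in total.)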
let mesh = meshes.add(mesh);
// We're seeding the PRNG here to make this example deterministic for testing purposes.
// This isn't strictly required in practical use unless you need your app to be deterministic.
let mut rng = ChaCha8Rng::seed_from_u64(42);
for i in -5..5 {
// Create joint entities
let joint_0 = commands
.spawn(Transform::from_xyz(
i as f32 * 1.5,
0.0,
// Move quads back a small amount to avoid Z-fighting and not
// obscure the transform gizmos.
-(i as f32 * 0.01).abs(),
))
.id();
let joint_1 = commands.spawn((AnimatedJoint(i), Transform::IDENTITY)).id();
// Set joint_1 as a child of joint_0.
commands.entity(joint_0).add_children(&[joint_1]);
// Each joint in this vector corresponds to each inverse bindpose matrix in `SkinnedMeshInverseBindposes`.
let joint_entities = vec![joint_0, joint_1];
// Create skinned mesh renderer. Note that its transform doesn't affect the position of the mesh.
commands.spawn((
Mesh3d(mesh.clone()),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::srgb(
rng.gen_range(0.0..1.0),
rng.gen_range(0.0..1.0),
rng.gen_range(0.0..1.0),
),
base_color_texture: Some(asset_server.load("textures/uv_checker_bw.png")),
..default()
})),
SkinnedMesh {
inverse_bindposes: inverse_bindposes.clone(),
joints: joint_entities,
},
));
}
}
/// Animate the joint marked with [`AnimatedJoint`] component.
fn joint_animation(
time: Res<Time>,
mut query: Query<(&mut Transform, &AnimatedJoint)>,
mut gizmos: Gizmos,
) {
for (mut transform, animated_joint) in &mut query {
match animated_joint.0 {
-5 => {
transform.rotation =
Quat::from_rotation_x(FRAC_PI_2 * ops::sin(time.elapsed_secs()));
}
-4 => {
transform.rotation =
Quat::from_rotation_y(FRAC_PI_2 * ops::sin(time.elapsed_secs()));
}
-3 => {
transform.rotation =
Quat::from_rotation_z(FRAC_PI_2 * ops::sin(time.elapsed_secs()));
}
-2 => {
transform.scale.x = ops::sin(time.elapsed_secs()) + 1.0;
}
-1 => {
transform.scale.y = ops::sin(time.elapsed_secs()) + 1.0;
}
0 => {
transform.translation.x = 0.5 * ops::sin(time.elapsed_secs());
transform.translation.y = ops::cos(time.elapsed_secs());
}
1 => {
transform.translation.y = ops::sin(time.elapsed_secs());
transform.translation.z = ops::cos(time.elapsed_secs());
}
2 => {
transform.translation.x = ops::sin(time.elapsed_secs());
}
3 => {
transform.translation.y = ops::sin(time.elapsed_secs());
transform.scale.x = ops::sin(time.elapsed_secs()) + 1.0;
}
_ => (),
}
// Show transform
let mut axis = *transform;
axis.translation.x += animated_joint.0 as f32 * 1.5;
gizmos.axes(axis, 1.0);
}
}
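// Illustrative sketch, not part of the original example, of what the skinning shader
// does with the joint-index and joint-weight attributes set up above: each vertex is
// transformed by a weighted sum of `joint_world_transform * inverse_bindpose`
// matrices, with the four weights summing to 1.
#[allow(dead_code)]
fn skin_vertex(
    position: Vec3,
    joint_matrices: &[Mat4],    // world transform of each joint
    inverse_bindposes: &[Mat4], // matching inverse bindpose per joint
    joint_indices: [usize; 4],
    joint_weights: [f32; 4],
) -> Vec3 {
    let mut skinned = Vec3::ZERO;
    for (index, weight) in joint_indices.into_iter().zip(joint_weights) {
        let skin_matrix = joint_matrices[index] * inverse_bindposes[index];
        skinned += skin_matrix.transform_point3(position) * weight;
    }
    skinned
}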

Some files were not shown because too many files have changed in this diff