Merge branch 'main' into remove-min-and-max-from-LayoutContext

This commit is contained in:
ickshonpe 2024-11-22 06:03:01 +00:00 committed by GitHub
commit 4d3df3f171
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
31 changed files with 360 additions and 652 deletions

View file

@ -10,7 +10,6 @@ keywords = ["bevy"]
[dependencies]
# bevy
bevy_animation_derive = { path = "derive", version = "0.15.0-dev" }
bevy_app = { path = "../bevy_app", version = "0.15.0-dev" }
bevy_asset = { path = "../bevy_asset", version = "0.15.0-dev" }
bevy_color = { path = "../bevy_color", version = "0.15.0-dev" }

View file

@ -1,25 +0,0 @@
[package]
name = "bevy_animation_derive"
version = "0.15.0-dev"
edition = "2021"
description = "Derive implementations for bevy_animation"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
keywords = ["bevy"]
[lib]
proc-macro = true
[dependencies]
bevy_macro_utils = { path = "../../bevy_macro_utils", version = "0.15.0-dev" }
proc-macro2 = "1.0"
quote = "1.0"
syn = { version = "2.0", features = ["full"] }
[lints]
workspace = true
[package.metadata.docs.rs]
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
all-features = true

View file

@ -1,29 +0,0 @@
//! Derive macros for `bevy_animation`.
extern crate proc_macro;
use bevy_macro_utils::BevyManifest;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};
/// Used to derive `AnimationEvent` for a type.
#[proc_macro_derive(AnimationEvent)]
pub fn derive_animation_event(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
let name = ast.ident;
let manifest = BevyManifest::default();
let bevy_animation_path = manifest.get_path("bevy_animation");
let bevy_ecs_path = manifest.get_path("bevy_ecs");
let animation_event_path = quote! { #bevy_animation_path::animation_event };
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
// TODO: This could derive Event as well.
quote! {
impl #impl_generics #animation_event_path::AnimationEvent for #name #ty_generics #where_clause {
fn trigger(&self, _time: f32, _weight: f32, entity: #bevy_ecs_path::entity::Entity, world: &mut #bevy_ecs_path::world::World) {
world.entity_mut(entity).trigger(Clone::clone(self));
}
}
}
.into()
}

View file

@ -1,281 +0,0 @@
//! Traits and types for triggering events from animations.
use core::{any::Any, fmt::Debug};
use bevy_ecs::prelude::*;
use bevy_reflect::{
prelude::*, utility::NonGenericTypeInfoCell, ApplyError, DynamicTupleStruct, FromType,
GetTypeRegistration, ReflectFromPtr, ReflectKind, ReflectMut, ReflectOwned, ReflectRef,
TupleStructFieldIter, TupleStructInfo, TypeInfo, TypeRegistration, Typed, UnnamedField,
};
pub use bevy_animation_derive::AnimationEvent;
pub(crate) fn trigger_animation_event(
entity: Entity,
time: f32,
weight: f32,
event: Box<dyn AnimationEvent>,
) -> impl Command {
move |world: &mut World| {
event.trigger(time, weight, entity, world);
}
}
/// An event that can be used with animations.
/// It can be derived to trigger as an observer event;
/// if you need more complex behavior, consider
/// a manual implementation.
///
/// # Example
///
/// ```rust
/// # use bevy_animation::prelude::*;
/// # use bevy_ecs::prelude::*;
/// # use bevy_reflect::prelude::*;
/// # use bevy_asset::prelude::*;
/// #
/// #[derive(Event, AnimationEvent, Reflect, Clone)]
/// struct Say(String);
///
/// fn on_say(trigger: Trigger<Say>) {
/// println!("{}", trigger.event().0);
/// }
///
/// fn setup_animation(
/// mut commands: Commands,
/// mut animations: ResMut<Assets<AnimationClip>>,
/// mut graphs: ResMut<Assets<AnimationGraph>>,
/// ) {
/// // Create a new animation and add an event at 1.0s.
/// let mut animation = AnimationClip::default();
/// animation.add_event(1.0, Say("Hello".into()));
///
/// // Create an animation graph.
/// let (graph, animation_index) = AnimationGraph::from_clip(animations.add(animation));
///
/// // Start playing the animation.
/// let mut player = AnimationPlayer::default();
/// player.play(animation_index).repeat();
///
/// commands.spawn((AnimationGraphHandle(graphs.add(graph)), player));
/// }
/// #
/// # bevy_ecs::system::assert_is_system(setup_animation);
/// ```
#[reflect_trait]
pub trait AnimationEvent: CloneableAnimationEvent + Reflect + Send + Sync {
/// Trigger the event, targeting `entity`.
fn trigger(&self, time: f32, weight: f32, entity: Entity, world: &mut World);
}
/// This trait exists so that manual implementors of [`AnimationEvent`]
/// do not have to implement `clone_value`.
#[diagnostic::on_unimplemented(
message = "`{Self}` does not implement `Clone`",
note = "consider annotating `{Self}` with `#[derive(Clone)]`"
)]
pub trait CloneableAnimationEvent {
/// Clone this value into a new `Box<dyn AnimationEvent>`
fn clone_value(&self) -> Box<dyn AnimationEvent>;
}
impl<T: AnimationEvent + Clone> CloneableAnimationEvent for T {
fn clone_value(&self) -> Box<dyn AnimationEvent> {
Box::new(self.clone())
}
}
/// The data that will be used to trigger an animation event.
#[derive(TypePath)]
pub(crate) struct AnimationEventData(pub(crate) Box<dyn AnimationEvent>);
impl AnimationEventData {
pub(crate) fn new(event: impl AnimationEvent) -> Self {
Self(Box::new(event))
}
}
impl Debug for AnimationEventData {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.write_str("AnimationEventData(")?;
PartialReflect::debug(self.0.as_ref(), f)?;
f.write_str(")")?;
Ok(())
}
}
impl Clone for AnimationEventData {
fn clone(&self) -> Self {
Self(CloneableAnimationEvent::clone_value(self.0.as_ref()))
}
}
// We have to implement `GetTypeRegistration` manually because of the embedded
// `Box<dyn AnimationEvent>`, which can't be automatically derived yet.
impl GetTypeRegistration for AnimationEventData {
fn get_type_registration() -> TypeRegistration {
let mut registration = TypeRegistration::of::<Self>();
registration.insert::<ReflectFromPtr>(FromType::<Self>::from_type());
registration
}
}
// We have to implement `Typed` manually because of the embedded
// `Box<dyn AnimationEvent>`, which can't be automatically derived yet.
impl Typed for AnimationEventData {
fn type_info() -> &'static TypeInfo {
static CELL: NonGenericTypeInfoCell = NonGenericTypeInfoCell::new();
CELL.get_or_set(|| {
TypeInfo::TupleStruct(TupleStructInfo::new::<Self>(&[UnnamedField::new::<()>(0)]))
})
}
}
// We have to implement `TupleStruct` manually because of the embedded
// `Box<dyn AnimationEvent>`, which can't be automatically derived yet.
impl TupleStruct for AnimationEventData {
fn field(&self, index: usize) -> Option<&dyn PartialReflect> {
match index {
0 => Some(self.0.as_partial_reflect()),
_ => None,
}
}
fn field_mut(&mut self, index: usize) -> Option<&mut dyn PartialReflect> {
match index {
0 => Some(self.0.as_partial_reflect_mut()),
_ => None,
}
}
fn field_len(&self) -> usize {
1
}
fn iter_fields(&self) -> TupleStructFieldIter {
TupleStructFieldIter::new(self)
}
fn clone_dynamic(&self) -> DynamicTupleStruct {
DynamicTupleStruct::from_iter([PartialReflect::clone_value(&*self.0)])
}
}
// We have to implement `PartialReflect` manually because of the embedded
// `Box<dyn AnimationEvent>`, which can't be automatically derived yet.
impl PartialReflect for AnimationEventData {
#[inline]
fn get_represented_type_info(&self) -> Option<&'static TypeInfo> {
Some(<Self as Typed>::type_info())
}
#[inline]
fn into_partial_reflect(self: Box<Self>) -> Box<dyn PartialReflect> {
self
}
#[inline]
fn as_partial_reflect(&self) -> &dyn PartialReflect {
self
}
#[inline]
fn as_partial_reflect_mut(&mut self) -> &mut dyn PartialReflect {
self
}
fn try_into_reflect(self: Box<Self>) -> Result<Box<dyn Reflect>, Box<dyn PartialReflect>> {
Ok(self)
}
#[inline]
fn try_as_reflect(&self) -> Option<&dyn Reflect> {
Some(self)
}
#[inline]
fn try_as_reflect_mut(&mut self) -> Option<&mut dyn Reflect> {
Some(self)
}
fn try_apply(&mut self, value: &dyn PartialReflect) -> Result<(), ApplyError> {
if let ReflectRef::TupleStruct(struct_value) = value.reflect_ref() {
for (i, value) in struct_value.iter_fields().enumerate() {
if let Some(v) = self.field_mut(i) {
v.try_apply(value)?;
}
}
} else {
return Err(ApplyError::MismatchedKinds {
from_kind: value.reflect_kind(),
to_kind: ReflectKind::TupleStruct,
});
}
Ok(())
}
fn reflect_ref(&self) -> ReflectRef {
ReflectRef::TupleStruct(self)
}
fn reflect_mut(&mut self) -> ReflectMut {
ReflectMut::TupleStruct(self)
}
fn reflect_owned(self: Box<Self>) -> ReflectOwned {
ReflectOwned::TupleStruct(self)
}
fn clone_value(&self) -> Box<dyn PartialReflect> {
Box::new(Clone::clone(self))
}
}
// We have to implement `Reflect` manually because of the embedded
// `Box<dyn AnimationEvent>`, which can't be automatically derived yet.
impl Reflect for AnimationEventData {
#[inline]
fn into_any(self: Box<Self>) -> Box<dyn Any> {
self
}
#[inline]
fn as_any(&self) -> &dyn Any {
self
}
#[inline]
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
#[inline]
fn into_reflect(self: Box<Self>) -> Box<dyn Reflect> {
self
}
#[inline]
fn as_reflect(&self) -> &dyn Reflect {
self
}
#[inline]
fn as_reflect_mut(&mut self) -> &mut dyn Reflect {
self
}
#[inline]
fn set(&mut self, value: Box<dyn Reflect>) -> Result<(), Box<dyn Reflect>> {
*self = value.take()?;
Ok(())
}
}
// We have to implement `FromReflect` manually because of the embedded
// `Box<dyn AnimationEvent>`, which can't be automatically derived yet.
impl FromReflect for AnimationEventData {
fn from_reflect(reflect: &dyn PartialReflect) -> Option<Self> {
Some(reflect.try_downcast_ref::<AnimationEventData>()?.clone())
}
}

View file

@ -11,13 +11,11 @@ extern crate alloc;
pub mod animatable;
pub mod animation_curves;
pub mod animation_event;
pub mod gltf_curves;
pub mod graph;
pub mod transition;
mod util;
use animation_event::{trigger_animation_event, AnimationEvent, AnimationEventData};
use core::{
any::{Any, TypeId},
cell::RefCell,
@ -30,7 +28,7 @@ use prelude::AnimationCurveEvaluator;
use crate::graph::{AnimationGraphHandle, ThreadedAnimationGraphs};
use bevy_app::{App, Plugin, PostUpdate};
use bevy_app::{Animation, App, Plugin, PostUpdate};
use bevy_asset::{Asset, AssetApp, Assets};
use bevy_core::Name;
use bevy_ecs::{
@ -64,12 +62,8 @@ use uuid::Uuid;
pub mod prelude {
#[doc(hidden)]
pub use crate::{
animatable::*,
animation_curves::*,
animation_event::{AnimationEvent, ReflectAnimationEvent},
graph::*,
transition::*,
AnimationClip, AnimationPlayer, AnimationPlugin, VariableCurve,
animatable::*, animation_curves::*, graph::*, transition::*, AnimationClip,
AnimationPlayer, AnimationPlugin, VariableCurve,
};
}
@ -78,6 +72,7 @@ use crate::{
graph::{AnimationGraph, AnimationGraphAssetLoader, AnimationNodeIndex},
transition::{advance_transitions, expire_completed_transitions, AnimationTransitions},
};
use alloc::sync::Arc;
/// The [UUID namespace] of animation targets (e.g. bones).
///
@ -289,7 +284,34 @@ pub struct AnimationClip {
#[derive(Reflect, Debug, Clone)]
struct TimedAnimationEvent {
time: f32,
event: AnimationEventData,
event: AnimationEvent,
}
#[derive(Reflect, Debug, Clone)]
struct AnimationEvent {
#[reflect(ignore)]
trigger: AnimationEventFn,
}
impl AnimationEvent {
fn trigger(&self, commands: &mut Commands, entity: Entity, time: f32, weight: f32) {
(self.trigger.0)(commands, entity, time, weight);
}
}
#[derive(Reflect, Clone)]
struct AnimationEventFn(Arc<dyn Fn(&mut Commands, Entity, f32, f32) + Send + Sync>);
impl Default for AnimationEventFn {
fn default() -> Self {
Self(Arc::new(|_commands, _entity, _time, _weight| {}))
}
}
impl Debug for AnimationEventFn {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_tuple("AnimationEventFn").finish()
}
}
#[derive(Reflect, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)]
@ -472,9 +494,24 @@ impl AnimationClip {
.push(variable_curve);
}
/// Add an [`AnimationEvent`] to an [`AnimationTarget`] named by an [`AnimationTargetId`].
/// Add an untargeted [`Event`] to this [`AnimationClip`].
///
/// The `event` will trigger on the entity matching the target once the `time` (in seconds)
/// The `event` will be cloned and triggered on the [`AnimationPlayer`] entity once the `time` (in seconds)
/// is reached in the animation.
///
/// See also [`add_event_to_target`](Self::add_event_to_target).
pub fn add_event(&mut self, time: f32, event: impl Event + Clone) {
self.add_event_fn(
time,
move |commands: &mut Commands, entity: Entity, _time: f32, _weight: f32| {
commands.entity(entity).trigger(event.clone());
},
);
}
/// Add an [`Event`] to an [`AnimationTarget`] named by an [`AnimationTargetId`].
///
/// The `event` will be cloned and triggered on the entity matching the target once the `time` (in seconds)
/// is reached in the animation.
///
/// Use [`add_event`](Self::add_event) instead if you don't have a specific target.
@ -482,26 +519,69 @@ impl AnimationClip {
&mut self,
target_id: AnimationTargetId,
time: f32,
event: impl AnimationEvent,
event: impl Event + Clone,
) {
self.add_event_to_target_inner(AnimationEventTarget::Node(target_id), time, event);
self.add_event_fn_to_target(
target_id,
time,
move |commands: &mut Commands, entity: Entity, _time: f32, _weight: f32| {
commands.entity(entity).trigger(event.clone());
},
);
}
/// Add an untargeted [`AnimationEvent`] to this [`AnimationClip`].
/// Add an untargeted event function to this [`AnimationClip`].
///
/// The `event` will trigger on the [`AnimationPlayer`] entity once the `time` (in seconds)
/// The `func` will trigger on the [`AnimationPlayer`] entity once the `time` (in seconds)
/// is reached in the animation.
///
/// For a simpler [`Event`]-based alternative, see [`AnimationClip::add_event`].
/// See also [`add_event_to_target`](Self::add_event_to_target).
pub fn add_event(&mut self, time: f32, event: impl AnimationEvent) {
self.add_event_to_target_inner(AnimationEventTarget::Root, time, event);
///
/// ```
/// # use bevy_animation::AnimationClip;
/// # let mut clip = AnimationClip::default();
/// clip.add_event_fn(1.0, |commands, entity, time, weight| {
/// println!("Animation Event Triggered {entity:#?} at time {time} with weight {weight}");
/// })
/// ```
pub fn add_event_fn(
&mut self,
time: f32,
func: impl Fn(&mut Commands, Entity, f32, f32) + Send + Sync + 'static,
) {
self.add_event_internal(AnimationEventTarget::Root, time, func);
}
fn add_event_to_target_inner(
/// Add an event function to an [`AnimationTarget`] named by an [`AnimationTargetId`].
///
/// The `func` will trigger on the entity matching the target once the `time` (in seconds)
/// is reached in the animation.
///
/// For a simpler [`Event`]-based alternative, see [`AnimationClip::add_event_to_target`].
/// Use [`add_event`](Self::add_event) instead if you don't have a specific target.
///
/// ```
/// # use bevy_animation::{AnimationClip, AnimationTargetId};
/// # let mut clip = AnimationClip::default();
/// clip.add_event_fn_to_target(AnimationTargetId::from_iter(["Arm", "Hand"]), 1.0, |commands, entity, time, weight| {
/// println!("Animation Event Triggered {entity:#?} at time {time} with weight {weight}");
/// })
/// ```
pub fn add_event_fn_to_target(
&mut self,
target_id: AnimationTargetId,
time: f32,
func: impl Fn(&mut Commands, Entity, f32, f32) + Send + Sync + 'static,
) {
self.add_event_internal(AnimationEventTarget::Node(target_id), time, func);
}
fn add_event_internal(
&mut self,
target: AnimationEventTarget,
time: f32,
event: impl AnimationEvent,
trigger_fn: impl Fn(&mut Commands, Entity, f32, f32) + Send + Sync + 'static,
) {
self.duration = self.duration.max(time);
let triggers = self.events.entry(target).or_default();
@ -510,7 +590,9 @@ impl AnimationClip {
index,
TimedAnimationEvent {
time,
event: AnimationEventData::new(event),
event: AnimationEvent {
trigger: AnimationEventFn(Arc::new(trigger_fn)),
},
},
),
}
@ -988,12 +1070,7 @@ fn trigger_untargeted_animation_events(
};
for TimedAnimationEvent { time, event } in triggered_events.iter() {
commands.queue(trigger_animation_event(
entity,
*time,
active_animation.weight,
event.clone().0,
));
event.trigger(&mut commands, entity, *time, active_animation.weight);
}
}
}
@ -1195,12 +1272,12 @@ pub fn animate_targets(
for TimedAnimationEvent { time, event } in
triggered_events.iter()
{
commands.queue(trigger_animation_event(
event.trigger(
&mut commands,
entity,
*time,
active_animation.weight,
event.clone().0,
));
);
}
});
}
@ -1252,10 +1329,6 @@ pub fn animate_targets(
});
}
/// Animation system set
#[derive(SystemSet, Debug, Hash, PartialEq, Eq, Clone)]
pub struct Animation;
/// Adds animation support to an app
#[derive(Default)]
pub struct AnimationPlugin;
@ -1573,12 +1646,6 @@ mod tests {
#[derive(Event, Reflect, Clone)]
struct A;
impl AnimationEvent for A {
fn trigger(&self, _time: f32, _weight: f32, target: Entity, world: &mut World) {
world.entity_mut(target).trigger(self.clone());
}
}
#[track_caller]
fn assert_triggered_events_with(
active_animation: &ActiveAnimation,

View file

@ -181,6 +181,10 @@ pub struct PostUpdate;
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
pub struct Last;
/// Animation system set. This exists in [`PostUpdate`].
#[derive(SystemSet, Debug, Hash, PartialEq, Eq, Clone)]
pub struct Animation;
/// Defines the schedules to be run for the [`Main`] schedule, including
/// their order.
#[derive(Resource, Debug)]

View file

@ -1029,8 +1029,8 @@ impl Components {
/// registration will be used.
pub(crate) unsafe fn register_required_components<R: Component>(
&mut self,
required: ComponentId,
requiree: ComponentId,
required: ComponentId,
constructor: fn() -> R,
) -> Result<(), RequiredComponentsError> {
// SAFETY: The caller ensures that the `requiree` is valid.
@ -1083,14 +1083,17 @@ impl Components {
for (component_id, component) in inherited_requirements.iter() {
// Register the required component.
// The inheritance depth is increased by `1` since this is a component required by the original required component.
// The inheritance depth of inherited components is whatever the requiree's
// depth is relative to `required_by_id`, plus the inheritance depth of the
// inherited component relative to the requiree, plus 1 to account for the
// requiree in between.
// SAFETY: Component ID and constructor match the ones on the original requiree.
// The original requiree is responsible for making sure the registration is safe.
unsafe {
required_components.register_dynamic(
*component_id,
component.constructor.clone(),
component.inheritance_depth + 1,
component.inheritance_depth + depth + 1,
);
};
}

View file

@ -2399,6 +2399,40 @@ mod tests {
assert_eq!(world.entity(id).get::<Counter>().unwrap().0, 1);
}
#[test]
fn runtime_required_components_deep_require_does_not_override_shallow_require_deep_subtree_after_shallow(
) {
#[derive(Component)]
struct A;
#[derive(Component, Default)]
struct B;
#[derive(Component, Default)]
struct C;
#[derive(Component, Default)]
struct D;
#[derive(Component, Default)]
struct E;
#[derive(Component)]
struct Counter(i32);
#[derive(Component, Default)]
struct F;
let mut world = World::new();
world.register_required_components::<A, B>();
world.register_required_components::<B, C>();
world.register_required_components::<C, D>();
world.register_required_components::<D, E>();
world.register_required_components_with::<E, Counter>(|| Counter(1));
world.register_required_components_with::<F, Counter>(|| Counter(2));
world.register_required_components::<E, F>();
let id = world.spawn(A).id();
// The "shallower" of the two components is used.
assert_eq!(world.entity(id).get::<Counter>().unwrap().0, 1);
}
#[test]
fn runtime_required_components_existing_archetype() {
#[derive(Component)]

View file

@ -649,7 +649,7 @@ impl<'w, 's, D: QueryData, F: QueryFilter> Query<'w, 's, D, F> {
/// Returns an iterator over the query items generated from an [`Entity`] list.
///
/// Items are returned in the order of the list of entities, and may not be unique if the input
/// doesnn't guarantee uniqueness. Entities that don't match the query are skipped.
/// doesn't guarantee uniqueness. Entities that don't match the query are skipped.
///
/// # Examples
///

View file

@ -515,7 +515,7 @@ impl World {
// SAFETY: We just created the `required` and `requiree` components.
unsafe {
self.components
.register_required_components::<R>(required, requiree, constructor)
.register_required_components::<R>(requiree, required, constructor)
}
}

View file

@ -71,7 +71,7 @@ pub mod prelude {
};
}
use bevy_app::prelude::*;
use bevy_app::{prelude::*, Animation};
use bevy_asset::AssetApp;
#[cfg(feature = "default_font")]
use bevy_asset::{load_internal_binary_asset, Handle};
@ -138,7 +138,8 @@ impl Plugin for TextPlugin {
calculate_bounds_text2d.in_set(VisibilitySystems::CalculateBounds),
)
.chain()
.in_set(Update2dText),
.in_set(Update2dText)
.after(Animation),
)
.add_systems(Last, trim_cosmic_cache);

View file

@ -25,7 +25,6 @@ bevy_reflect = { path = "../bevy_reflect", version = "0.15.0-dev", features = [
"bevy",
] }
bevy_render = { path = "../bevy_render", version = "0.15.0-dev" }
bevy_animation = { path = "../bevy_animation", version = "0.15.0-dev" }
bevy_sprite = { path = "../bevy_sprite", version = "0.15.0-dev" }
bevy_text = { path = "../bevy_text", version = "0.15.0-dev" }
bevy_picking = { path = "../bevy_picking", version = "0.15.0-dev", optional = true }

View file

@ -1,6 +1,5 @@
use crate::{
CalculatedClip, ComputedNode, DefaultUiCamera, ResolvedBorderRadius, TargetCamera, UiScale,
UiStack,
CalculatedClip, ComputedNode, DefaultUiCamera, ResolvedBorderRadius, TargetCamera, UiStack,
};
use bevy_ecs::{
change_detection::DetectChangesMut,
@ -158,7 +157,6 @@ pub fn ui_focus_system(
windows: Query<&Window>,
mouse_button_input: Res<ButtonInput<MouseButton>>,
touches_input: Res<Touches>,
ui_scale: Res<UiScale>,
ui_stack: Res<UiStack>,
mut node_query: Query<NodeQuery>,
) {
@ -201,19 +199,16 @@ pub fn ui_focus_system(
};
let viewport_position = camera
.logical_viewport_rect()
.map(|rect| rect.min)
.physical_viewport_rect()
.map(|rect| rect.min.as_vec2())
.unwrap_or_default();
windows
.get(window_ref.entity())
.ok()
.and_then(Window::cursor_position)
.and_then(Window::physical_cursor_position)
.or_else(|| touches_input.first_pressed_position())
.map(|cursor_position| (entity, cursor_position - viewport_position))
})
// The cursor position returned by `Window` only takes into account the window scale factor and not `UiScale`.
// To convert the cursor position to logical UI viewport coordinates we have to divide it by `UiScale`.
.map(|(entity, cursor_position)| (entity, cursor_position / ui_scale.0))
.collect();
// prepare an iterator that contains all the nodes that have the cursor in their rect,

View file

@ -1,7 +1,7 @@
use crate::{
experimental::{UiChildren, UiRootNodes},
BorderRadius, ComputedNode, ContentSize, DefaultUiCamera, Display, Node, Outline, OverflowAxis,
ScrollPosition, TargetCamera, UiScale,
ScrollPosition, TargetCamera, UiScale, Val,
};
use bevy_ecs::{
change_detection::{DetectChanges, DetectChangesMut},
@ -335,31 +335,33 @@ with UI components as a child of an entity without UI components, your UI layout
maybe_scroll_position,
)) = node_transform_query.get_mut(entity)
{
let Ok((layout, unrounded_unscaled_size)) = ui_surface.get_layout(entity) else {
let Ok((layout, unrounded_size)) = ui_surface.get_layout(entity) else {
return;
};
let layout_size =
inverse_target_scale_factor * Vec2::new(layout.size.width, layout.size.height);
let unrounded_size = inverse_target_scale_factor * unrounded_unscaled_size;
let layout_location =
inverse_target_scale_factor * Vec2::new(layout.location.x, layout.location.y);
let layout_size = Vec2::new(layout.size.width, layout.size.height);
let layout_location = Vec2::new(layout.location.x, layout.location.y);
// The position of the center of the node, stored in the node's transform
let node_center =
layout_location - parent_scroll_position + 0.5 * (layout_size - parent_size);
// only trigger change detection when the new values are different
if node.size != layout_size || node.unrounded_size != unrounded_size {
if node.size != layout_size
|| node.unrounded_size != unrounded_size
|| node.inverse_scale_factor != inverse_target_scale_factor
{
node.size = layout_size;
node.unrounded_size = unrounded_size;
node.inverse_scale_factor = inverse_target_scale_factor;
}
let taffy_rect_to_border_rect = |rect: taffy::Rect<f32>| BorderRect {
left: rect.left * inverse_target_scale_factor,
right: rect.right * inverse_target_scale_factor,
top: rect.top * inverse_target_scale_factor,
bottom: rect.bottom * inverse_target_scale_factor,
left: rect.left,
right: rect.right,
top: rect.top,
bottom: rect.bottom,
};
node.bypass_change_detection().border = taffy_rect_to_border_rect(layout.border);
@ -369,28 +371,35 @@ with UI components as a child of an entity without UI components, your UI layout
if let Some(border_radius) = maybe_border_radius {
// We don't trigger change detection for changes to border radius
node.bypass_change_detection().border_radius =
border_radius.resolve(node.size, viewport_size);
node.bypass_change_detection().border_radius = border_radius.resolve(
node.size,
viewport_size,
inverse_target_scale_factor.recip(),
);
}
if let Some(outline) = maybe_outline {
// don't trigger change detection when only outlines are changed
let node = node.bypass_change_detection();
node.outline_width = if style.display != Display::None {
outline
.width
.resolve(node.size().x, viewport_size)
.unwrap_or(0.)
.max(0.)
match outline.width {
Val::Px(w) => Val::Px(w / inverse_target_scale_factor),
width => width,
}
.resolve(node.size().x, viewport_size)
.unwrap_or(0.)
.max(0.)
} else {
0.
};
node.outline_offset = outline
.offset
.resolve(node.size().x, viewport_size)
.unwrap_or(0.)
.max(0.);
node.outline_offset = match outline.offset {
Val::Px(offset) => Val::Px(offset / inverse_target_scale_factor),
offset => offset,
}
.resolve(node.size().x, viewport_size)
.unwrap_or(0.)
.max(0.);
}
if transform.translation.truncate() != node_center {
@ -414,8 +423,7 @@ with UI components as a child of an entity without UI components, your UI layout
})
.unwrap_or_default();
let content_size = Vec2::new(layout.content_size.width, layout.content_size.height)
* inverse_target_scale_factor;
let content_size = Vec2::new(layout.content_size.width, layout.content_size.height);
let max_possible_offset = (content_size - layout_size).max(Vec2::ZERO);
let clamped_scroll_position = scroll_position.clamp(Vec2::ZERO, max_possible_offset);
@ -1123,7 +1131,7 @@ mod tests {
.sum();
let parent_width = world.get::<ComputedNode>(parent).unwrap().size.x;
assert!((width_sum - parent_width).abs() < 0.001);
assert!((width_sum - 320.).abs() <= 1.);
assert!((width_sum - 320. * s).abs() <= 1.);
s += r;
}
}

View file

@ -66,7 +66,7 @@ pub mod prelude {
};
}
use bevy_app::prelude::*;
use bevy_app::{prelude::*, Animation};
use bevy_ecs::prelude::*;
use bevy_input::InputSystem;
use bevy_render::{
@ -171,9 +171,7 @@ impl Plugin for UiPlugin {
PostUpdate,
(
CameraUpdateSystem,
UiSystem::Prepare
.before(UiSystem::Stack)
.after(bevy_animation::Animation),
UiSystem::Prepare.before(UiSystem::Stack).after(Animation),
UiSystem::Layout,
UiSystem::PostLayout,
)

View file

@ -65,7 +65,6 @@ pub fn ui_picking(
camera_query: Query<(Entity, &Camera, Has<IsDefaultUiCamera>)>,
default_ui_camera: DefaultUiCamera,
primary_window: Query<Entity, With<PrimaryWindow>>,
ui_scale: Res<UiScale>,
ui_stack: Res<UiStack>,
node_query: Query<NodeQuery>,
mut output: EventWriter<PointerHits>,
@ -95,15 +94,15 @@ pub fn ui_picking(
let Ok((_, camera_data, _)) = camera_query.get(camera) else {
continue;
};
let mut pointer_pos = pointer_location.position;
if let Some(viewport) = camera_data.logical_viewport_rect() {
pointer_pos -= viewport.min;
let mut pointer_pos =
pointer_location.position * camera_data.target_scaling_factor().unwrap_or(1.);
if let Some(viewport) = camera_data.physical_viewport_rect() {
pointer_pos -= viewport.min.as_vec2();
}
let scaled_pointer_pos = pointer_pos / **ui_scale;
pointer_pos_by_camera
.entry(camera)
.or_default()
.insert(pointer_id, scaled_pointer_pos);
.insert(pointer_id, pointer_pos);
}
}

View file

@ -4,7 +4,7 @@ use core::{hash::Hash, ops::Range};
use crate::{
BoxShadow, CalculatedClip, ComputedNode, DefaultUiCamera, RenderUiSystem, ResolvedBorderRadius,
TargetCamera, TransparentUi, UiBoxShadowSamples, UiScale, Val,
TargetCamera, TransparentUi, UiBoxShadowSamples, Val,
};
use bevy_app::prelude::*;
use bevy_asset::*;
@ -237,7 +237,6 @@ pub fn extract_shadows(
mut commands: Commands,
mut extracted_box_shadows: ResMut<ExtractedBoxShadows>,
default_ui_camera: Extract<DefaultUiCamera>,
ui_scale: Extract<Res<UiScale>>,
camera_query: Extract<Query<(Entity, &Camera)>>,
box_shadow_query: Extract<
Query<(
@ -268,37 +267,36 @@ pub fn extract_shadows(
continue;
}
let ui_logical_viewport_size = camera_query
let ui_physical_viewport_size = camera_query
.get(camera_entity)
.ok()
.and_then(|(_, c)| c.logical_viewport_size())
.unwrap_or(Vec2::ZERO)
// The logical window resolution returned by `Window` only takes into account the window scale factor and not `UiScale`,
// so we have to divide by `UiScale` to get the size of the UI viewport.
/ ui_scale.0;
.and_then(|(_, c)| {
c.physical_viewport_size()
.map(|size| Vec2::new(size.x as f32, size.y as f32))
})
.unwrap_or(Vec2::ZERO);
let resolve_val = |val, base| match val {
let scale_factor = uinode.inverse_scale_factor.recip();
let resolve_val = |val, base, scale_factor| match val {
Val::Auto => 0.,
Val::Px(px) => px,
Val::Px(px) => px * scale_factor,
Val::Percent(percent) => percent / 100. * base,
Val::Vw(percent) => percent / 100. * ui_logical_viewport_size.x,
Val::Vh(percent) => percent / 100. * ui_logical_viewport_size.y,
Val::VMin(percent) => percent / 100. * ui_logical_viewport_size.min_element(),
Val::VMax(percent) => percent / 100. * ui_logical_viewport_size.max_element(),
Val::Vw(percent) => percent / 100. * ui_physical_viewport_size.x,
Val::Vh(percent) => percent / 100. * ui_physical_viewport_size.y,
Val::VMin(percent) => percent / 100. * ui_physical_viewport_size.min_element(),
Val::VMax(percent) => percent / 100. * ui_physical_viewport_size.max_element(),
};
let spread_x = resolve_val(box_shadow.spread_radius, uinode.size().x);
let spread_ratio_x = (spread_x + uinode.size().x) / uinode.size().x;
let spread_x = resolve_val(box_shadow.spread_radius, uinode.size().x, scale_factor);
let spread_ratio = (spread_x + uinode.size().x) / uinode.size().x;
let spread = vec2(
spread_x,
(spread_ratio_x * uinode.size().y) - uinode.size().y,
);
let spread = vec2(spread_x, uinode.size().y * spread_ratio - uinode.size().y);
let blur_radius = resolve_val(box_shadow.blur_radius, uinode.size().x);
let blur_radius = resolve_val(box_shadow.blur_radius, uinode.size().x, scale_factor);
let offset = vec2(
resolve_val(box_shadow.x_offset, uinode.size().x),
resolve_val(box_shadow.y_offset, uinode.size().y),
resolve_val(box_shadow.x_offset, uinode.size().x, scale_factor),
resolve_val(box_shadow.y_offset, uinode.size().y, scale_factor),
);
let shadow_size = uinode.size() + spread;
@ -307,10 +305,10 @@ pub fn extract_shadows(
}
let radius = ResolvedBorderRadius {
top_left: uinode.border_radius.top_left * spread_ratio_x,
top_right: uinode.border_radius.top_right * spread_ratio_x,
bottom_left: uinode.border_radius.bottom_left * spread_ratio_x,
bottom_right: uinode.border_radius.bottom_right * spread_ratio_x,
top_left: uinode.border_radius.top_left * spread_ratio,
top_right: uinode.border_radius.top_right * spread_ratio,
bottom_left: uinode.border_radius.bottom_left * spread_ratio,
bottom_right: uinode.border_radius.bottom_right * spread_ratio,
};
extracted_box_shadows.box_shadows.insert(
@ -373,7 +371,6 @@ pub fn queue_shadows(
),
batch_range: 0..0,
extra_index: PhaseItemExtraIndex::NONE,
inverse_scale_factor: 1.,
});
}
}

View file

@ -8,7 +8,6 @@ use crate::widget::ImageNode;
use crate::{
experimental::UiChildren, BackgroundColor, BorderColor, CalculatedClip, ComputedNode,
DefaultUiCamera, Outline, ResolvedBorderRadius, TargetCamera, UiAntiAlias, UiBoxShadowSamples,
UiScale,
};
use bevy_app::prelude::*;
use bevy_asset::{load_internal_asset, AssetEvent, AssetId, Assets, Handle};
@ -19,7 +18,7 @@ use bevy_core_pipeline::{core_2d::Camera2d, core_3d::Camera3d};
use bevy_ecs::entity::{EntityHashMap, EntityHashSet};
use bevy_ecs::prelude::*;
use bevy_image::Image;
use bevy_math::{FloatOrd, Mat4, Rect, URect, UVec4, Vec2, Vec3, Vec3Swizzles, Vec4Swizzles};
use bevy_math::{FloatOrd, Mat4, Rect, UVec4, Vec2, Vec3, Vec3Swizzles, Vec4Swizzles};
use bevy_render::render_phase::ViewSortedRenderPhases;
use bevy_render::sync_world::MainEntity;
use bevy_render::texture::TRANSPARENT_IMAGE_HANDLE;
@ -528,16 +527,10 @@ const UI_CAMERA_TRANSFORM_OFFSET: f32 = -0.1;
#[derive(Component)]
pub struct DefaultCameraView(pub Entity);
#[derive(Component)]
pub struct ExtractedAA {
pub scale_factor: f32,
}
/// Extracts all UI elements associated with a camera into the render world.
pub fn extract_default_ui_camera_view(
mut commands: Commands,
mut transparent_render_phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
ui_scale: Extract<Res<UiScale>>,
query: Extract<
Query<
(
@ -553,41 +546,26 @@ pub fn extract_default_ui_camera_view(
) {
live_entities.clear();
let scale = ui_scale.0.recip();
for (entity, camera, ui_anti_alias, shadow_samples) in &query {
// ignore inactive cameras
if !camera.is_active {
commands
.get_entity(entity)
.expect("Camera entity wasn't synced.")
.remove::<(DefaultCameraView, ExtractedAA, UiBoxShadowSamples)>();
.remove::<(DefaultCameraView, UiAntiAlias, UiBoxShadowSamples)>();
continue;
}
if let (
Some(logical_size),
Some(URect {
min: physical_origin,
..
}),
Some(physical_size),
Some(scale_factor),
) = (
camera.logical_viewport_size(),
camera.physical_viewport_rect(),
camera.physical_viewport_size(),
camera.target_scaling_factor(),
) {
if let Some(physical_viewport_rect) = camera.physical_viewport_rect() {
// use a projection matrix with the origin in the top left instead of the bottom left that comes with OrthographicProjection
let projection_matrix = Mat4::orthographic_rh(
0.0,
logical_size.x * scale,
logical_size.y * scale,
physical_viewport_rect.width() as f32,
physical_viewport_rect.height() as f32,
0.0,
0.0,
UI_CAMERA_FAR,
);
let default_camera_view = commands
.spawn((
ExtractedView {
@ -599,12 +577,10 @@ pub fn extract_default_ui_camera_view(
),
clip_from_world: None,
hdr: camera.hdr,
viewport: UVec4::new(
physical_origin.x,
physical_origin.y,
physical_size.x,
physical_size.y,
),
viewport: UVec4::from((
physical_viewport_rect.min,
physical_viewport_rect.size(),
)),
color_grading: Default::default(),
},
TemporaryRenderEntity,
@ -614,10 +590,8 @@ pub fn extract_default_ui_camera_view(
.get_entity(entity)
.expect("Camera entity wasn't synced.");
entity_commands.insert(DefaultCameraView(default_camera_view));
if ui_anti_alias != Some(&UiAntiAlias::Off) {
entity_commands.insert(ExtractedAA {
scale_factor: (scale_factor * ui_scale.0),
});
if let Some(ui_anti_alias) = ui_anti_alias {
entity_commands.insert(*ui_anti_alias);
}
if let Some(shadow_samples) = shadow_samples {
entity_commands.insert(*shadow_samples);
@ -635,10 +609,8 @@ pub fn extract_default_ui_camera_view(
pub fn extract_text_sections(
mut commands: Commands,
mut extracted_uinodes: ResMut<ExtractedUiNodes>,
camera_query: Extract<Query<&Camera>>,
default_ui_camera: Extract<DefaultUiCamera>,
texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
ui_scale: Extract<Res<UiScale>>,
uinode_query: Extract<
Query<(
Entity,
@ -678,32 +650,18 @@ pub fn extract_text_sections(
continue;
}
let scale_factor = camera_query
.get(camera_entity)
.ok()
.and_then(Camera::target_scaling_factor)
.unwrap_or(1.0)
* ui_scale.0;
let inverse_scale_factor = scale_factor.recip();
let Ok(&render_camera_entity) = mapping.get(camera_entity) else {
continue;
};
// Align the text to the nearest physical pixel:
// Align the text to the nearest pixel:
// * Translate by minus the text node's half-size
// (The transform translates to the center of the node but the text coordinates are relative to the node's top left corner)
// * Multiply the logical coordinates by the scale factor to get its position in physical coordinates
// * Round the physical position to the nearest physical pixel
// * Multiply by the rounded physical position by the inverse scale factor to return to logical coordinates
let logical_top_left = -0.5 * uinode.size();
// * Round the position to the nearest physical pixel
let mut transform = global_transform.affine()
* bevy_math::Affine3A::from_translation(logical_top_left.extend(0.));
transform.translation *= scale_factor;
* bevy_math::Affine3A::from_translation((-0.5 * uinode.size()).extend(0.));
transform.translation = transform.translation.round();
transform.translation *= inverse_scale_factor;
let mut color = LinearRgba::WHITE;
let mut current_span = usize::MAX;
@ -730,15 +688,14 @@ pub fn extract_text_sections(
.unwrap_or_default();
current_span = *span_index;
}
let atlas = texture_atlases.get(&atlas_info.texture_atlas).unwrap();
let mut rect = atlas.textures[atlas_info.location.glyph_index].as_rect();
rect.min *= inverse_scale_factor;
rect.max *= inverse_scale_factor;
let rect = texture_atlases
.get(&atlas_info.texture_atlas)
.unwrap()
.textures[atlas_info.location.glyph_index]
.as_rect();
extracted_uinodes.glyphs.push(ExtractedGlyph {
transform: transform
* Mat4::from_translation(position.extend(0.) * inverse_scale_factor),
transform: transform * Mat4::from_translation(position.extend(0.)),
rect,
});
@ -762,7 +719,7 @@ pub fn extract_text_sections(
camera_entity: render_camera_entity.id(),
rect,
item: ExtractedUiItem::Glyphs {
atlas_scaling: Vec2::splat(inverse_scale_factor),
atlas_scaling: Vec2::ONE,
range: start..end,
},
main_entity: entity.into(),
@ -795,7 +752,6 @@ struct UiVertex {
pub size: [f32; 2],
/// Position relative to the center of the UI node.
pub point: [f32; 2],
pub inverse_scale_factor: f32,
}
#[derive(Resource)]
@ -846,13 +802,13 @@ pub fn queue_uinodes(
ui_pipeline: Res<UiPipeline>,
mut pipelines: ResMut<SpecializedRenderPipelines<UiPipeline>>,
mut transparent_render_phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
mut views: Query<(Entity, &ExtractedView, Option<&ExtractedAA>)>,
mut views: Query<(Entity, &ExtractedView, Option<&UiAntiAlias>)>,
pipeline_cache: Res<PipelineCache>,
draw_functions: Res<DrawFunctions<TransparentUi>>,
) {
let draw_function = draw_functions.read().id::<DrawUi>();
for (entity, extracted_uinode) in extracted_uinodes.uinodes.iter() {
let Ok((view_entity, view, extracted_aa)) = views.get_mut(extracted_uinode.camera_entity)
let Ok((view_entity, view, ui_anti_alias)) = views.get_mut(extracted_uinode.camera_entity)
else {
continue;
};
@ -866,7 +822,7 @@ pub fn queue_uinodes(
&ui_pipeline,
UiPipelineKey {
hdr: view.hdr,
anti_alias: extracted_aa.is_some(),
anti_alias: matches!(ui_anti_alias, None | Some(UiAntiAlias::On)),
},
);
transparent_phase.add(TransparentUi {
@ -880,7 +836,6 @@ pub fn queue_uinodes(
// batch_range will be calculated in prepare_uinodes
batch_range: 0..0,
extra_index: PhaseItemExtraIndex::NONE,
inverse_scale_factor: extracted_aa.map(|aa| aa.scale_factor).unwrap_or(1.),
});
}
}
@ -1151,7 +1106,6 @@ pub fn prepare_uinodes(
border: [border.left, border.top, border.right, border.bottom],
size: rect_size.xy().into(),
point: points[i].into(),
inverse_scale_factor: item.inverse_scale_factor,
});
}
@ -1255,7 +1209,6 @@ pub fn prepare_uinodes(
border: [0.0; 4],
size: size.into(),
point: [0.0; 2],
inverse_scale_factor: item.inverse_scale_factor,
});
}

View file

@ -74,8 +74,6 @@ impl SpecializedRenderPipeline for UiPipeline {
VertexFormat::Float32x2,
// position relative to the center
VertexFormat::Float32x2,
// inverse scale factor
VertexFormat::Float32,
],
);
let shader_defs = if key.anti_alias {

View file

@ -97,7 +97,6 @@ pub struct TransparentUi {
pub draw_function: DrawFunctionId,
pub batch_range: Range<u32>,
pub extra_index: PhaseItemExtraIndex,
pub inverse_scale_factor: f32,
}
impl PhaseItem for TransparentUi {

View file

@ -22,7 +22,6 @@ struct VertexOutput {
// Position relative to the center of the rectangle.
@location(6) point: vec2<f32>,
@location(7) @interpolate(flat) scale_factor: f32,
@builtin(position) position: vec4<f32>,
};
@ -40,7 +39,6 @@ fn vertex(
@location(5) border: vec4<f32>,
@location(6) size: vec2<f32>,
@location(7) point: vec2<f32>,
@location(8) scale_factor: f32,
) -> VertexOutput {
var out: VertexOutput;
out.uv = vertex_uv;
@ -51,7 +49,6 @@ fn vertex(
out.size = size;
out.border = border;
out.point = point;
out.scale_factor = scale_factor;
return out;
}
@ -118,9 +115,9 @@ fn sd_inset_rounded_box(point: vec2<f32>, size: vec2<f32>, radius: vec4<f32>, in
}
// get alpha for antialiasing for sdf
fn antialias(distance: f32, scale_factor: f32) -> f32 {
fn antialias(distance: f32) -> f32 {
// Using the fwidth(distance) was causing artifacts, so just use the distance.
return clamp(0.0, 1.0, (0.5 - scale_factor * distance));
return clamp(0.0, 1.0, (0.5 - distance));
}
fn draw(in: VertexOutput, texture_color: vec4<f32>) -> vec4<f32> {
@ -151,7 +148,7 @@ fn draw(in: VertexOutput, texture_color: vec4<f32>) -> vec4<f32> {
// This select statement ensures we only perform anti-aliasing where a non-zero width border
// is present, otherwise an outline about the external boundary would be drawn even without
// a border.
let t = select(1.0 - step(0.0, border_distance), antialias(border_distance, in.scale_factor), external_distance < internal_distance);
let t = select(1.0 - step(0.0, border_distance), antialias(border_distance), external_distance < internal_distance);
#else
let t = 1.0 - step(0.0, border_distance);
#endif
@ -167,7 +164,7 @@ fn draw_background(in: VertexOutput, texture_color: vec4<f32>) -> vec4<f32> {
let internal_distance = sd_inset_rounded_box(in.point, in.size, in.radius, in.border);
#ifdef ANTI_ALIAS
let t = antialias(internal_distance, in.scale_factor);
let t = antialias(internal_distance);
#else
let t = 1.0 - step(0.0, internal_distance);
#endif

View file

@ -655,7 +655,6 @@ pub fn queue_ui_material_nodes<M: UiMaterial>(
),
batch_range: 0..0,
extra_index: PhaseItemExtraIndex::NONE,
inverse_scale_factor: 1.,
});
}
}

View file

@ -237,6 +237,7 @@ pub struct ExtractedUiTextureSlice {
pub image_scale_mode: SpriteImageMode,
pub flip_x: bool,
pub flip_y: bool,
pub inverse_scale_factor: f32,
pub main_entity: MainEntity,
}
@ -331,6 +332,7 @@ pub fn extract_ui_texture_slices(
atlas_rect,
flip_x: image.flip_x,
flip_y: image.flip_y,
inverse_scale_factor: uinode.inverse_scale_factor,
main_entity: entity.into(),
},
);
@ -372,7 +374,6 @@ pub fn queue_ui_slices(
),
batch_range: 0..0,
extra_index: PhaseItemExtraIndex::NONE,
inverse_scale_factor: 1.,
});
}
}
@ -609,7 +610,7 @@ pub fn prepare_ui_slices(
let [slices, border, repeat] = compute_texture_slices(
image_size,
uinode_rect.size(),
uinode_rect.size() * texture_slices.inverse_scale_factor,
&texture_slices.image_scale_mode,
);

View file

@ -22,7 +22,7 @@ pub struct ComputedNode {
/// The order of the node in the UI layout.
/// Nodes with a higher stack index are drawn on top of and receive interactions before nodes with lower stack indices.
pub(crate) stack_index: u32,
/// The size of the node as width and height in logical pixels
/// The size of the node as width and height in physical pixels
///
/// automatically calculated by [`super::layout::ui_layout_system`]
pub(crate) size: Vec2,
@ -37,29 +37,34 @@ pub struct ComputedNode {
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
pub(crate) outline_offset: f32,
/// The unrounded size of the node as width and height in logical pixels.
/// The unrounded size of the node as width and height in physical pixels.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
pub(crate) unrounded_size: Vec2,
/// Resolved border values in logical pixels
/// Resolved border values in physical pixels
/// Border updates bypass change detection.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
pub(crate) border: BorderRect,
/// Resolved border radius values in logical pixels.
/// Resolved border radius values in physical pixels.
/// Border radius updates bypass change detection.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
pub(crate) border_radius: ResolvedBorderRadius,
/// Resolved padding values in logical pixels
/// Resolved padding values in physical pixels
/// Padding updates bypass change detection.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
pub(crate) padding: BorderRect,
/// Inverse scale factor for this Node.
/// Multiply physical coordinates by the inverse scale factor to give logical coordinates.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
pub(crate) inverse_scale_factor: f32,
}
impl ComputedNode {
/// The calculated node size as width and height in logical pixels.
/// The calculated node size as width and height in physical pixels.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
#[inline]
@ -82,7 +87,7 @@ impl ComputedNode {
self.stack_index
}
/// The calculated node size as width and height in logical pixels before rounding.
/// The calculated node size as width and height in physical pixels before rounding.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
#[inline]
@ -90,7 +95,7 @@ impl ComputedNode {
self.unrounded_size
}
/// Returns the thickness of the UI node's outline in logical pixels.
/// Returns the thickness of the UI node's outline in physical pixels.
/// If this value is negative or `0.` then no outline will be rendered.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
@ -99,7 +104,7 @@ impl ComputedNode {
self.outline_width
}
/// Returns the amount of space between the outline and the edge of the node in logical pixels.
/// Returns the amount of space between the outline and the edge of the node in physical pixels.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
#[inline]
@ -139,7 +144,7 @@ impl ComputedNode {
}
}
/// Returns the thickness of the node's border on each edge in logical pixels.
/// Returns the thickness of the node's border on each edge in physical pixels.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
#[inline]
@ -147,7 +152,7 @@ impl ComputedNode {
self.border
}
/// Returns the border radius for each of the node's corners in logical pixels.
/// Returns the border radius for each of the node's corners in physical pixels.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
#[inline]
@ -155,7 +160,7 @@ impl ComputedNode {
self.border_radius
}
/// Returns the inner border radius for each of the node's corners in logical pixels.
/// Returns the inner border radius for each of the node's corners in physical pixels.
pub fn inner_radius(&self) -> ResolvedBorderRadius {
fn clamp_corner(r: f32, size: Vec2, offset: Vec2) -> f32 {
let s = 0.5 * size + offset;
@ -177,7 +182,7 @@ impl ComputedNode {
}
}
/// Returns the thickness of the node's padding on each edge in logical pixels.
/// Returns the thickness of the node's padding on each edge in physical pixels.
///
/// Automatically calculated by [`super::layout::ui_layout_system`].
#[inline]
@ -185,7 +190,7 @@ impl ComputedNode {
self.padding
}
/// Returns the combined inset on each edge including both padding and border thickness in logical pixels.
/// Returns the combined inset on each edge including both padding and border thickness in physical pixels.
#[inline]
pub const fn content_inset(&self) -> BorderRect {
BorderRect {
@ -195,6 +200,13 @@ impl ComputedNode {
bottom: self.border.bottom + self.padding.bottom,
}
}
/// Returns the inverse of the scale factor for this node.
/// To convert from physical coordinates to logical coordinates multiply by this value.
#[inline]
pub const fn inverse_scale_factor(&self) -> f32 {
self.inverse_scale_factor
}
}
impl ComputedNode {
@ -207,6 +219,7 @@ impl ComputedNode {
border_radius: ResolvedBorderRadius::ZERO,
border: BorderRect::ZERO,
padding: BorderRect::ZERO,
inverse_scale_factor: 1.,
};
}
@ -2330,10 +2343,15 @@ impl BorderRadius {
}
/// Compute the logical border radius for a single corner from the given values
pub fn resolve_single_corner(radius: Val, node_size: Vec2, viewport_size: Vec2) -> f32 {
pub fn resolve_single_corner(
radius: Val,
node_size: Vec2,
viewport_size: Vec2,
scale_factor: f32,
) -> f32 {
match radius {
Val::Auto => 0.,
Val::Px(px) => px,
Val::Px(px) => px * scale_factor,
Val::Percent(percent) => node_size.min_element() * percent / 100.,
Val::Vw(percent) => viewport_size.x * percent / 100.,
Val::Vh(percent) => viewport_size.y * percent / 100.,
@ -2343,19 +2361,44 @@ impl BorderRadius {
.clamp(0., 0.5 * node_size.min_element())
}
pub fn resolve(&self, node_size: Vec2, viewport_size: Vec2) -> ResolvedBorderRadius {
pub fn resolve(
&self,
node_size: Vec2,
viewport_size: Vec2,
scale_factor: f32,
) -> ResolvedBorderRadius {
ResolvedBorderRadius {
top_left: Self::resolve_single_corner(self.top_left, node_size, viewport_size),
top_right: Self::resolve_single_corner(self.top_right, node_size, viewport_size),
bottom_left: Self::resolve_single_corner(self.bottom_left, node_size, viewport_size),
bottom_right: Self::resolve_single_corner(self.bottom_right, node_size, viewport_size),
top_left: Self::resolve_single_corner(
self.top_left,
node_size,
viewport_size,
scale_factor,
),
top_right: Self::resolve_single_corner(
self.top_right,
node_size,
viewport_size,
scale_factor,
),
bottom_left: Self::resolve_single_corner(
self.bottom_left,
node_size,
viewport_size,
scale_factor,
),
bottom_right: Self::resolve_single_corner(
self.bottom_right,
node_size,
viewport_size,
scale_factor,
),
}
}
}
/// Represents the resolved border radius values for a UI node.
///
/// The values are in logical pixels.
/// The values are in physical pixels.
#[derive(Copy, Clone, Debug, Default, PartialEq, Reflect)]
pub struct ResolvedBorderRadius {
pub top_left: f32,

View file

@ -115,7 +115,8 @@ fn update_clipping(
clip_rect.max.x -= clip_inset.right;
clip_rect.max.y -= clip_inset.bottom;
clip_rect = clip_rect.inflate(node.overflow_clip_margin.margin.max(0.));
clip_rect = clip_rect
.inflate(node.overflow_clip_margin.margin.max(0.) / computed_node.inverse_scale_factor);
if node.overflow.x == OverflowAxis::Visible {
clip_rect.min.x = -f32::INFINITY;

View file

@ -350,10 +350,7 @@ fn queue_text(
TextBounds::UNBOUNDED
} else {
// `scale_factor` is already multiplied by `UiScale`
TextBounds::new(
node.unrounded_size.x * scale_factor,
node.unrounded_size.y * scale_factor,
)
TextBounds::new(node.unrounded_size.x, node.unrounded_size.y)
};
let text_layout_info = text_layout_info.into_inner();
@ -398,12 +395,7 @@ fn queue_text(
#[allow(clippy::too_many_arguments)]
pub fn text_system(
mut textures: ResMut<Assets<Image>>,
mut scale_factors_buffer: Local<EntityHashMap<f32>>,
mut last_scale_factors: Local<EntityHashMap<f32>>,
fonts: Res<Assets<Font>>,
camera_query: Query<(Entity, &Camera)>,
default_ui_camera: DefaultUiCamera,
ui_scale: Res<UiScale>,
mut texture_atlases: ResMut<Assets<TextureAtlasLayout>>,
mut font_atlas_sets: ResMut<FontAtlasSets>,
mut text_pipeline: ResMut<TextPipeline>,
@ -414,40 +406,13 @@ pub fn text_system(
&mut TextLayoutInfo,
&mut TextNodeFlags,
&mut ComputedTextBlock,
Option<&TargetCamera>,
)>,
mut text_reader: TextUiReader,
mut font_system: ResMut<CosmicFontSystem>,
mut swash_cache: ResMut<SwashCache>,
) {
scale_factors_buffer.clear();
for (entity, node, block, text_layout_info, text_flags, mut computed, maybe_camera) in
&mut text_query
{
let Some(camera_entity) = maybe_camera
.map(TargetCamera::entity)
.or(default_ui_camera.get())
else {
continue;
};
let scale_factor = match scale_factors_buffer.entry(camera_entity) {
Entry::Occupied(entry) => *entry.get(),
Entry::Vacant(entry) => *entry.insert(
camera_query
.get(camera_entity)
.ok()
.and_then(|(_, c)| c.target_scaling_factor())
.unwrap_or(1.0)
* ui_scale.0,
),
};
let inverse_scale_factor = scale_factor.recip();
if last_scale_factors.get(&camera_entity) != Some(&scale_factor)
|| node.is_changed()
|| text_flags.needs_recompute
{
for (entity, node, block, text_layout_info, text_flags, mut computed) in &mut text_query {
if node.is_changed() || text_flags.needs_recompute {
queue_text(
entity,
&fonts,
@ -455,8 +420,8 @@ pub fn text_system(
&mut font_atlas_sets,
&mut texture_atlases,
&mut textures,
scale_factor,
inverse_scale_factor,
node.inverse_scale_factor.recip(),
node.inverse_scale_factor,
block,
node,
text_flags,
@ -468,5 +433,4 @@ pub fn text_system(
);
}
}
core::mem::swap(&mut *last_scale_factors, &mut *scale_factors_buffer);
}

View file

@ -34,8 +34,7 @@ struct Animations {
graph: Handle<AnimationGraph>,
}
#[derive(Event, AnimationEvent, Reflect, Clone)]
#[reflect(AnimationEvent)]
#[derive(Event, Reflect, Clone)]
struct OnStep;
fn observe_on_step(

View file

@ -11,39 +11,27 @@ fn main() {
.add_plugins(DefaultPlugins)
.add_event::<MessageEvent>()
.add_systems(Startup, setup)
.add_systems(PreUpdate, (animate_text_opacity, edit_message))
.add_systems(Update, animate_text_opacity)
.add_observer(edit_message)
.run();
}
#[derive(Component)]
struct MessageText;
#[derive(Event, Reflect, Clone)]
#[reflect(AnimationEvent)]
#[derive(Event, Clone)]
struct MessageEvent {
value: String,
color: Color,
}
// AnimationEvent can also be derived, but doing so will
// trigger it as an observer event which is triggered in PostUpdate.
// We need to set the message text before that so it is
// updated before rendering without a one frame delay.
impl AnimationEvent for MessageEvent {
fn trigger(&self, _time: f32, _weight: f32, _entity: Entity, world: &mut World) {
world.send_event(self.clone());
}
}
fn edit_message(
mut event_reader: EventReader<MessageEvent>,
trigger: Trigger<MessageEvent>,
text: Single<(&mut Text2d, &mut TextColor), With<MessageText>>,
) {
let (mut text, mut color) = text.into_inner();
for event in event_reader.read() {
text.0 = event.value.clone();
color.0 = event.color;
}
text.0 = trigger.event().value.clone();
color.0 = trigger.event().color;
}
fn setup(

View file

@ -233,13 +233,13 @@ fn update_animation(
fn update_transform<T: UpdateTransform + Component>(
animation: Res<AnimationState>,
mut containers: Query<(&mut Transform, &mut Node, &T)>,
mut containers: Query<(&mut Transform, &mut Node, &ComputedNode, &T)>,
) {
for (mut transform, mut node, update_transform) in &mut containers {
for (mut transform, mut node, computed_node, update_transform) in &mut containers {
update_transform.update(animation.t, &mut transform);
node.left = Val::Px(transform.translation.x);
node.top = Val::Px(transform.translation.y);
node.left = Val::Px(transform.translation.x * computed_node.inverse_scale_factor());
node.top = Val::Px(transform.translation.y * computed_node.inverse_scale_factor());
}
}

View file

@ -51,26 +51,24 @@ fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
..default()
})
.with_children(|parent| {
for ([width, height], flip_x, flip_y) in [
([160., 160.], false, false),
([320., 160.], false, true),
([320., 160.], true, false),
([160., 160.], true, true),
] {
parent.spawn((
ImageNode {
image: image.clone(),
flip_x,
flip_y,
image_mode: NodeImageMode::Sliced(slicer.clone()),
..default()
},
Node {
width: Val::Px(width),
height: Val::Px(height),
..default()
},
));
for [columns, rows] in [[3., 3.], [4., 4.], [5., 4.], [4., 5.], [5., 5.]] {
for (flip_x, flip_y) in [(false, false), (false, true), (true, false), (true, true)]
{
parent.spawn((
ImageNode {
image: image.clone(),
flip_x,
flip_y,
image_mode: NodeImageMode::Sliced(slicer.clone()),
..default()
},
Node {
width: Val::Px(16. * columns),
height: Val::Px(16. * rows),
..default()
},
));
}
}
});
}

View file

@ -34,7 +34,6 @@ crates=(
bevy_core_pipeline
bevy_input
bevy_gilrs
bevy_animation/derive
bevy_animation
bevy_pbr
bevy_gltf