Mirror of https://github.com/bevyengine/bevy, synced 2024-11-26 06:30:19 +00:00

upgrade legion

parent 7fe2288814
commit 75ab0e21e5
6 changed files with 214 additions and 46 deletions

@@ -95,6 +95,9 @@ impl FilterResult for Option<bool> {
 pub trait Filter<T: Copy>: Send + Sync + Sized {
     type Iter: Iterator + Send + Sync;
 
+    // Called when a query is about to begin execution.
+    fn init(&self) {}
+
     /// Pulls iterator data out of the source.
     fn collect(&self, source: T) -> Self::Iter;
 
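The hunk above adds an `init` hook to the `Filter` trait so a filter can reset per-execution state before a query runs. Below is a minimal standalone sketch of that contract; the trait is simplified (no `Send + Sync` bounds, a plain `u32` source), and `CountTo` and `run_query` are invented names for illustration, not legion API.

// Sketch of the init/collect call order: a query driver calls `init`
// once per execution, then `collect` to obtain the filter's iterator.
trait Filter<T: Copy> {
    type Iter: Iterator;

    // Called when a query is about to begin execution.
    fn init(&self) {}

    // Pulls iterator data out of the source.
    fn collect(&self, source: T) -> Self::Iter;
}

struct CountTo;

impl Filter<u32> for CountTo {
    type Iter = std::ops::Range<u32>;

    fn collect(&self, source: u32) -> Self::Iter {
        0..source
    }
}

fn run_query<F: Filter<u32>>(filter: &F, source: u32) -> usize {
    filter.init(); // reset any per-execution state before iterating
    filter.collect(source).count()
}

fn main() {
    assert_eq!(run_query(&CountTo, 4), 4);
}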
@@ -168,6 +171,9 @@ pub trait EntityFilter: Send + Clone {
     type ChunksetFilter: for<'a> Filter<ChunksetFilterData<'a>> + Clone;
     type ChunkFilter: for<'a> Filter<ChunkFilterData<'a>> + Clone;
 
+    /// Initializes the entity filter for iteration.
+    fn init(&self);
+
     /// Gets mutable references to both inner filters.
     fn filters(
         &self,
@@ -239,6 +245,12 @@ where
     type ChunksetFilter = S;
     type ChunkFilter = C;
 
+    fn init(&self) {
+        self.arch_filter.init();
+        self.chunkset_filter.init();
+        self.chunk_filter.init();
+    }
+
     fn filters(
         &self,
     ) -> (
@@ -380,7 +392,7 @@ impl<'a, 'b, F: Filter<ArchetypeFilterData<'a>>> Iterator for FilterArchIter<'a,
     }
 }
 
-/// An iterator which yields the index of chunks that match a filter.
+/// An iterator which yields the index of chuinks that match a filter.
 pub struct FilterChunkIter<'a, 'b, F: Filter<ChunksetFilterData<'a>>> {
     filter: &'b F,
     chunks: Enumerate<F::Iter>,
@@ -458,6 +470,9 @@ pub struct Passthrough;
 impl<'a> Filter<ArchetypeFilterData<'a>> for Passthrough {
     type Iter = Take<Repeat<()>>;
 
+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, arch: ArchetypeFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(arch.component_types.len())
@@ -470,6 +485,9 @@ impl<'a> Filter<ArchetypeFilterData<'a>> for Passthrough {
 impl<'a> Filter<ChunksetFilterData<'a>> for Passthrough {
     type Iter = Take<Repeat<()>>;
 
+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, sets: ChunksetFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(sets.archetype_data.len())
@@ -482,6 +500,9 @@ impl<'a> Filter<ChunksetFilterData<'a>> for Passthrough {
 impl<'a> Filter<ChunkFilterData<'a>> for Passthrough {
     type Iter = Take<Repeat<()>>;
 
+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, chunk: ChunkFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(chunk.chunks.len())
@@ -520,6 +541,9 @@ impl ActiveFilter for Any {}
 impl<'a> Filter<ArchetypeFilterData<'a>> for Any {
     type Iter = Take<Repeat<()>>;
 
+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, arch: ArchetypeFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(arch.component_types.len())
@@ -532,6 +556,9 @@ impl<'a> Filter<ArchetypeFilterData<'a>> for Any {
 impl<'a> Filter<ChunksetFilterData<'a>> for Any {
     type Iter = Take<Repeat<()>>;
 
+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, sets: ChunksetFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(sets.archetype_data.len())
@@ -544,6 +571,9 @@ impl<'a> Filter<ChunksetFilterData<'a>> for Any {
 impl<'a> Filter<ChunkFilterData<'a>> for Any {
     type Iter = Take<Repeat<()>>;
 
+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, chunk: ChunkFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(chunk.chunks.len())
@@ -592,6 +622,9 @@ impl<F> ActiveFilter for Not<F> {}
 impl<'a, T: Copy, F: Filter<T>> Filter<T> for Not<F> {
     type Iter = F::Iter;
 
+    #[inline]
+    fn init(&self) { self.filter.init(); }
+
     #[inline]
     fn collect(&self, source: T) -> Self::Iter { self.filter.collect(source) }
 
@@ -648,6 +681,9 @@ impl<T> ActiveFilter for And<(T,)> {}
 impl<'a, T: Copy, F: Filter<T>> Filter<T> for And<(F,)> {
     type Iter = F::Iter;
 
+    #[inline]
+    fn init(&self) { self.filters.0.init(); }
+
     #[inline]
     fn collect(&self, source: T) -> Self::Iter { self.filters.0.collect(source) }
 
@@ -717,6 +753,13 @@ macro_rules! impl_and_filter {
             // type Iter = crate::zip::Zip<( $( $ty::Iter ),* )>;
             type Iter = recursive_zip!(@type $($ty::Iter),*);
 
+            #[inline]
+            fn init(&self) {
+                #![allow(non_snake_case)]
+                let ($( $ty, )*) = &self.filters;
+                $( $ty.init(); )*
+            }
+
             fn collect(&self, source: T) -> Self::Iter {
                 #![allow(non_snake_case)]
                 let ($( $ty, )*) = &self.filters;
@@ -818,6 +861,13 @@ macro_rules! impl_or_filter {
             // type Iter = crate::zip::Zip<( $( $ty::Iter ),* )>;
             type Iter = recursive_zip!(@type $($ty::Iter),*);
 
+            #[inline]
+            fn init(&self) {
+                #![allow(non_snake_case)]
+                let ($( $ty, )*) = &self.filters;
+                $( $ty.init(); )*
+            }
+
             fn collect(&self, source: T) -> Self::Iter {
                 #![allow(non_snake_case)]
                 let ($( $ty, )*) = &self.filters;
@@ -905,7 +955,7 @@ impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h,
 impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i, J => j, K => k);
 impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i, J => j, K => k, L => l);
 
-/// A filter which requires that all chunks contain entity data components of type `T`.
+/// A filter qhich requires that all chunks contain entity data components of type `T`.
 #[derive(Debug)]
 pub struct ComponentFilter<T>(PhantomData<T>);
 
@@ -923,6 +973,9 @@ impl<T> Clone for ComponentFilter<T> {
 impl<'a, T: Component> Filter<ArchetypeFilterData<'a>> for ComponentFilter<T> {
     type Iter = SliceVecIter<'a, ComponentTypeId>;
 
+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, source: ArchetypeFilterData<'a>) -> Self::Iter {
         source.component_types.iter()
@@ -995,6 +1048,9 @@ impl<T> Clone for TagFilter<T> {
 impl<'a, T: Tag> Filter<ArchetypeFilterData<'a>> for TagFilter<T> {
     type Iter = SliceVecIter<'a, TagTypeId>;
 
+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, source: ArchetypeFilterData<'a>) -> Self::Iter { source.tag_types.iter() }
 
@@ -1067,6 +1123,9 @@ impl<'a, T> Clone for TagValueFilter<'a, T> {
 impl<'a, 'b, T: Tag> Filter<ChunksetFilterData<'a>> for TagValueFilter<'b, T> {
     type Iter = Iter<'a, T>;
 
+    #[inline]
+    fn init(&self) {}
+
     fn collect(&self, source: ChunksetFilterData<'a>) -> Self::Iter {
         unsafe {
             source
@@ -1132,14 +1191,16 @@ impl<'a, T> std::ops::BitOr<Passthrough> for TagValueFilter<'a, T> {
 /// chunk since the last time the filter was executed.
 #[derive(Debug)]
 pub struct ComponentChangedFilter<T: Component> {
-    last_read_version: AtomicU64,
+    high_water_mark: AtomicU64,
+    version_threshold: AtomicU64,
     phantom: PhantomData<T>,
 }
 
 impl<T: Component> ComponentChangedFilter<T> {
     fn new() -> ComponentChangedFilter<T> {
         ComponentChangedFilter {
-            last_read_version: AtomicU64::new(0),
+            high_water_mark: AtomicU64::new(0),
+            version_threshold: AtomicU64::new(0),
             phantom: PhantomData,
         }
     }
@@ -1150,29 +1211,63 @@ impl<T: Component> ActiveFilter for ComponentChangedFilter<T> {}
 impl<T: Component> Clone for ComponentChangedFilter<T> {
     fn clone(&self) -> Self {
         Self {
-            last_read_version: AtomicU64::new(self.last_read_version.load(Ordering::Relaxed)),
+            high_water_mark: AtomicU64::new(self.high_water_mark.load(Ordering::Relaxed)),
+            version_threshold: AtomicU64::new(self.version_threshold.load(Ordering::Relaxed)),
             phantom: PhantomData,
         }
     }
 }
 
 impl<'a, T: Component> Filter<ChunkFilterData<'a>> for ComponentChangedFilter<T> {
-    type Iter = Iter<'a, ComponentStorage>;
+    type Iter = ComponentChangedState<'a, ComponentStorage>;
 
-    fn collect(&self, source: ChunkFilterData<'a>) -> Self::Iter { source.chunks.iter() }
+    #[inline]
+    fn init(&self) {
+        let version = self.high_water_mark.load(Ordering::Relaxed);
+        let mut threshold = self.version_threshold.load(Ordering::Relaxed);
+        if threshold < version {
+            loop {
+                match self.version_threshold.compare_exchange_weak(
+                    threshold,
+                    version,
+                    Ordering::Relaxed,
+                    Ordering::Relaxed,
+                ) {
+                    Ok(_) => break,
+                    Err(stored_last_read) => {
+                        threshold = stored_last_read;
+                        if threshold >= version {
+                            // matched version is already considered visited, update no longer needed
+                            break;
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    fn collect(&self, source: ChunkFilterData<'a>) -> Self::Iter {
+        let compare_version = self.version_threshold.load(Ordering::Relaxed);
+        ComponentChangedState {
+            iter: source.chunks.iter(),
+            version_threshold: compare_version,
+        }
+    }
 
     #[inline]
     fn is_match(&self, item: &<Self::Iter as Iterator>::Item) -> Option<bool> {
-        let components = item.components(ComponentTypeId::of::<T>());
+        let (version_threshold, storage) = item;
+
+        let components = storage.components(ComponentTypeId::of::<T>());
         if components.is_none() {
             return Some(false);
         }
 
         let version = components.unwrap().version();
-        let mut last_read = self.last_read_version.load(Ordering::Relaxed);
+        let mut last_read = self.high_water_mark.load(Ordering::Relaxed);
         if last_read < version {
             loop {
-                match self.last_read_version.compare_exchange_weak(
+                match self.high_water_mark.compare_exchange_weak(
                     last_read,
                     version,
                     Ordering::Relaxed,
@@ -1181,13 +1276,16 @@ impl<'a, T: Component> Filter<ChunkFilterData<'a>> for ComponentChangedFilter<T>
                     Ok(_) => break,
                     Err(stored_last_read) => {
                         last_read = stored_last_read;
-                        if last_read < version {
+                        if last_read >= version {
                             // matched version is already considered visited, update no longer needed
                             break;
                         }
                     }
                 }
             }
+        }
+
+        if version > *version_threshold {
             Some(true)
         } else {
             Some(false)
@@ -1195,6 +1293,19 @@ impl<'a, T: Component> Filter<ChunkFilterData<'a>> for ComponentChangedFilter<T>
     }
 }
 
+pub struct ComponentChangedState<'a, T: Component> {
+    iter: Iter<'a, T>,
+    version_threshold: u64,
+}
+
+impl<'a, T: Component> Iterator for ComponentChangedState<'a, T> {
+    type Item = (u64, &'a T);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.iter.next().map(|c| (self.version_threshold, c))
+    }
+}
+
 impl<'a, T: Component> std::ops::Not for ComponentChangedFilter<T> {
     type Output = Not<Self>;
 
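Taken together, the `ComponentChangedFilter` hunks above replace the single `last_read_version` counter with a `high_water_mark` (the highest component version ever observed) plus a `version_threshold` snapshot promoted in `init`, and `is_match` now reports a chunk as changed when its version exceeds that snapshot. Below is a standalone sketch of the two-counter scheme, simplified to `fetch_max` instead of the `compare_exchange_weak` retry loops and a bare `u64` chunk version in place of `ComponentStorage`; the names mirror the diff but nothing here is legion's public API.

use std::sync::atomic::{AtomicU64, Ordering};

struct ChangedFilter {
    // Highest chunk version ever observed by `matches`.
    high_water_mark: AtomicU64,
    // Snapshot of the high-water mark taken at the start of the current
    // query execution; chunks newer than this count as "changed".
    version_threshold: AtomicU64,
}

impl ChangedFilter {
    fn new() -> Self {
        Self {
            high_water_mark: AtomicU64::new(0),
            version_threshold: AtomicU64::new(0),
        }
    }

    // Analogue of `init` in the diff: promote the threshold to everything
    // seen during previous executions.
    fn init(&self) {
        let seen = self.high_water_mark.load(Ordering::Relaxed);
        self.version_threshold.fetch_max(seen, Ordering::Relaxed);
    }

    // Analogue of `is_match`: record the version, then compare it against
    // the threshold captured before this execution started.
    fn matches(&self, chunk_version: u64) -> bool {
        self.high_water_mark.fetch_max(chunk_version, Ordering::Relaxed);
        chunk_version > self.version_threshold.load(Ordering::Relaxed)
    }
}

fn main() {
    let filter = ChangedFilter::new();

    filter.init();
    assert!(filter.matches(1)); // first execution: version 1 is new

    filter.init();
    assert!(!filter.matches(1)); // unchanged chunk no longer matches
    assert!(filter.matches(2)); // a freshly bumped chunk does
}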
@@ -1241,6 +1352,7 @@ impl<'a, T: Component> std::ops::BitOr<Passthrough> for ComponentChangedFilter<T
 #[cfg(test)]
 mod test {
     use super::filter_fns::*;
+    use crate::prelude::*;
 
     #[test]
     pub fn create() {
@@ -1249,4 +1361,40 @@ mod test {
         let filter = component::<usize>() | tag_value(&5isize);
         tracing::trace!(?filter);
     }
+
+    #[test]
+    fn component_changed_filter() {
+        let _ = tracing_subscriber::fmt::try_init();
+
+        let universe = Universe::new();
+        let mut world = universe.create_world();
+
+        let entity1 = world.insert((), vec![(1usize,)])[0];
+        let entity2 = world.insert((), vec![(2usize, false)])[0];
+
+        let query = <Read<usize>>::query().filter(changed::<usize>());
+
+        assert_eq!(2, query.iter_chunks(&world).collect::<Vec<_>>().len());
+
+        *world.get_component_mut::<usize>(entity1).unwrap() = 3usize;
+
+        assert_eq!(1, query.iter_chunks(&world).collect::<Vec<_>>().len());
+
+        *world.get_component_mut::<usize>(entity1).unwrap() = 4usize;
+        *world.get_component_mut::<usize>(entity2).unwrap() = 5usize;
+
+        assert_eq!(2, query.iter_chunks(&world).collect::<Vec<_>>().len());
+
+        *world.get_component_mut::<usize>(entity1).unwrap() = 6usize;
+        *world.get_component_mut::<usize>(entity1).unwrap() = 7usize;
+        *world.get_component_mut::<usize>(entity2).unwrap() = 8usize;
+
+        assert_eq!(2, query.iter_chunks(&world).collect::<Vec<_>>().len());
+
+        *world.get_component_mut::<usize>(entity2).unwrap() = 6usize;
+        *world.get_component_mut::<usize>(entity2).unwrap() = 7usize;
+        *world.get_component_mut::<usize>(entity1).unwrap() = 8usize;
+
+        assert_eq!(2, query.iter_chunks(&world).collect::<Vec<_>>().len());
+    }
 }

@@ -949,6 +949,7 @@ where
         &'a self,
         world: &'data World,
     ) -> ChunkViewIter<'data, 'a, V, F::ArchetypeFilter, F::ChunksetFilter, F::ChunkFilter> {
+        self.filter.init();
         let (arch_filter, chunkset_filter, chunk_filter) = self.filter.filters();
         let storage = world.storage();
         let archetypes = arch_filter
@@ -1190,6 +1191,7 @@ where
         <F::ChunksetFilter as Filter<ChunksetFilterData<'data>>>::Iter: FissileIterator,
         <F::ChunkFilter as Filter<ChunkFilterData<'data>>>::Iter: FissileIterator,
     {
+        self.filter.init();
         let (arch_filter, chunkset_filter, chunk_filter) = self.filter.filters();
         let storage = world.storage();
         let archetypes = FissileEnumerate::new(arch_filter.collect(ArchetypeFilterData {

@@ -1425,8 +1425,11 @@ impl ComponentStorage {
         // free component memory
         unsafe {
             let ptr = self.component_data.take().unwrap();
+
+            if self.component_layout.size() > 0 {
                 std::alloc::dealloc(ptr.as_ptr(), self.component_layout);
+            }
         }
 
         self.update_mem_gauge();
     }
@@ -1446,6 +1449,7 @@ impl ComponentStorage {
 
         unsafe {
+            // allocating backing store
             if self.component_layout.size() > 0 {
                 let ptr = std::alloc::alloc(self.component_layout);
                 self.component_data = Some(NonNull::new_unchecked(ptr));
 
@@ -1454,6 +1458,10 @@ impl ComponentStorage {
                     let &offset = self.component_offsets.get(type_id).unwrap();
                     *component.ptr.get_mut() = ptr.add(offset);
                 }
+            } else {
+                self.component_data =
+                    Some(NonNull::new(self.component_layout.align() as *mut u8).unwrap());
+            }
         }
 
         self.update_mem_gauge();
@@ -1516,12 +1524,14 @@ impl Drop for ComponentStorage {
             self.update_count_gauge();
 
             // free the chunk's memory
+            if self.component_layout.size() > 0 {
                 unsafe {
                     std::alloc::dealloc(ptr.as_ptr(), self.component_layout);
                 }
+            }
         }
     }
 }
 
 /// Provides raw access to component data slices.
 #[repr(align(64))]

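The `ComponentStorage` hunks above guard both allocation and deallocation on `component_layout.size() > 0`, handing out an aligned dangling pointer when the layout is zero-sized instead of calling the allocator. Below is a standalone sketch of that pattern; `RawBuffer` is an invented type for illustration, not legion's `ComponentStorage`.

use std::alloc::Layout;
use std::ptr::NonNull;

struct RawBuffer {
    layout: Layout,
    data: Option<NonNull<u8>>,
}

impl RawBuffer {
    fn allocate(layout: Layout) -> Self {
        let data = if layout.size() > 0 {
            // SAFETY: the layout has nonzero size.
            unsafe { NonNull::new(std::alloc::alloc(layout)).expect("allocation failed") }
        } else {
            // Zero-sized layouts must not go through the allocator; an
            // aligned dangling pointer is enough.
            NonNull::new(layout.align() as *mut u8).unwrap()
        };
        Self { layout, data: Some(data) }
    }

    fn free(&mut self) {
        if let Some(ptr) = self.data.take() {
            if self.layout.size() > 0 {
                // SAFETY: `ptr` was allocated with `self.layout`.
                unsafe { std::alloc::dealloc(ptr.as_ptr(), self.layout) };
            }
        }
    }
}

impl Drop for RawBuffer {
    fn drop(&mut self) {
        self.free();
    }
}

fn main() {
    let zero = Layout::new::<()>();
    let nonzero = Layout::array::<u64>(8).unwrap();
    let _a = RawBuffer::allocate(zero); // no allocator call for size 0
    let _b = RawBuffer::allocate(nonzero); // heap allocation, freed on drop
}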
@@ -462,8 +462,6 @@ impl World {
     /// This function has the overhead of moving the entity to either an existing or new archetype,
     /// causing a memory copy of the entity to a new location. This function should not be used
     /// multiple times in successive order.
-    ///
-    /// `World::add_components` should be used for adding multiple omponents to an entity at once.
     pub fn add_component<T: Component>(
         &mut self,
         entity: Entity,

@@ -168,8 +168,13 @@ impl Executor {
                 64,
                 Default::default(),
             );
-        let mut component_mutated =
-            FxHashMap::<ComponentTypeId, Vec<usize>>::with_capacity_and_hasher(
+        let mut component_last_mutated =
+            FxHashMap::<ComponentTypeId, usize>::with_capacity_and_hasher(
                 64,
                 Default::default(),
             );
+        let mut component_last_read =
+            FxHashMap::<ComponentTypeId, usize>::with_capacity_and_hasher(
+                64,
+                Default::default(),
+            );
@@ -220,23 +225,28 @@ impl Executor {
 
            // find component access dependencies
            let mut comp_dependencies = FxHashSet::default();
-           for comp in read_comp {
-               if let Some(ns) = component_mutated.get(comp) {
-                   for n in ns {
-                       comp_dependencies.insert(*n);
-                   }
-               }
-           }
            for comp in write_comp {
-               if let Some(ns) = component_mutated.get(comp) {
-                   for n in ns {
+               // Writes have to be exclusive, so we are dependent on reads too
+               trace!(component = ?comp, "Write component");
+               if let Some(n) = component_last_read.get(comp) {
+                   trace!(system_index = n, "Added read dependency");
                    comp_dependencies.insert(*n);
                }
+               if let Some(n) = component_last_mutated.get(comp) {
+                   trace!(system_index = n, "Added write dependency");
+                   comp_dependencies.insert(*n);
                }
-               component_mutated
-                   .entry(*comp)
-                   .or_insert_with(Vec::new)
-                   .push(i);
+               component_last_mutated.insert(*comp, i);
            }
+
+           // Do reads after writes to ensure we don't overwrite last_read
+           for comp in read_comp {
+               trace!(component = ?comp, "Read component");
+               if let Some(n) = component_last_mutated.get(comp) {
+                   trace!(system_index = n, "Added write dependency");
+                   comp_dependencies.insert(*n);
+               }
+               component_last_read.insert(*comp, i);
+           }
 
            trace!(depentants = ?comp_dependencies, "Computed dynamic dependants");
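The executor hunks above drop the per-component list of every prior mutator in favour of two maps, `component_last_mutated` and `component_last_read`: a writer depends on both the last reader and the last writer of a component, a reader depends only on the last writer, and a system's reads are recorded after its writes. Below is a standalone sketch of that bookkeeping using plain strings for component types and `usize` indices for systems; it is illustrative only and not legion's `Executor`.

use std::collections::{HashMap, HashSet};

// `systems` lists (reads, writes) per system, in execution order; the result
// gives, for each system, the set of earlier systems it must wait on.
fn build_dependencies(systems: &[(Vec<&str>, Vec<&str>)]) -> Vec<HashSet<usize>> {
    let mut component_last_mutated: HashMap<&str, usize> = HashMap::new();
    let mut component_last_read: HashMap<&str, usize> = HashMap::new();
    let mut dependencies = Vec::new();

    for (i, (read_comp, write_comp)) in systems.iter().enumerate() {
        let mut comp_dependencies = HashSet::new();

        // Writes have to be exclusive, so they depend on the last reader
        // as well as the last writer of the component.
        for comp in write_comp {
            if let Some(&n) = component_last_read.get(comp) {
                comp_dependencies.insert(n);
            }
            if let Some(&n) = component_last_mutated.get(comp) {
                comp_dependencies.insert(n);
            }
            component_last_mutated.insert(*comp, i);
        }

        // Reads only depend on the last writer; they are recorded after the
        // writes so a system that reads and writes the same component does
        // not overwrite its own bookkeeping mid-pass.
        for comp in read_comp {
            if let Some(&n) = component_last_mutated.get(comp) {
                comp_dependencies.insert(n);
            }
            component_last_read.insert(*comp, i);
        }

        dependencies.push(comp_dependencies);
    }

    dependencies
}

fn main() {
    // system 0 writes A, system 1 reads A, system 2 writes A.
    let systems = vec![
        (vec![], vec!["A"]),
        (vec!["A"], vec![]),
        (vec![], vec!["A"]),
    ];
    let deps = build_dependencies(&systems);
    assert_eq!(deps[1], HashSet::from([0])); // the reader waits on the writer
    assert_eq!(deps[2], HashSet::from([0, 1])); // the writer waits on both
}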
@@ -1627,11 +1627,11 @@ mod tests {
         struct Comp2(f32, f32, f32);
 
         let components = vec![
-            (Pos(69., 69., 69.), Vel(69., 69., 69.)),
-            (Pos(69., 69., 69.), Vel(69., 69., 69.)),
+            (Comp1(69., 69., 69.), Comp2(69., 69., 69.)),
+            (Comp1(69., 69., 69.), Comp2(69., 69., 69.)),
         ];
 
-        let mut expected = HashMap::<Entity, (Pos, Vel)>::new();
+        let mut expected = HashMap::<Entity, (Comp1, Comp2)>::new();
 
         for (i, e) in world.insert((), components.clone()).iter().enumerate() {
             if let Some((pos, rot)) = components.get(i) {
@@ -1670,7 +1670,7 @@ mod tests {
         let system3 = SystemBuilder::<()>::new("TestSystem3")
             .with_query(<(Write<Comp1>, Write<Comp2>)>::query())
             .build(move |_, world, _, query| {
-                query.iter_mut(world).for_each(|(mut one, two)| {
+                query.iter_mut(world).for_each(|(mut one, mut two)| {
                     assert_eq!(one.0, 456.);
                     assert_eq!(one.1, 456.);
                     assert_eq!(one.2, 456.);
@@ -1683,9 +1683,9 @@ mod tests {
                     one.1 = 789.;
                     one.2 = 789.;
 
-                    one.0 = 789.;
-                    one.1 = 789.;
-                    one.2 = 789.;
+                    two.0 = 789.;
+                    two.1 = 789.;
+                    two.2 = 789.;
                 });
             });
 