move bevy crates to their own folder

This commit is contained in:
Carter Anderson 2020-04-24 17:57:20 -07:00
parent 791c475354
commit 87066cafd3
228 changed files with 18 additions and 17281 deletions

View file

@ -24,44 +24,29 @@ winit = ["bevy_winit"]
[dependencies]
# bevy
bevy_app = { path = "bevy_app" }
bevy_asset = { path = "bevy_asset", optional = true }
bevy_core = { path = "bevy_core", optional = true }
bevy_derive = { path = "bevy_derive", optional = true }
bevy_diagnostic = { path = "bevy_diagnostic", optional = true }
bevy_gltf = { path = "bevy_gltf", optional = true }
bevy_input = { path = "bevy_input", optional = true }
bevy_pbr = { path = "bevy_pbr", optional = true }
bevy_render = { path = "bevy_render", optional = true }
bevy_serialization = { path = "bevy_serialization", optional = true }
bevy_transform = { path = "bevy_transform", optional = true }
bevy_ui = { path = "bevy_ui", optional = true }
bevy_window = { path = "bevy_window", optional = true }
bevy_wgpu = { path = "bevy_wgpu", optional = true }
bevy_winit = { path = "bevy_winit", optional = true }
legion = { path = "bevy_legion" }
bevy_app = { path = "crates/bevy_app" }
bevy_asset = { path = "crates/bevy_asset", optional = true }
bevy_core = { path = "crates/bevy_core", optional = true }
bevy_derive = { path = "crates/bevy_derive", optional = true }
bevy_diagnostic = { path = "crates/bevy_diagnostic", optional = true }
bevy_gltf = { path = "crates/bevy_gltf", optional = true }
bevy_input = { path = "crates/bevy_input", optional = true }
bevy_pbr = { path = "crates/bevy_pbr", optional = true }
bevy_render = { path = "crates/bevy_render", optional = true }
bevy_serialization = { path = "crates/bevy_serialization", optional = true }
bevy_transform = { path = "crates/bevy_transform", optional = true }
bevy_ui = { path = "crates/bevy_ui", optional = true }
bevy_window = { path = "crates/bevy_window", optional = true }
bevy_wgpu = { path = "crates/bevy_wgpu", optional = true }
bevy_winit = { path = "crates/bevy_winit", optional = true }
legion = { path = "crates/bevy_legion" }
# other
log = { version = "0.4", features = ["release_max_level_info"] }
glam = "0.8.6"
[workspace]
members = [
"bevy_app",
"bevy_asset",
"bevy_core",
"bevy_derive",
"bevy_diagnostic",
"bevy_gltf",
"bevy_input",
"bevy_pbr",
"bevy_render",
"bevy_serialization",
"bevy_transform",
"bevy_legion",
"bevy_ui",
"bevy_window",
"bevy_wgpu",
"bevy_winit",
"crates/*",
]
[dev-dependencies]

View file

@ -1,101 +0,0 @@
name: CI
on:
push:
branches:
- master
pull_request:
jobs:
check:
name: Compile
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: actions-rs/cargo@v1
with:
command: generate-lockfile
- uses: actions/cache@v1.0.1
with:
path: target
key: ${{ runner.OS }}-build-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.OS }}-build-
- uses: actions-rs/cargo@v1
with:
command: test
args: --all
check-features:
name: Features
runs-on: ubuntu-latest
needs: [check]
strategy:
matrix:
features:
- --manifest-path=example/Cargo.toml
- --all-features --release
- --no-default-features --release
- --all-features
- --no-default-features
- --no-default-features --features par-iter
- --no-default-features --features par-schedule
- --no-default-features --features metrics
- --no-default-features --features ffi
steps:
- uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: actions-rs/cargo@v1
with:
command: generate-lockfile
- uses: actions/cache@v1.0.1
with:
path: target
key: ${{ runner.OS }}-build-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.OS }}-build-
- uses: actions-rs/cargo@v1
with:
command: test
args: --all ${{ matrix.features }}
fmt:
name: Rustfmt
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
override: true
components: rustfmt
- uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
clippy:
name: Clippy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: rustup component add clippy
- uses: actions-rs/cargo@v1
with:
command: clippy
args: --all -- -D warnings

View file

@ -1,3 +0,0 @@
target
**/*.rs.bk
Cargo.lock

View file

@ -1,2 +0,0 @@
# Default ignored files
/workspace.xml

View file

@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/examples" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/legion_systems/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/legion_systems/examples" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/legion_systems/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/legion_systems/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/legion_core/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/legion_core/examples" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/legion_core/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/legion_core/benches" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/legion_core/target" />
<excludeFolder url="file://$MODULE_DIR$/legion_systems/target" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View file

@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
</project>

View file

@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/legion.iml" filepath="$PROJECT_DIR$/.idea/legion.iml" />
</modules>
</component>
</project>

View file

@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

View file

@ -1,61 +0,0 @@
[package]
name = "legion"
version = "0.2.1"
description = "High performance entity component system (ECS) library"
authors = ["Thomas Gillen <thomas.gillen@googlemail.com>"]
repository = "https://github.com/TomGillen/legion"
keywords = ["ecs", "game"]
categories = ["game-engines", "data-structures"]
readme = "readme.md"
license = "MIT"
edition = "2018"
[badges]
travis-ci = { repository = "TomGillen/legion", branch = "master" }
[features]
default = ["par-iter", "par-schedule", "ffi"]
par-iter = ["legion-core/par-iter", "legion-systems/par-iter"]
par-schedule = ["legion-systems/par-schedule"]
log = ["tracing/log", "tracing/log-always"]
ffi = ["legion-core/ffi"]
serialize = ["legion-core/serialize"]
metrics = ["legion-core/metrics"]
# [workspace]
# members = [
# "legion_core",
# "legion_systems",
# ]
[dependencies]
legion-core = { path = "legion_core", version = "0.2.1", default-features = false }
legion-systems = { path = "legion_systems", version = "0.2.1", default-features = false }
[dev-dependencies]
criterion = "0.3"
cgmath = "0.17"
tracing-subscriber = "0.2"
erased-serde = "0.3"
serde = { version = "1", features = ["derive"]}
uuid = { version = "0.8", features = ["v4"] }
tracing = "0.1"
itertools = "0.8"
rayon = "1.2"
crossbeam-channel = "0.4.0"
[[bench]]
name = "benchmarks"
harness = false
[[bench]]
name = "parallel_query"
harness = false
[[bench]]
name = "transform"
harness = false
[[bench]]
name = "insertion"
harness = false

View file

@ -1,21 +0,0 @@
MIT License
Copyright (c) [year] [fullname]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 42 KiB

View file

@ -1,195 +0,0 @@
use criterion::*;
use itertools::*;
use legion::prelude::*;
// Marker component types used to pad the world with many distinct
// archetypes; each wraps a single f32 so storage costs stay uniform.
#[derive(Copy, Clone, Debug, PartialEq)]
struct A(f32);
#[derive(Copy, Clone, Debug, PartialEq)]
struct B(f32);
#[derive(Copy, Clone, Debug, PartialEq)]
struct C(f32);
#[derive(Copy, Clone, Debug, PartialEq)]
struct D(f32);
#[derive(Copy, Clone, Debug, PartialEq)]
struct E(f32);
#[derive(Copy, Clone, Debug, PartialEq)]
struct F(f32);
// Tag used to partition entities into tag-distinguished archetypes.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Tag(f32);
// The two components the benchmark queries actually read/write.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Position(f32);
#[derive(Copy, Clone, Debug, PartialEq)]
struct Rotation(f32);
/// Inserts `count` entities (each seeded with an `A` component) and applies
/// `num_components` initializer closures to each, cycling through `variants`
/// in a staggered order so that many distinct archetypes are produced.
fn create_entities(
    world: &mut World,
    variants: &mut [Box<dyn FnMut(Entity, &mut World)>],
    num_components: usize,
    count: usize,
) {
    let len_variants = variants.len();
    // Infinite index stream; `(i + i * step) % len_variants` staggers which
    // initializers successive entities receive, chunked per entity.
    let components = (0..)
        .flat_map(|step| (0..len_variants).map(move |i| (i + i * step) % len_variants))
        .chunks(num_components);
    for initializers in (&components).into_iter().take(count) {
        // Every entity starts life with an `A` component.
        let entity = world.insert((), Some((A(0.0),)))[0];
        for i in initializers {
            let init = variants.get_mut(i).unwrap();
            init(entity, world);
        }
    }
}
/// Adds `count` "noise" entities with varied component/tag combinations so
/// query benchmarks run against a realistically fragmented world.
fn add_background_entities(world: &mut World, count: usize) {
    create_entities(
        world,
        // Ten initializers: six add components A..F, four add distinct
        // Tag values — their combinations yield many archetypes.
        &mut [
            Box::new(|e, w| w.add_component(e, A(0.0)).unwrap()),
            Box::new(|e, w| w.add_component(e, B(0.0)).unwrap()),
            Box::new(|e, w| w.add_component(e, C(0.0)).unwrap()),
            Box::new(|e, w| w.add_tag(e, Tag(0.0)).unwrap()),
            Box::new(|e, w| w.add_component(e, D(0.0)).unwrap()),
            Box::new(|e, w| w.add_tag(e, Tag(1.0)).unwrap()),
            Box::new(|e, w| w.add_component(e, E(0.0)).unwrap()),
            Box::new(|e, w| w.add_tag(e, Tag(2.0)).unwrap()),
            Box::new(|e, w| w.add_component(e, F(0.0)).unwrap()),
            Box::new(|e, w| w.add_tag(e, Tag(3.0)).unwrap()),
        ],
        5,
        count,
    );
}
/// Builds a fresh world pre-populated with `n` (Position, Rotation) entities.
fn setup(n: usize) -> World {
    let universe = Universe::new();
    let mut world = universe.create_world();
    let bodies = (0..n).map(|_| (Position(0.), Rotation(0.)));
    world.insert((), bodies);
    world
}
/// Measures the insert + delete round trip for batch sizes 0..=900.
fn bench_create_delete(c: &mut Criterion) {
    c.bench_function_over_inputs(
        "create-delete",
        |b, count| {
            let mut world = setup(0);
            b.iter(|| {
                // Insert one batch, then delete every entity again so each
                // iteration starts from an empty world.
                let entities = world
                    .insert((), (0..*count).map(|_| (Position(0.),)))
                    .to_vec();
                for e in entities {
                    world.delete(e);
                }
            })
        },
        (0..10).map(|i| i * 100),
    );
}
/// Iterates a plain two-component query over a world padded with 10k
/// background entities of other archetypes.
fn bench_iter_simple(c: &mut Criterion) {
    c.bench_function("iter-simple", |b| {
        let mut world = setup(2000);
        add_background_entities(&mut world, 10000);
        let query = <(Read<Position>, Write<Rotation>)>::query();
        b.iter(|| {
            for (pos, mut rot) in query.iter_mut(&mut world) {
                rot.0 = pos.0;
            }
        });
    });
}
/// Iterates a filtered query (component exclusion + tag-value match) over
/// 200 tag-distinguished archetypes plus background noise.
fn bench_iter_complex(c: &mut Criterion) {
    c.bench_function("iter-complex", |b| {
        let mut world = setup(0);
        add_background_entities(&mut world, 10000);
        for i in 0..200 {
            world.insert(
                (Tag(i as f32),),
                (0..2000).map(|_| (Position(0.), Rotation(0.))),
            );
        }
        // Only chunks tagged Tag(2.0) and lacking an `A` component match.
        let query = <(Read<Position>, Write<Rotation>)>::query()
            .filter(!component::<A>() & tag_value(&Tag(2.0)));
        b.iter(|| {
            for (pos, mut rot) in query.iter_mut(&mut world) {
                rot.0 = pos.0;
            }
        });
    });
}
/// Measures chunk-level iteration cost: touches only element 0 of each
/// chunk's Position storage rather than every entity.
fn bench_iter_chunks_simple(c: &mut Criterion) {
    c.bench_function("iter-chunks-simple", |b| {
        let mut world = setup(10000);
        add_background_entities(&mut world, 10000);
        let query = <(Write<Position>, Read<Rotation>)>::query();
        b.iter(|| {
            for c in query.iter_chunks_mut(&mut world) {
                // SAFETY: assumes every matched chunk contains at least one
                // Position so index 0 is in bounds — TODO confirm legion
                // never yields empty chunks here.
                unsafe {
                    c.components_mut::<Position>()
                        .unwrap()
                        .get_unchecked_mut(0)
                        .0 = 0.0
                };
            }
        });
    });
}
/// Chunk-level iteration with a tag/component filter over 200 archetypes;
/// same access pattern as `bench_iter_chunks_simple`.
fn bench_iter_chunks_complex(c: &mut Criterion) {
    c.bench_function("iter-chunks-complex", |b| {
        let mut world = setup(0);
        add_background_entities(&mut world, 10000);
        for i in 0..200 {
            world.insert(
                (Tag(i as f32),),
                (0..10000).map(|_| (Position(0.), Rotation(0.))),
            );
        }
        let query = <(Write<Position>, Read<Rotation>)>::query()
            .filter(!component::<A>() & tag_value(&Tag(2.0)));
        b.iter(|| {
            for c in query.iter_chunks_mut(&mut world) {
                // SAFETY: assumes matched chunks are non-empty so index 0 is
                // valid — TODO confirm.
                unsafe {
                    c.components_mut::<Position>()
                        .unwrap()
                        .get_unchecked_mut(0)
                        .0 = 0.0
                };
            }
        });
    });
}
// Register the query benchmarks and generate the benchmark entry point.
criterion_group!(
    basic,
    bench_create_delete,
    bench_iter_simple,
    bench_iter_complex,
    bench_iter_chunks_simple,
    bench_iter_chunks_complex
);
criterion_main!(basic);

View file

@ -1,69 +0,0 @@
use criterion::*;
use legion::prelude::*;
/// Baseline measuring only the allocation of the component vector, with no
/// legion involvement at all; used to isolate pure world-insertion cost in
/// the other benchmarks.
fn bench_insert_zero_baseline(c: &mut Criterion) {
    c.bench_function("insert_zero_baseline", |b| {
        b.iter(|| {
            // `.map(|i| i)` was a redundant identity map; collect directly.
            let components: Vec<isize> = (0..10000).collect();
            criterion::black_box(components);
        });
    });
}
/// Baseline: world creation plus a single one-entity insert, on top of the
/// same vector allocation as the zero baseline.
fn bench_insert_one_baseline(c: &mut Criterion) {
    c.bench_function("insert_one_baseline", |b| {
        b.iter(|| {
            let universe = Universe::new();
            let mut world = universe.create_world();
            // `.map(|i| i)` was a redundant identity map; collect directly.
            let components: Vec<isize> = (0..10000).collect();
            criterion::black_box(components);
            world.insert((), vec![(1usize,)]);
        });
    });
}
/// Worst case: one `World::insert` call per component, to contrast with the
/// batched insert benchmark below.
fn bench_insert_unbatched(c: &mut Criterion) {
    c.bench_function("insert_unbatched", |b| {
        b.iter(|| {
            let universe = Universe::new();
            let mut world = universe.create_world();
            // `.map(|i| i)` was a redundant identity map; collect directly.
            let components: Vec<isize> = (0..10000).collect();
            for component in components {
                world.insert((), vec![(component,)]);
            }
        });
    });
}
/// Inserts all entities in a single batched call, parameterized over batch
/// sizes 1000..=10000.
fn bench_insert_batched(c: &mut Criterion) {
    c.bench(
        "insert_batched",
        ParameterizedBenchmark::new(
            "counts",
            |b, n| {
                b.iter(|| {
                    let universe = Universe::new();
                    let mut world = universe.create_world();
                    let components: Vec<(isize,)> = (0..*n).map(|i| (i,)).collect();
                    world.insert((), components);
                });
            },
            (1..11).map(|i| i * 1000),
        ),
    );
}
// Register the insertion benchmarks and generate the entry point.
criterion_group!(
    basic,
    bench_insert_zero_baseline,
    bench_insert_one_baseline,
    bench_insert_unbatched,
    bench_insert_batched,
);
criterion_main!(basic);

View file

@ -1,181 +0,0 @@
use criterion::*;
use itertools::*;
use legion::prelude::*;
use rayon::join;
// Component types: A is shared between both archetypes; B and C each appear
// in exactly one, so the two queries below write disjoint data.
#[derive(Copy, Clone, Debug, PartialEq)]
struct A(f32);
#[derive(Copy, Clone, Debug, PartialEq)]
struct B(f32);
#[derive(Copy, Clone, Debug, PartialEq)]
struct C(f32);
// Sum type describing which archetype an input element belongs to:
// (A, B) entities or (A, C) entities.
#[derive(Copy, Clone, Debug)]
enum Variants {
    AB(A, B),
    AC(A, C),
}
/// Maps a variant to its archetype index: AB -> 0, AC -> 1.
fn index(v: Variants) -> u8 {
    if let Variants::AB(..) = v {
        0
    } else {
        1
    }
}
/// Builds a zeroed variant for an archetype index: 0 -> AB, anything else -> AC.
fn generate(i: u8) -> Variants {
    if i == 0 {
        Variants::AB(A(0.0), B(0.0))
    } else {
        Variants::AC(A(0.0), C(0.0))
    }
}
/// Produces `n` AB variants followed by `n` AC variants, in that order
/// (consecutive grouping is relied on by `setup`).
fn data(n: usize) -> Vec<Variants> {
    let ab_half = (0..n).map(|_| generate(0));
    let ac_half = (0..n).map(|_| generate(1));
    ab_half.chain(ac_half).collect()
}
/// Builds a world from the variant list, inserting each run of consecutive
/// same-index variants as one batch.
fn setup(data: &[Variants]) -> World {
    let universe = Universe::new();
    let mut world = universe.create_world();
    // NOTE: `group_by` only groups *consecutive* equal keys, which matches
    // the layout produced by `data` (all AB first, then all AC).
    for (i, group) in &data.iter().group_by(|x| index(**x)) {
        match i {
            0 => world.insert(
                (),
                group
                    .map(|x| {
                        if let Variants::AB(a, b) = x {
                            (*a, *b)
                        } else {
                            // Unreachable: group key 0 only contains AB.
                            panic!();
                        }
                    })
                    .collect::<Vec<_>>(),
            ),
            _ => world.insert(
                (),
                group
                    .map(|x| {
                        if let Variants::AC(a, c) = x {
                            (*a, *c)
                        } else {
                            // Unreachable: non-zero group key only contains AC.
                            panic!();
                        }
                    })
                    .collect::<Vec<_>>(),
            ),
        };
    }
    world
}
/// Splits the variant list into two plain vectors — the "ideal" layout the
/// ECS-based benchmarks are compared against.
fn setup_ideal(data: &[Variants]) -> (Vec<(A, B)>, Vec<(A, C)>) {
    let mut ab_pairs = Vec::<(A, B)>::new();
    let mut ac_pairs = Vec::<(A, C)>::new();
    data.iter().for_each(|v| match v {
        Variants::AB(a, b) => ab_pairs.push((*a, *b)),
        Variants::AC(a, c) => ac_pairs.push((*a, *c)),
    });
    (ab_pairs, ac_pairs)
}
/// Baseline workload on plain vectors: copy each A value into its paired
/// B (resp. C) component.
fn ideal(ab: &mut Vec<(A, B)>, ac: &mut Vec<(A, C)>) {
    ab.iter_mut().for_each(|(a, b)| b.0 = a.0);
    ac.iter_mut().for_each(|(a, c)| c.0 = a.0);
}
/// Runs the same two-query workload sequentially through the ECS.
fn sequential(world: &mut World) {
    for (mut b, a) in <(Write<B>, Read<A>)>::query().iter_mut(world) {
        b.0 = a.0;
    }
    for (mut c, a) in <(Write<C>, Read<A>)>::query().iter_mut(world) {
        c.0 = a.0;
    }
}
/// Runs the two queries concurrently via rayon::join.
fn parallel(world: &mut World) {
    join(
        // SAFETY: the two closures write disjoint components (B vs C) and
        // only share immutable reads of A, so the unchecked concurrent
        // iteration never aliases mutable data.
        || unsafe {
            for (mut b, a) in <(Write<B>, Read<A>)>::query().iter_unchecked(&world) {
                b.0 = a.0;
            }
        },
        || unsafe {
            for (mut c, a) in <(Write<C>, Read<A>)>::query().iter_unchecked(&world) {
                c.0 = a.0;
            }
        },
    );
}
/// Same concurrent workload, using legion's internal parallel for-each
/// instead of hand-rolled chunk iteration.
fn par_for_each_mut(world: &mut World) {
    join(
        // SAFETY: as in `parallel` — writes are to disjoint components
        // (B vs C), shared access is read-only (A).
        || unsafe {
            <(Write<B>, Read<A>)>::query().par_for_each_unchecked(&world, |(mut b, a)| {
                b.0 = a.0;
            });
        },
        || unsafe {
            <(Write<C>, Read<A>)>::query().par_for_each_unchecked(&world, |(mut c, a)| {
                c.0 = a.0;
            });
        },
    );
}
/// Compares the plain-vector baseline against sequential and two parallel
/// ECS strategies, for input sizes 1000..=10000.
fn bench_ordered(c: &mut Criterion) {
    c.bench(
        "concurrent queries",
        ParameterizedBenchmark::new(
            "sequential ideal",
            |b, n| {
                let data = data(*n);
                let (mut ab, mut ac) = setup_ideal(&data);
                b.iter(|| ideal(&mut ab, &mut ac));
            },
            (1..11).map(|i| i * 1000),
        )
        .with_function("sequential", |b, n| {
            let data = data(*n);
            let mut world = setup(&data);
            b.iter(|| sequential(&mut world));
        })
        .with_function("parallel", |b, n| {
            let data = data(*n);
            let mut world = setup(&data);
            // The empty join arm warms up rayon's thread pool before timing.
            join(|| {}, || b.iter(|| parallel(&mut world)));
        })
        .with_function("par_for_each_mut", |b, n| {
            let data = data(*n);
            let mut world = setup(&data);
            join(|| {}, || b.iter(|| par_for_each_mut(&mut world)));
        }),
    );
}
// Register the concurrent-query benchmark group and entry point.
criterion_group!(iterate, bench_ordered);
criterion_main!(iterate);

View file

@ -1,111 +0,0 @@
use criterion::*;
use cgmath::prelude::*;
use cgmath::{vec3, Matrix4, Quaternion, Vector3};
use legion::prelude::*;
use rayon::join;
// Components of a classic transform pipeline: three inputs and the
// matrix output computed from them.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Position(Vector3<f32>);
#[derive(Copy, Clone, Debug, PartialEq)]
struct Orientation(Quaternion<f32>);
#[derive(Copy, Clone, Debug, PartialEq)]
struct Scale(Vector3<f32>);
#[derive(Copy, Clone, Debug, PartialEq)]
struct Transform(Matrix4<f32>);
/// Produces `n` identical entity tuples with zeroed position/scale, identity
/// orientation and identity transform.
fn data(n: usize) -> Vec<(Position, Orientation, Scale, Transform)> {
    (0..n)
        .map(|_| {
            (
                Position(vec3(0.0, 0.0, 0.0)),
                Orientation(Quaternion::new(1.0, 0.0, 0.0, 0.0)),
                Scale(vec3(0.0, 0.0, 0.0)),
                Transform(Matrix4::identity()),
            )
        })
        .collect()
}
/// Creates a world populated with the given transform-component tuples.
fn setup(data: Vec<(Position, Orientation, Scale, Transform)>) -> World {
    let universe = Universe::new();
    let mut world = universe.create_world();
    world.insert((), data);
    world
}
/// Composes scale, rotation and translation into a single 4x4 matrix.
/// The multiplication order (scale * rotation * translation) is the
/// workload being benchmarked; do not reorder.
fn process(
    position: &Vector3<f32>,
    orientation: &Quaternion<f32>,
    scale: &Vector3<f32>,
) -> Matrix4<f32> {
    let rot: Matrix4<f32> = (*orientation).into();
    Matrix4::from_nonuniform_scale(scale.x, scale.y, scale.z)
        * rot
        * Matrix4::from_translation(*position)
}
/// Baseline: recompute every transform over a plain vector of tuples.
fn ideal(data: &mut Vec<(Position, Orientation, Scale, Transform)>) {
    data.iter_mut()
        .for_each(|(pos, orient, scale, trans)| trans.0 = process(&pos.0, &orient.0, &scale.0));
}
/// Recomputes every transform via a sequential four-component ECS query.
fn sequential(world: &mut World) {
    for (pos, orient, scale, mut trans) in <(
        Read<Position>,
        Read<Orientation>,
        Read<Scale>,
        Write<Transform>,
    )>::query()
    .iter_mut(world)
    {
        trans.0 = process(&pos.0, &orient.0, &scale.0);
    }
}
/// Same workload using legion's parallel for-each over the query.
fn par_for_each_mut(world: &mut World) {
    <(
        Read<Position>,
        Read<Orientation>,
        Read<Scale>,
        Write<Transform>,
    )>::query()
    .par_for_each_mut(world, |(pos, orient, scale, mut trans)| {
        trans.0 = process(&pos.0, &orient.0, &scale.0);
    });
}
/// Compares the plain-vector baseline against sequential and parallel ECS
/// transform updates for entity counts 1000..=10000.
fn bench_transform(c: &mut Criterion) {
    c.bench(
        "update transform (experimental)",
        ParameterizedBenchmark::new(
            "ideal sequential",
            |b, n| {
                let mut data = data(*n);
                b.iter(|| ideal(&mut data));
            },
            (1..11).map(|i| i * 1000),
        )
        .with_function("sequential", |b, n| {
            let data = data(*n);
            let mut world = setup(data);
            b.iter(|| sequential(&mut world));
        })
        .with_function("par_for_each_mut", |b, n| {
            let data = data(*n);
            let mut world = setup(data);
            // The empty join arm warms up rayon's thread pool before timing.
            join(|| {}, || b.iter(|| par_for_each_mut(&mut world)));
        }),
    );
}
// Register the transform benchmark group and entry point.
criterion_group!(iterate, bench_transform);
criterion_main!(iterate);

View file

@ -1 +0,0 @@
type-complexity-threshold = 500

View file

@ -1,5 +0,0 @@
[book]
title = "The Legion Book"
multilingual = false
author = "Walter Pearce"
description = "Introduction to Legion ECS"

View file

@ -1,33 +0,0 @@
# Introduction
Welcome to the Legion book! This book is intended to be a summary overview of legion, including:
- An overview of how to use it
- Some examples of different use case scenarios
- How it is different from other Entity-Component-Systems in the Rust ecosystem
- Overviews of some pertinent internals
This book assumes a general understanding of the concepts of the Entity-Component-System design and data composition as a design pattern. If you need a summary of what an ECS is, please see the [Wikipedia article on ECS].
## Design
Legion's internal architecture is heavily inspired by the new Unity ECS architecture [^1], while the publicly facing API is strongly built upon specs [^2], expanding on it and learning from many of the faults found in that API.
#### Quick Version
The core concept of Legion design is based around the concept of `Entities`, `Archetypes` and `Chunks`. These three core concepts are the building blocks of legion, and its entity component system.
##### Entities
Entities are strictly IDs, allocated within a given `Universe` of legion, which allow for uniquely referencing component instances. IDs may be reused generationally, but legion guarantees that they are unique in any given universe; this is accomplished by providing each `World` in a `Universe` its own Entity Allocator, which will be unique in that universe.
##### Archetypes
An Archetype is considered a "Grouping of Components and Tags". Entities may have varying numbers and types of components; any combination of these tags and components is considered an `Archetype`. In legion, entity storage and parallelization of system execution are all centered on this concept of Archetypes, or like-entities.
## Other resources
[^1]: https://docs.unity3d.com/Packages/com.unity.entities@0.1/manual/ecs_core.html
[^2]: https://github.com/amethyst/specs
[Wikipedia article on ECS]: https://en.wikipedia.org/wiki/Entity_component_system

View file

@ -1,5 +0,0 @@
# The Legion Book
- [Introduction](01_introduction.md)
- [Hello World](02_hello_world.md)

View file

@ -1,6 +0,0 @@
[workspace]
members = [
"hello_world",
"serialization",
]

View file

@ -1,9 +0,0 @@
[package]
name = "hello_world"
version = "0.1.0"
authors = ["Thomas Gillen <thomas.gillen@googlemail.com>"]
edition = "2018"
[dependencies]
legion = { path = "../.." }
tracing-subscriber = "0.2"

View file

@ -1,111 +0,0 @@
use legion::prelude::*;
// Position component used throughout the example systems.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Pos(f32, f32, f32);
// Velocity component used throughout the example systems.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Vel(f32, f32, f32);
// Example resources demonstrating read/write resource access in systems.
#[derive(Clone)]
pub struct ExampleResource1(String);
#[derive(Clone)]
pub struct ExampleResource2(String);
/// End-to-end legion tour: world creation, resources, batch insertion,
/// direct queries, scheduled systems, command buffers and thread-local
/// systems, executed for one frame.
fn main() {
    let _ = tracing_subscriber::fmt::try_init();
    // create world
    let universe = Universe::new();
    let mut world = universe.create_world();
    // Create resources
    // Resources are also dynamically scheduled just like components, so the accesses
    // declared within a SystemBuilder are correct.
    // Any resource accessed by systems *must be* manually inserted beforehand, otherwise it will panic.
    let mut resources = Resources::default();
    resources.insert(ExampleResource1("ExampleResource1".to_string()));
    resources.insert(ExampleResource2("ExampleResource2".to_string()));
    // create entities
    // An insert call is used to insert matching entities into the world.
    let entities = world
        .insert(
            (),
            vec![
                (Pos(1., 2., 3.), Vel(1., 2., 3.)),
                (Pos(1., 2., 3.), Vel(1., 2., 3.)),
                (Pos(1., 2., 3.), Vel(1., 2., 3.)),
                (Pos(1., 2., 3.), Vel(1., 2., 3.)),
            ],
        )
        .to_vec();
    // update positions
    // This example shows the use of a `iter`, which is default mutable, across a query.
    let query = <(Write<Pos>, Read<Vel>)>::query();
    for (mut pos, vel) in query.iter_mut(&mut world) {
        pos.0 += vel.0;
        pos.1 += vel.1;
        pos.2 += vel.2;
    }
    // update positions using a system
    let update_positions = SystemBuilder::new("update_positions")
        .write_resource::<ExampleResource1>()
        .read_resource::<ExampleResource2>()
        .with_query(<(Write<Pos>, Read<Vel>)>::query())
        .build(|_, mut world, (res1, res2), query| {
            res1.0 = res2.0.clone(); // Write the mutable resource from the immutable resource
            for (mut pos, vel) in query.iter_mut(&mut world) {
                pos.0 += vel.0;
                pos.1 += vel.1;
                pos.2 += vel.2;
            }
        });
    // Uses the command buffer to insert an entity into the world every frame.
    let entity = entities[0];
    let command_buffer_usage = SystemBuilder::new("command_buffer_usage")
        .read_resource::<ExampleResource1>()
        .write_resource::<ExampleResource2>()
        // Read and write component definitions allow us to declare access to a component across all archetypes
        // This means we can use the SubWorld provided to the system as a `World` for that component.
        .write_component::<Pos>()
        .build(move |command_buffer, world, (res1, res2), _| {
            res2.0 = res1.0.clone(); // Write the mutable resource from the immutable resource
            // Read a component from the SubWorld.
            let _ = world.get_component_mut::<Pos>(entity).unwrap();
            // Deferred insertion: these entities appear after the next flush.
            let _entities = command_buffer.insert(
                (),
                vec![
                    (Pos(1., 2., 3.), Vel(1., 2., 3.)),
                    (Pos(1., 2., 3.), Vel(1., 2., 3.)),
                ],
            );
        });
    let thread_local_example = Box::new(|world: &mut World, _resources: &mut Resources| {
        // This is an example of a thread local system which has full, exclusive mutable access to the world.
        let query = <(Write<Pos>, Read<Vel>)>::query();
        for (mut pos, vel) in query.iter_mut(world) {
            pos.0 += vel.0;
            pos.1 += vel.1;
            pos.2 += vel.2;
        }
    });
    let mut schedule = Schedule::builder()
        .add_system(update_positions)
        .add_system(command_buffer_usage)
        // This flushes all command buffers of all systems.
        .flush()
        // a thread local system or function will wait for all previous systems to finish running,
        // and then take exclusive access of the world.
        .add_thread_local_fn(thread_local_example)
        .build();
    // Execute a frame of the schedule.
    schedule.execute(&mut world, &mut resources);
}

View file

@ -1,14 +0,0 @@
[package]
name = "serialization"
version = "0.1.0"
authors = ["Thomas Gillen <thomas.gillen@googlemail.com>"]
edition = "2018"
[dependencies]
legion = { path = "../..", features = ["serialize"] }
tracing-subscriber = "0.2"
serde_json = "1.0"
type-uuid = "0.1"
erased-serde = "0.3"
serde = { version = "1", features = ["derive"]}
uuid = { version = "0.8", features = ["v4"] }

View file

@ -1,468 +0,0 @@
use legion::{
entity::EntityAllocator,
prelude::*,
storage::{
ArchetypeDescription, ComponentMeta, ComponentResourceSet, ComponentTypeId, TagMeta,
TagStorage, TagTypeId,
},
};
use serde::{
de::{self, DeserializeSeed, IgnoredAny, Visitor},
Deserialize, Deserializer, Serialize, Serializer,
};
use std::{any::TypeId, cell::RefCell, collections::HashMap, marker::PhantomData, ptr::NonNull};
use type_uuid::TypeUuid;
// Serializable components, each identified by a stable TypeUuid so
// registries can match them across processes.
#[derive(TypeUuid, Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
#[uuid = "5fd8256d-db36-4fe2-8211-c7b3446e1927"]
struct Pos(f32, f32, f32);
#[derive(TypeUuid, Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
#[uuid = "14dec17f-ae14-40a3-8e44-e487fc423287"]
struct Vel(f32, f32, f32);
// Not registered with the serializer — presumably used to demonstrate that
// unregistered component types are filtered out; confirm against the demo.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Unregistered(f32, f32, f32);
/// DeserializeSeed that deserializes one `T` directly into the raw storage
/// slot pointed to by `ptr`, avoiding an intermediate copy.
struct ComponentDeserializer<'de, T: Deserialize<'de>> {
    ptr: *mut T,
    _marker: PhantomData<&'de T>,
}
impl<'de, T: Deserialize<'de> + 'static> DeserializeSeed<'de> for ComponentDeserializer<'de, T> {
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        let value = <T as Deserialize<'de>>::deserialize(deserializer)?;
        // SAFETY: assumes `ptr` points to valid, writable, properly aligned
        // storage for one uninitialized `T` — the caller's responsibility;
        // TODO confirm callers never pass an initialized slot (ptr::write
        // does not drop the previous value).
        unsafe {
            std::ptr::write(self.ptr, value);
        }
        Ok(())
    }
}
/// Deserializes a sequence of `T` components into chunk storage obtained
/// on demand from `get_next_storage_fn`, which yields (pointer, capacity)
/// pairs until storage is exhausted.
struct ComponentSeqDeserializer<'a, T> {
    get_next_storage_fn: &'a mut dyn FnMut() -> Option<(NonNull<u8>, usize)>,
    _marker: PhantomData<T>,
}
impl<'de, 'a, T: for<'b> Deserialize<'b> + 'static> DeserializeSeed<'de>
    for ComponentSeqDeserializer<'a, T>
{
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_seq(self)
    }
}
impl<'de, 'a, T: for<'b> Deserialize<'b> + 'static> Visitor<'de>
    for ComponentSeqDeserializer<'a, T>
{
    type Value = ();
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("sequence of objects")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: de::SeqAccess<'de>,
    {
        let size = seq.size_hint();
        // Iterate up to the hinted element count (or unbounded when the
        // format gives no hint), filling one storage block at a time.
        for _ in 0..size.unwrap_or(std::usize::MAX) {
            match (self.get_next_storage_fn)() {
                Some((storage_ptr, storage_len)) => {
                    let storage_ptr = storage_ptr.as_ptr() as *mut T;
                    for idx in 0..storage_len {
                        // SAFETY: assumes the block really has room for
                        // `storage_len` elements of `T` — TODO confirm the
                        // provider's contract.
                        let element_ptr = unsafe { storage_ptr.add(idx) };
                        if seq
                            .next_element_seed(ComponentDeserializer {
                                ptr: element_ptr,
                                _marker: PhantomData,
                            })?
                            .is_none()
                        {
                            // The input ended mid-block: the archetype data
                            // promised more components than were serialized.
                            panic!(
                                "expected {} elements in chunk but only {} found",
                                storage_len, idx
                            );
                        }
                    }
                }
                None => {
                    if seq.next_element::<IgnoredAny>()?.is_some() {
                        panic!("unexpected element when there was no storage space available");
                    } else {
                        // No more elements and no more storage - that's what we want!
                        break;
                    }
                }
            }
        }
        Ok(())
    }
}
/// Type-erased (de)serialization hooks for one tag type, captured as plain
/// fn pointers so registrations can live in a homogeneous registry.
#[derive(Clone)]
struct TagRegistration {
    uuid: type_uuid::Bytes,
    ty: TypeId,
    tag_serialize_fn: fn(&TagStorage, &mut dyn FnMut(&dyn erased_serde::Serialize)),
    tag_deserialize_fn: fn(
        deserializer: &mut dyn erased_serde::Deserializer,
        &mut TagStorage,
    ) -> Result<(), erased_serde::Error>,
    register_tag_fn: fn(&mut ArchetypeDescription),
}
impl TagRegistration {
    /// Builds the registration for a concrete tag type `T`, monomorphizing
    /// the erased hooks.
    fn of<
        T: TypeUuid
            + Serialize
            + for<'de> Deserialize<'de>
            + PartialEq
            + Clone
            + Send
            + Sync
            + 'static,
    >() -> Self {
        Self {
            uuid: T::UUID,
            ty: TypeId::of::<T>(),
            tag_serialize_fn: |tag_storage, serialize_fn| {
                // it's safe because we know this is the correct type due to lookup
                let slice = unsafe { tag_storage.data_slice::<T>() };
                serialize_fn(&&*slice);
            },
            tag_deserialize_fn: |deserializer, tag_storage| {
                // TODO implement visitor to avoid allocation of Vec
                let tag_vec = <Vec<T> as Deserialize>::deserialize(deserializer)?;
                for tag in tag_vec {
                    // Tag types should line up, making this safe
                    unsafe {
                        tag_storage.push(tag);
                    }
                }
                Ok(())
            },
            register_tag_fn: |desc| {
                desc.register_tag::<T>();
            },
        }
    }
}
/// Type-erased (de)serialization hooks for one component type; the
/// component analogue of `TagRegistration`, deserializing straight into
/// chunk storage supplied by `get_next_storage_fn`.
#[derive(Clone)]
struct ComponentRegistration {
    uuid: type_uuid::Bytes,
    ty: TypeId,
    comp_serialize_fn: fn(&ComponentResourceSet, &mut dyn FnMut(&dyn erased_serde::Serialize)),
    comp_deserialize_fn: fn(
        deserializer: &mut dyn erased_serde::Deserializer,
        get_next_storage_fn: &mut dyn FnMut() -> Option<(NonNull<u8>, usize)>,
    ) -> Result<(), erased_serde::Error>,
    register_comp_fn: fn(&mut ArchetypeDescription),
}
impl ComponentRegistration {
    /// Builds the registration for a concrete component type `T`.
    fn of<T: TypeUuid + Serialize + for<'de> Deserialize<'de> + Send + Sync + 'static>() -> Self {
        Self {
            uuid: T::UUID,
            ty: TypeId::of::<T>(),
            comp_serialize_fn: |comp_storage, serialize_fn| {
                // it's safe because we know this is the correct type due to lookup
                let slice = unsafe { comp_storage.data_slice::<T>() };
                serialize_fn(&*slice);
            },
            comp_deserialize_fn: |deserializer, get_next_storage_fn| {
                let comp_seq_deser = ComponentSeqDeserializer::<T> {
                    get_next_storage_fn,
                    _marker: PhantomData,
                };
                comp_seq_deser.deserialize(deserializer)?;
                Ok(())
            },
            register_comp_fn: |desc| {
                desc.register_component::<T>();
            },
        }
    }
}
/// Serialized form of an archetype: the UUIDs of its tag and component
/// types, written in the order they appear in the archetype description.
#[derive(Serialize, Deserialize)]
struct SerializedArchetypeDescription {
    tag_types: Vec<type_uuid::Bytes>,
    component_types: Vec<type_uuid::Bytes>,
}
/// WorldSerializer implementation backed by per-type registries keyed by
/// TypeId; types missing from the maps are skipped during serialization.
struct SerializeImpl {
    tag_types: HashMap<TypeId, TagRegistration>,
    comp_types: HashMap<TypeId, ComponentRegistration>,
    // RefCell: serialization receives &self but needs to record mappings.
    // Presumably maps entities to stable uuids during entity serialization —
    // the populating code is outside this chunk; verify against the impl.
    entity_map: RefCell<HashMap<Entity, uuid::Bytes>>,
}
impl legion::serialize::ser::WorldSerializer for SerializeImpl {
    /// A tag is serializable iff its `TypeId` was registered.
    fn can_serialize_tag(&self, ty: &TagTypeId, _meta: &TagMeta) -> bool {
        // `contains_key` instead of `get(..).is_some()` (clippy idiom).
        self.tag_types.contains_key(&ty.0)
    }
    /// A component is serializable iff its `TypeId` was registered.
    fn can_serialize_component(&self, ty: &ComponentTypeId, _meta: &ComponentMeta) -> bool {
        self.comp_types.contains_key(&ty.0)
    }
    /// Writes the archetype layout as UUID lists so it is stable across builds.
    fn serialize_archetype_description<S: Serializer>(
        &self,
        serializer: S,
        archetype_desc: &ArchetypeDescription,
    ) -> Result<S::Ok, S::Error> {
        // Unregistered tag/component types are silently dropped here; their
        // data is filtered out separately by the `can_serialize_*` hooks.
        let tags_to_serialize = archetype_desc
            .tags()
            .iter()
            .filter_map(|(ty, _)| self.tag_types.get(&ty.0))
            .map(|reg| reg.uuid)
            .collect::<Vec<_>>();
        let components_to_serialize = archetype_desc
            .components()
            .iter()
            .filter_map(|(ty, _)| self.comp_types.get(&ty.0))
            .map(|reg| reg.uuid)
            .collect::<Vec<_>>();
        SerializedArchetypeDescription {
            tag_types: tags_to_serialize,
            component_types: components_to_serialize,
        }
        .serialize(serializer)
    }
    fn serialize_components<S: Serializer>(
        &self,
        serializer: S,
        component_type: &ComponentTypeId,
        _component_meta: &ComponentMeta,
        components: &ComponentResourceSet,
    ) -> Result<S::Ok, S::Error> {
        if let Some(reg) = self.comp_types.get(&component_type.0) {
            // The registration only exposes the value through a callback,
            // while `serializer` must be consumed by value exactly once; the
            // two RefCells smuggle the serializer in and the result back out
            // across the `fn`-pointer boundary.
            let result = RefCell::new(None);
            let serializer = RefCell::new(Some(serializer));
            {
                let mut result_ref = result.borrow_mut();
                (reg.comp_serialize_fn)(components, &mut |serialize| {
                    result_ref.replace(erased_serde::serialize(
                        serialize,
                        serializer.borrow_mut().take().unwrap(),
                    ));
                });
            }
            return result.borrow_mut().take().unwrap();
        }
        panic!(
            "received unserializable type {:?}, this should be filtered by can_serialize",
            component_type
        );
    }
    fn serialize_tags<S: Serializer>(
        &self,
        serializer: S,
        tag_type: &TagTypeId,
        _tag_meta: &TagMeta,
        tags: &TagStorage,
    ) -> Result<S::Ok, S::Error> {
        if let Some(reg) = self.tag_types.get(&tag_type.0) {
            // Same callback/RefCell arrangement as `serialize_components`.
            let result = RefCell::new(None);
            let serializer = RefCell::new(Some(serializer));
            {
                let mut result_ref = result.borrow_mut();
                (reg.tag_serialize_fn)(tags, &mut |serialize| {
                    result_ref.replace(erased_serde::serialize(
                        serialize,
                        serializer.borrow_mut().take().unwrap(),
                    ));
                });
            }
            return result.borrow_mut().take().unwrap();
        }
        panic!(
            "received unserializable type {:?}, this should be filtered by can_serialize",
            tag_type
        );
    }
    /// Serializes entities as stable v4 UUIDs, remembering the mapping so a
    /// later deserialization can correlate entity identities.
    fn serialize_entities<S: Serializer>(
        &self,
        serializer: S,
        entities: &[Entity],
    ) -> Result<S::Ok, S::Error> {
        let mut uuid_map = self.entity_map.borrow_mut();
        serializer.collect_seq(entities.iter().map(|e| {
            *uuid_map
                .entry(*e)
                .or_insert_with(|| *uuid::Uuid::new_v4().as_bytes())
        }))
    }
}
/// `WorldDeserializer` implementation; mirrors `SerializeImpl`, plus
/// UUID-keyed registries for resolving types read back from the data stream.
struct DeserializeImpl {
    tag_types: HashMap<TypeId, TagRegistration>,
    comp_types: HashMap<TypeId, ComponentRegistration>,
    tag_types_by_uuid: HashMap<type_uuid::Bytes, TagRegistration>,
    comp_types_by_uuid: HashMap<type_uuid::Bytes, ComponentRegistration>,
    // UUID -> allocated entity, filled in while deserializing entities.
    entity_map: RefCell<HashMap<uuid::Bytes, Entity>>,
}
impl legion::serialize::de::WorldDeserializer for DeserializeImpl {
    /// Rebuilds an `ArchetypeDescription` from serialized UUID lists,
    /// silently skipping any types that are no longer registered.
    fn deserialize_archetype_description<'de, D: Deserializer<'de>>(
        &self,
        deserializer: D,
    ) -> Result<ArchetypeDescription, <D as Deserializer<'de>>::Error> {
        let serialized_desc =
            <SerializedArchetypeDescription as Deserialize>::deserialize(deserializer)?;
        let mut desc = ArchetypeDescription::default();
        for tag in serialized_desc.tag_types {
            if let Some(reg) = self.tag_types_by_uuid.get(&tag) {
                (reg.register_tag_fn)(&mut desc);
            }
        }
        for comp in serialized_desc.component_types {
            if let Some(reg) = self.comp_types_by_uuid.get(&comp) {
                (reg.register_comp_fn)(&mut desc);
            }
        }
        Ok(desc)
    }
    fn deserialize_components<'de, D: Deserializer<'de>>(
        &self,
        deserializer: D,
        component_type: &ComponentTypeId,
        _component_meta: &ComponentMeta,
        get_next_storage_fn: &mut dyn FnMut() -> Option<(NonNull<u8>, usize)>,
    ) -> Result<(), <D as Deserializer<'de>>::Error> {
        if let Some(reg) = self.comp_types.get(&component_type.0) {
            // Erase the concrete deserializer so the registration's stored
            // `fn` pointer can drive it; erased errors are mapped back to D's.
            let mut erased = erased_serde::Deserializer::erase(deserializer);
            (reg.comp_deserialize_fn)(&mut erased, get_next_storage_fn)
                .map_err(<<D as serde::Deserializer<'de>>::Error as serde::de::Error>::custom)?;
        } else {
            // Unknown type: consume and discard the value so the surrounding
            // sequence stays well-formed.
            <IgnoredAny>::deserialize(deserializer)?;
        }
        Ok(())
    }
    fn deserialize_tags<'de, D: Deserializer<'de>>(
        &self,
        deserializer: D,
        tag_type: &TagTypeId,
        _tag_meta: &TagMeta,
        tags: &mut TagStorage,
    ) -> Result<(), <D as Deserializer<'de>>::Error> {
        if let Some(reg) = self.tag_types.get(&tag_type.0) {
            let mut erased = erased_serde::Deserializer::erase(deserializer);
            (reg.tag_deserialize_fn)(&mut erased, tags)
                .map_err(<<D as serde::Deserializer<'de>>::Error as serde::de::Error>::custom)?;
        } else {
            // Unknown tag type: consume and discard.
            <IgnoredAny>::deserialize(deserializer)?;
        }
        Ok(())
    }
    /// Allocates a fresh `Entity` per serialized UUID and records the mapping
    /// so identities can be correlated across round-trips.
    fn deserialize_entities<'de, D: Deserializer<'de>>(
        &self,
        deserializer: D,
        entity_allocator: &EntityAllocator,
        entities: &mut Vec<Entity>,
    ) -> Result<(), <D as Deserializer<'de>>::Error> {
        let entity_uuids = <Vec<uuid::Bytes> as Deserialize>::deserialize(deserializer)?;
        let mut entity_map = self.entity_map.borrow_mut();
        for id in entity_uuids {
            let entity = entity_allocator.create_entity();
            entity_map.insert(id, entity);
            entities.push(entity);
        }
        Ok(())
    }
}
/// Round-trip demo: serialize a world to JSON, rebuild it in a fresh world,
/// re-serialize, and assert both JSON payloads match exactly.
fn main() {
    // create world
    let universe = Universe::new();
    let mut world = universe.create_world();
    // Pos and Vel are both serializable, so all components in this chunkset will be serialized
    world.insert(
        (),
        vec![
            (Pos(1., 2., 3.), Vel(1., 2., 3.)),
            (Pos(1., 2., 3.), Vel(1., 2., 3.)),
            (Pos(1., 2., 3.), Vel(1., 2., 3.)),
            (Pos(1., 2., 3.), Vel(1., 2., 3.)),
        ],
    );
    // Unserializable components are not serialized, so only the Pos components should be serialized in this chunkset
    for _ in 0..1000 {
        world.insert(
            (Pos(4., 5., 6.), Unregistered(4., 5., 6.)),
            vec![
                (Pos(1., 2., 3.), Unregistered(4., 5., 6.)),
                (Pos(1., 2., 3.), Unregistered(4., 5., 6.)),
                (Pos(1., 2., 3.), Unregistered(4., 5., 6.)),
                (Pos(1., 2., 3.), Unregistered(4., 5., 6.)),
            ],
        );
    }
    // Entities with no serializable components are not serialized, so this entire chunkset should be skipped in the output
    world.insert(
        (Unregistered(4., 5., 6.),),
        vec![(Unregistered(4., 5., 6.),), (Unregistered(4., 5., 6.),)],
    );
    let comp_registrations = [
        ComponentRegistration::of::<Pos>(),
        ComponentRegistration::of::<Vel>(),
    ];
    let tag_registrations = [TagRegistration::of::<Pos>(), TagRegistration::of::<Vel>()];
    use std::iter::FromIterator;
    // Registries are keyed by TypeId for serialization-time lookup.
    let ser_helper = SerializeImpl {
        comp_types: HashMap::from_iter(comp_registrations.iter().map(|reg| (reg.ty, reg.clone()))),
        tag_types: HashMap::from_iter(tag_registrations.iter().map(|reg| (reg.ty, reg.clone()))),
        entity_map: RefCell::new(HashMap::new()),
    };
    let serializable = legion::serialize::ser::serializable_world(&world, &ser_helper);
    let serialized_data = serde_json::to_string(&serializable).unwrap();
    // The deserializer needs UUID-keyed registries; build them by re-keying
    // the serializer's TypeId-keyed maps.
    let de_helper = DeserializeImpl {
        tag_types_by_uuid: HashMap::from_iter(
            ser_helper
                .tag_types
                .iter()
                .map(|reg| (reg.1.uuid, reg.1.clone())),
        ),
        comp_types_by_uuid: HashMap::from_iter(
            ser_helper
                .comp_types
                .iter()
                .map(|reg| (reg.1.uuid, reg.1.clone())),
        ),
        tag_types: ser_helper.tag_types,
        comp_types: ser_helper.comp_types,
        // re-use the entity-uuid mapping (inverted to uuid -> entity)
        entity_map: RefCell::new(HashMap::from_iter(
            ser_helper
                .entity_map
                .into_inner()
                .into_iter()
                .map(|(e, uuid)| (uuid, e)),
        )),
    };
    let mut deserialized_world = universe.create_world();
    let mut deserializer = serde_json::Deserializer::from_str(&serialized_data);
    legion::serialize::de::deserialize(&mut deserialized_world, &de_helper, &mut deserializer)
        .unwrap();
    // Serialize the rebuilt world, reusing the recorded mapping so entity
    // UUIDs line up with the first pass.
    let ser_helper = SerializeImpl {
        tag_types: de_helper.tag_types,
        comp_types: de_helper.comp_types,
        // re-use the entity-uuid mapping (inverted back to entity -> uuid)
        entity_map: RefCell::new(HashMap::from_iter(
            de_helper
                .entity_map
                .into_inner()
                .into_iter()
                .map(|(uuid, e)| (e, uuid)),
        )),
    };
    let serializable = legion::serialize::ser::serializable_world(&deserialized_world, &ser_helper);
    let roundtrip_data = serde_json::to_string(&serializable).unwrap();
    assert_eq!(roundtrip_data, serialized_data);
}

View file

@ -1,45 +0,0 @@
[package]
name = "legion-core"
version = "0.2.1"
description = "High performance entity component system (ECS) library"
authors = ["Thomas Gillen <thomas.gillen@googlemail.com>"]
repository = "https://github.com/TomGillen/legion"
keywords = ["ecs", "game"]
categories = ["game-engines", "data-structures"]
readme = "readme.md"
license = "MIT"
edition = "2018"
[badges]
travis-ci = { repository = "TomGillen/legion", branch = "master" }
[features]
par-iter = ["rayon"]
ffi = []
serialize = ["serde"]
[dependencies]
parking_lot = "0.10"
downcast-rs = "1.0"
itertools = "0.8"
rayon = { version = "1.2", optional = true }
crossbeam-queue = { version = "0.2.0", optional = true }
crossbeam-channel = "0.4.0"
derivative = "1"
smallvec = "1.2"
tracing = "0.1"
metrics = { version = "0.12", optional = true }
serde = { version = "1", optional = true }
fxhash = "0.2"
thiserror = "1.0"
[dev-dependencies]
tracing-subscriber = "0.2"
serde_json = "1.0"
type-uuid = "0.1"
erased-serde = "0.3"
serde = { version = "1", features = ["derive"]}
uuid = { version = "0.8", features = ["v4"] }
tracing = "0.1"
itertools = "0.8"
rayon = "1.2"

View file

@ -1,711 +0,0 @@
//! Atomic runtime borrow checking module.
//! These types implement something akin to `RefCell`, but are atomically handled allowing them to
//! cross thread boundaries.
use std::any::{type_name, Any};
use std::cell::UnsafeCell;
use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::ops::DerefMut;
use std::sync::atomic::AtomicIsize;
#[cfg(not(debug_assertions))]
use std::marker::PhantomData;
/// Trait for downcasting a type-erased value to a concrete type `T`.
/// (Implementations are outside this file section; the `_typename` suffix
/// suggests matching by type name rather than `TypeId` — TODO confirm.)
pub trait DowncastTypename {
    /// Attempts to downcast to `&mut T`, `None` on type mismatch.
    fn downcast_typename_mut<T: Any>(&mut self) -> Option<&mut T>;
    /// Attempts to downcast to `&T`, `None` on type mismatch.
    fn downcast_typename_ref<T: Any>(&self) -> Option<&T>;
    /// Returns `true` if the underlying value is a `T`.
    fn is_typename<T: Any>(&self) -> bool;
}
pub fn type_name_of_val<T: ?Sized>(_val: &T) -> &'static str { type_name::<T>() }
/// A `RefCell` implementation which is thread safe. This type performs all the standard runtime
/// borrow checking which would be familiar from using `RefCell`.
///
/// `UnsafeCell` is used in this type, but borrow checking is performed using atomic values,
/// guaranteeing safe access across threads.
///
/// # Safety
/// Runtime borrow checking is only conducted in builds with `debug_assertions` enabled. Release
/// builds assume proper resource access and will cause undefined behavior with improper use.
pub struct AtomicRefCell<T> {
    value: UnsafeCell<T>,
    // Borrow bookkeeping: 0 = free, positive = shared borrow count,
    // negative = exclusively borrowed (only consulted in debug builds).
    borrow_state: AtomicIsize,
}
impl<T: Default> Default for AtomicRefCell<T> {
fn default() -> Self { Self::new(T::default()) }
}
impl<T: std::fmt::Debug> std::fmt::Debug for AtomicRefCell<T> {
    // Renders as "(<borrow counter>) <value>".
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "({:?}) {:?}", self.borrow_state, self.value)
    }
}
impl<T> AtomicRefCell<T> {
    /// Creates a new, unborrowed cell wrapping `value`.
    pub fn new(value: T) -> Self {
        AtomicRefCell {
            value: UnsafeCell::from(value),
            // 0 = free, >0 = shared borrows, <0 = exclusive borrow.
            borrow_state: AtomicIsize::from(0),
        }
    }
    /// Retrieve an immutable `Ref` wrapped reference of `&T`.
    ///
    /// # Panics
    ///
    /// This method panics if this value is already mutably borrowed.
    ///
    /// # Safety
    /// Runtime borrow checking is only conducted in builds with `debug_assertions` enabled. Release
    /// builds assume proper resource access and will cause undefined behavior with improper use.
    #[inline(always)]
    pub fn get(&self) -> Ref<T> { self.try_get().unwrap() }
    /// Unwrap the value from the RefCell and kill it, returning the value.
    pub fn into_inner(self) -> T { self.value.into_inner() }
    /// Retrieve an immutable `Ref` wrapped reference of `&T`. This is the safe version of `get`
    /// providing an error result on failure.
    ///
    /// # Returns
    ///
    /// `Some(T)` if the value can be retrieved.
    /// `Err` if the value is already mutably borrowed.
    #[cfg(debug_assertions)]
    pub fn try_get(&self) -> Result<Ref<T>, String> {
        // CAS loop: retry until the reader count is bumped from the observed
        // value; fail out if a writer (negative state) holds the cell.
        loop {
            let read = self.borrow_state.load(std::sync::atomic::Ordering::SeqCst);
            if read < 0 {
                return Err(format!(
                    "resource already borrowed as mutable: {}",
                    std::any::type_name::<T>()
                ));
            }
            // NOTE(review): `compare_and_swap` is deprecated since Rust 1.50;
            // `compare_exchange` is the modern equivalent.
            if self.borrow_state.compare_and_swap(
                read,
                read + 1,
                std::sync::atomic::Ordering::SeqCst,
            ) == read
            {
                break;
            }
        }
        // The `Shared` handle decrements the counter again on drop.
        Ok(Ref::new(Shared::new(&self.borrow_state), unsafe {
            &*self.value.get()
        }))
    }
    /// Retrieve an immutable `Ref` wrapped reference of `&T`. This is the safe version of `get`
    /// providing an error result on failure.
    ///
    /// # Returns
    ///
    /// `Some(T)` if the value can be retrieved.
    /// `Err` if the value is already mutably borrowed.
    ///
    /// # Safety
    ///
    /// This release version of this function does not perform runtime borrow checking and will
    /// cause undefined behavior if borrow rules are violated. This means they should be enforced
    /// on the use of this type.
    #[cfg(not(debug_assertions))]
    #[inline(always)]
    pub fn try_get(&self) -> Result<Ref<T>, &'static str> {
        // Release build: no counting — always succeeds.
        Ok(Ref::new(Shared::new(&self.borrow_state), unsafe {
            &*self.value.get()
        }))
    }
    /// Retrieve an mutable `RefMut` wrapped reference of `&mut T`.
    ///
    /// # Panics
    ///
    /// This method panics if this value is already mutably borrowed.
    ///
    /// # Safety
    /// Runtime borrow checking is only conducted in builds with `debug_assertions` enabled. Release
    /// builds assume proper resource access and will cause undefined behavior with improper use.
    #[inline(always)]
    pub fn get_mut(&self) -> RefMut<T> { self.try_get_mut().unwrap() }
    /// Retrieve a mutable `RefMut` wrapped reference of `&mut T`. This is the safe version of
    /// `get_mut` providing an error result on failure.
    ///
    /// # Returns
    ///
    /// `Some(T)` if the value can be retrieved.
    /// `Err` if the value is already mutably borrowed.
    ///
    /// # Safety
    ///
    /// This release version of this function does not perform runtime borrow checking and will
    /// cause undefined behavior if borrow rules are violated. This means they should be enforced
    /// on the use of this type.
    #[cfg(debug_assertions)]
    pub fn try_get_mut(&self) -> Result<RefMut<T>, String> {
        // Single CAS 0 -> -1: succeeds only when the cell is unborrowed.
        let borrowed =
            self.borrow_state
                .compare_and_swap(0, -1, std::sync::atomic::Ordering::SeqCst);
        match borrowed {
            0 => Ok(RefMut::new(Exclusive::new(&self.borrow_state), unsafe {
                &mut *self.value.get()
            })),
            x if x < 0 => Err(format!(
                "resource already borrowed as mutable: {}",
                std::any::type_name::<T>()
            )),
            _ => Err(format!(
                "resource already borrowed as immutable: {}",
                std::any::type_name::<T>()
            )),
        }
    }
    /// Retrieve a mutable `RefMut` wrapped reference of `&mut T`. This is the safe version of
    /// `get_mut` providing an error result on failure.
    ///
    /// # Returns
    ///
    /// `Some(T)` if the value can be retrieved.
    /// `Err` if the value is already mutably borrowed.
    ///
    /// # Safety
    ///
    /// This release version of this function does not perform runtime borrow checking and will
    /// cause undefined behavior if borrow rules are violated. This means they should be enforced
    /// on the use of this type.
    #[cfg(not(debug_assertions))]
    #[inline(always)]
    pub fn try_get_mut(&self) -> Result<RefMut<T>, &'static str> {
        // Release build: no counting — always succeeds.
        Ok(RefMut::new(Exclusive::new(&self.borrow_state), unsafe {
            &mut *self.value.get()
        }))
    }
}
// SAFETY: the cell only adds an atomic counter around `T`, so moving it
// across threads is sound whenever `T: Send`.
unsafe impl<T: Send> Send for AtomicRefCell<T> {}
// SAFETY: shared access is mediated by the atomic borrow state in debug
// builds; release builds rely on callers upholding the documented borrow
// rules (see the type-level `# Safety` note).
unsafe impl<T: Sync> Sync for AtomicRefCell<T> {}
/// Type used for allowing unsafe cloning of internal types
pub trait UnsafeClone {
    /// Clone this type unsafely
    ///
    /// # Safety
    /// Types implementing this trait perform clones under an unsafe context.
    unsafe fn clone(&self) -> Self;
}
impl<A: UnsafeClone, B: UnsafeClone> UnsafeClone for (A, B) {
    // Pairwise unsafe clone of both tuple elements.
    unsafe fn clone(&self) -> Self { (self.0.clone(), self.1.clone()) }
}
/// Handle representing one shared (read) borrow of an `AtomicRefCell`.
/// In debug builds dropping it decrements the cell's reader count; in
/// release builds it is a zero-sized marker.
#[derive(Debug)]
pub struct Shared<'a> {
    #[cfg(debug_assertions)]
    state: &'a AtomicIsize,
    #[cfg(not(debug_assertions))]
    state: PhantomData<&'a ()>,
}
impl<'a> Shared<'a> {
    #[cfg(debug_assertions)]
    fn new(state: &'a AtomicIsize) -> Self { Self { state } }
    #[cfg(not(debug_assertions))]
    #[inline(always)]
    fn new(_: &'a AtomicIsize) -> Self { Self { state: PhantomData } }
}
#[cfg(debug_assertions)]
impl<'a> Drop for Shared<'a> {
    // Releases one shared borrow.
    fn drop(&mut self) { self.state.fetch_sub(1, std::sync::atomic::Ordering::SeqCst); }
}
impl<'a> Clone for Shared<'a> {
    #[inline(always)]
    fn clone(&self) -> Self {
        // Each clone registers an additional reader; the cfg attribute gates
        // just this statement (debug builds only).
        #[cfg(debug_assertions)]
        self.state.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        Shared { state: self.state }
    }
}
impl<'a> UnsafeClone for Shared<'a> {
    // Shared handles are safely cloneable; delegate to `Clone`.
    unsafe fn clone(&self) -> Self { Clone::clone(&self) }
}
/// Handle representing the exclusive (write) borrow of an `AtomicRefCell`;
/// a zero-sized marker in release builds.
#[derive(Debug)]
pub struct Exclusive<'a> {
    #[cfg(debug_assertions)]
    state: &'a AtomicIsize,
    #[cfg(not(debug_assertions))]
    state: PhantomData<&'a ()>,
}
impl<'a> Exclusive<'a> {
    #[cfg(debug_assertions)]
    fn new(state: &'a AtomicIsize) -> Self { Self { state } }
    #[cfg(not(debug_assertions))]
    #[inline(always)]
    fn new(_: &'a AtomicIsize) -> Self { Self { state: PhantomData } }
}
#[cfg(debug_assertions)]
impl<'a> Drop for Exclusive<'a> {
    // Releases one exclusive borrow (state moves back toward 0).
    fn drop(&mut self) { self.state.fetch_add(1, std::sync::atomic::Ordering::SeqCst); }
}
impl<'a> UnsafeClone for Exclusive<'a> {
    #[inline(always)]
    unsafe fn clone(&self) -> Self {
        // Deepens the negative writer count; each clone is balanced by one
        // drop. Callers (e.g. `RefMut::split`) must ensure the duplicated
        // handles never alias the same mutable data.
        #[cfg(debug_assertions)]
        self.state.fetch_sub(1, std::sync::atomic::Ordering::SeqCst);
        Exclusive { state: self.state }
    }
}
/// A shared borrow of an `AtomicRefCell`'s value; the borrow is released when
/// the `Shared` handle drops.
#[derive(Debug)]
pub struct Ref<'a, T: 'a> {
    #[allow(dead_code)]
    // held for drop impl
    borrow: Shared<'a>,
    value: &'a T,
}
impl<'a, T: 'a> Clone for Ref<'a, T> {
    // Cloning registers an additional shared borrow via the handle clone.
    #[inline(always)]
    fn clone(&self) -> Self { Ref::new(Clone::clone(&self.borrow), self.value) }
}
impl<'a, T: 'a> Ref<'a, T> {
    #[inline(always)]
    pub fn new(borrow: Shared<'a>, value: &'a T) -> Self { Self { borrow, value } }
    /// Maps into an owned value while keeping the shared borrow alive.
    #[inline(always)]
    pub fn map_into<K: 'a, F: FnMut(&'a T) -> K>(self, mut f: F) -> RefMap<'a, K> {
        RefMap::new(self.borrow, f(&self.value))
    }
    /// Projects to a sub-borrow of the value, cloning the borrow handle.
    #[inline(always)]
    pub fn map<K: 'a, F: FnMut(&T) -> &K>(&self, mut f: F) -> Ref<'a, K> {
        Ref::new(Clone::clone(&self.borrow), f(&self.value))
    }
    /// Deconstructs this mapped borrow to its underlying borrow state and value.
    ///
    /// # Safety
    ///
    /// Ensure that you still follow all safety guidelines of this mapped ref.
    #[inline(always)]
    pub unsafe fn deconstruct(self) -> (Shared<'a>, &'a T) { (self.borrow, self.value) }
}
// Transparent read access to the wrapped value.
impl<'a, T: 'a> Deref for Ref<'a, T> {
    type Target = T;
    #[inline(always)]
    fn deref(&self) -> &Self::Target { self.value }
}
impl<'a, T: 'a> AsRef<T> for Ref<'a, T> {
    #[inline(always)]
    fn as_ref(&self) -> &T { self.value }
}
impl<'a, T: 'a> std::borrow::Borrow<T> for Ref<'a, T> {
    #[inline(always)]
    fn borrow(&self) -> &T { self.value }
}
// Comparison and hashing delegate to the wrapped value; the borrow handle is
// ignored.
impl<'a, T> PartialEq for Ref<'a, T>
where
    T: 'a + PartialEq,
{
    fn eq(&self, other: &Self) -> bool { self.value == other.value }
}
impl<'a, T> Eq for Ref<'a, T> where T: 'a + Eq {}
impl<'a, T> PartialOrd for Ref<'a, T>
where
    T: 'a + PartialOrd,
{
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.value.partial_cmp(&other.value)
    }
}
impl<'a, T> Ord for Ref<'a, T>
where
    T: 'a + Ord,
{
    fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.value.cmp(&other.value) }
}
impl<'a, T> Hash for Ref<'a, T>
where
    T: 'a + Hash,
{
    fn hash<H: Hasher>(&self, state: &mut H) { self.value.hash(state); }
}
/// An exclusive (mutable) borrow of an `AtomicRefCell`'s value; released when
/// the `Exclusive` handle drops.
#[derive(Debug)]
pub struct RefMut<'a, T: 'a> {
    #[allow(dead_code)]
    // held for drop impl
    borrow: Exclusive<'a>,
    value: &'a mut T,
}
impl<'a, T: 'a> RefMut<'a, T> {
    #[inline(always)]
    pub fn new(borrow: Exclusive<'a>, value: &'a mut T) -> Self { Self { borrow, value } }
    /// Maps into an owned value while keeping the exclusive borrow alive.
    #[inline(always)]
    pub fn map_into<K: 'a, F: FnMut(&mut T) -> K>(mut self, mut f: F) -> RefMapMut<'a, K> {
        RefMapMut::new(self.borrow, f(&mut self.value))
    }
    /// Deconstructs this mapped borrow to its underlying borrow state and value.
    ///
    /// # Safety
    ///
    /// Ensure that you still follow all safety guidelines of this mapped ref.
    #[inline(always)]
    pub unsafe fn deconstruct(self) -> (Exclusive<'a>, &'a mut T) { (self.borrow, self.value) }
    /// Splits one exclusive borrow into two covering the disjoint parts of
    /// the value returned by `f`.
    #[inline(always)]
    pub fn split<First, Rest, F: Fn(&'a mut T) -> (&'a mut First, &'a mut Rest)>(
        self,
        f: F,
    ) -> (RefMut<'a, First>, RefMut<'a, Rest>) {
        let (first, rest) = f(self.value);
        (
            // SAFETY: `f` returns two disjoint `&mut`s, so duplicating the
            // exclusive handle does not alias mutable data.
            RefMut::new(unsafe { self.borrow.clone() }, first),
            RefMut::new(self.borrow, rest),
        )
    }
}
// Transparent read/write access to the wrapped value.
impl<'a, T: 'a> Deref for RefMut<'a, T> {
    type Target = T;
    #[inline(always)]
    fn deref(&self) -> &Self::Target { self.value }
}
impl<'a, T: 'a> DerefMut for RefMut<'a, T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target { self.value }
}
impl<'a, T: 'a> AsRef<T> for RefMut<'a, T> {
    #[inline(always)]
    fn as_ref(&self) -> &T { self.value }
}
impl<'a, T: 'a> std::borrow::Borrow<T> for RefMut<'a, T> {
    #[inline(always)]
    fn borrow(&self) -> &T { self.value }
}
// Comparison and hashing delegate to the wrapped value; the borrow handle is
// ignored.
impl<'a, T> PartialEq for RefMut<'a, T>
where
    T: 'a + PartialEq,
{
    fn eq(&self, other: &Self) -> bool { self.value == other.value }
}
impl<'a, T> Eq for RefMut<'a, T> where T: 'a + Eq {}
impl<'a, T> PartialOrd for RefMut<'a, T>
where
    T: 'a + PartialOrd,
{
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.value.partial_cmp(&other.value)
    }
}
impl<'a, T> Ord for RefMut<'a, T>
where
    T: 'a + Ord,
{
    fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.value.cmp(&other.value) }
}
impl<'a, T> Hash for RefMut<'a, T>
where
    T: 'a + Hash,
{
    fn hash<H: Hasher>(&self, state: &mut H) { self.value.hash(state); }
}
/// An owned value produced under a shared borrow (via `Ref::map_into`); the
/// borrow handle is held, and released on drop, as long as the value lives.
#[derive(Debug)]
pub struct RefMap<'a, T: 'a> {
    #[allow(dead_code)]
    // held for drop impl
    borrow: Shared<'a>,
    value: T,
}
impl<'a, T: 'a> RefMap<'a, T> {
    #[inline(always)]
    pub fn new(borrow: Shared<'a>, value: T) -> Self { Self { borrow, value } }
    /// Maps the owned value again, carrying the same shared borrow.
    #[inline(always)]
    pub fn map_into<K: 'a, F: FnMut(&mut T) -> K>(mut self, mut f: F) -> RefMap<'a, K> {
        RefMap::new(self.borrow, f(&mut self.value))
    }
    /// Deconstructs this mapped borrow to its underlying borrow state and value.
    ///
    /// # Safety
    ///
    /// Ensure that you still follow all safety guidelines of this mapped ref.
    #[inline(always)]
    pub unsafe fn deconstruct(self) -> (Shared<'a>, T) { (self.borrow, self.value) }
}
impl<'a, T: 'a> Deref for RefMap<'a, T> {
    type Target = T;
    #[inline(always)]
    fn deref(&self) -> &Self::Target { &self.value }
}
impl<'a, T: 'a> AsRef<T> for RefMap<'a, T> {
    #[inline(always)]
    fn as_ref(&self) -> &T { &self.value }
}
impl<'a, T: 'a> std::borrow::Borrow<T> for RefMap<'a, T> {
    #[inline(always)]
    fn borrow(&self) -> &T { &self.value }
}
/// An owned value produced under an exclusive borrow (via `RefMut::map_into`);
/// the borrow handle is held, and released on drop, as long as the value lives.
#[derive(Debug)]
pub struct RefMapMut<'a, T: 'a> {
    #[allow(dead_code)]
    // held for drop impl
    borrow: Exclusive<'a>,
    value: T,
}
impl<'a, T: 'a> RefMapMut<'a, T> {
    #[inline(always)]
    pub fn new(borrow: Exclusive<'a>, value: T) -> Self { Self { borrow, value } }
    /// Maps the owned value again, carrying the same exclusive borrow.
    #[inline(always)]
    pub fn map_into<K: 'a, F: FnMut(&mut T) -> K>(mut self, mut f: F) -> RefMapMut<'a, K> {
        RefMapMut {
            value: f(&mut self.value),
            borrow: self.borrow,
        }
    }
    /// Deconstructs this mapped borrow to its underlying borrow state and value.
    ///
    /// # Safety
    ///
    /// Ensure that you still follow all safety guidelines of this mutable mapped ref.
    #[inline(always)]
    pub unsafe fn deconstruct(self) -> (Exclusive<'a>, T) { (self.borrow, self.value) }
}
impl<'a, T: 'a> Deref for RefMapMut<'a, T> {
    type Target = T;
    #[inline(always)]
    fn deref(&self) -> &Self::Target { &self.value }
}
impl<'a, T: 'a> DerefMut for RefMapMut<'a, T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target { &mut self.value }
}
impl<'a, T: 'a> AsRef<T> for RefMapMut<'a, T> {
    #[inline(always)]
    fn as_ref(&self) -> &T { &self.value }
}
impl<'a, T: 'a> std::borrow::Borrow<T> for RefMapMut<'a, T> {
    #[inline(always)]
    fn borrow(&self) -> &T { &self.value }
}
/// Iterator adapter yielding `Ref`-wrapped shared borrows; the `Shared`
/// handle is cloned for each yielded item.
#[derive(Debug)]
pub struct RefIter<'a, T: 'a, I: Iterator<Item = &'a T>> {
    #[allow(dead_code)]
    // held for drop impl
    borrow: Shared<'a>,
    iter: I,
}
impl<'a, T: 'a, I: Iterator<Item = &'a T>> RefIter<'a, T, I> {
    #[inline(always)]
    pub fn new(borrow: Shared<'a>, iter: I) -> Self { Self { borrow, iter } }
}
impl<'a, T: 'a, I: Iterator<Item = &'a T>> Iterator for RefIter<'a, T, I> {
    type Item = Ref<'a, T>;
    /// Wraps each inner item in a `Ref` sharing this iterator's borrow.
    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        // `.map` replaces the manual `if let Some … else None`
        // (clippy::manual_map); behavior is unchanged.
        self.iter
            .next()
            .map(|item| Ref::new(Clone::clone(&self.borrow), item))
    }
    // Length information is delegated to the wrapped iterator.
    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
// Exact length is inherited from the wrapped iterator via `size_hint`.
impl<'a, T: 'a, I: Iterator<Item = &'a T> + ExactSizeIterator> ExactSizeIterator
    for RefIter<'a, T, I>
{
}
/// Internal state for the `Try*` iterators: either a found storage with its
/// borrow handle, or a missing storage with the number of `None`s to yield.
#[derive(Debug)]
enum TryIter<State, T> {
    Found { borrow: State, iter: T },
    Missing(usize),
}
/// Like `RefIter`, but for possibly-absent storage: yields `Some(Ref<T>)` per
/// item when found, or exactly `count` `None`s when missing.
#[derive(Debug)]
pub struct TryRefIter<'a, T: 'a, I: Iterator<Item = &'a T>> {
    inner: TryIter<Shared<'a>, I>,
}
impl<'a, T: 'a, I: Iterator<Item = &'a T>> TryRefIter<'a, T, I> {
    #[inline(always)]
    pub(crate) fn found(borrow: Shared<'a>, iter: I) -> Self {
        Self {
            inner: TryIter::Found { borrow, iter },
        }
    }
    #[inline(always)]
    pub(crate) fn missing(count: usize) -> Self {
        Self {
            inner: TryIter::Missing(count),
        }
    }
}
impl<'a, T: 'a, I: Iterator<Item = &'a T>> Iterator for TryRefIter<'a, T, I> {
    type Item = Option<Ref<'a, T>>;
    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        Some(match self.inner {
            TryIter::Found {
                ref borrow,
                ref mut iter,
                ..
            } => Some(Ref::new(Clone::clone(borrow), iter.next()?)),
            TryIter::Missing(ref mut n) => {
                // Count down; `checked_sub` terminates after n `None`s.
                *n = n.checked_sub(1)?;
                None
            }
        })
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        match self.inner {
            TryIter::Found { ref iter, .. } => iter.size_hint(),
            TryIter::Missing(n) => (n, Some(n)),
        }
    }
}
impl<'a, T: 'a, I: Iterator<Item = &'a T> + ExactSizeIterator> ExactSizeIterator
    for TryRefIter<'a, T, I>
{
}
/// Iterator adapter yielding `RefMut`-wrapped exclusive borrows; the
/// `Exclusive` handle is (unsafely) cloned per item.
#[derive(Debug)]
pub struct RefIterMut<'a, T: 'a, I: Iterator<Item = &'a mut T>> {
    #[allow(dead_code)]
    // held for drop impl
    borrow: Exclusive<'a>,
    iter: I,
}
impl<'a, T: 'a, I: Iterator<Item = &'a mut T>> RefIterMut<'a, T, I> {
    #[inline(always)]
    pub fn new(borrow: Exclusive<'a>, iter: I) -> Self { Self { borrow, iter } }
}
impl<'a, T: 'a, I: Iterator<Item = &'a mut T>> Iterator for RefIterMut<'a, T, I> {
    type Item = RefMut<'a, T>;
    /// Wraps each inner item in a `RefMut` carrying this iterator's borrow.
    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        // `.map` replaces the manual `if let Some … else None`
        // (clippy::manual_map); behavior is unchanged.
        // SAFETY: the inner iterator yields disjoint `&mut` items, so each
        // duplicated exclusive handle covers distinct data; every clone is
        // balanced by the yielded `RefMut`'s drop.
        self.iter
            .next()
            .map(|item| RefMut::new(unsafe { self.borrow.clone() }, item))
    }
    // Length information is delegated to the wrapped iterator.
    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
// Exact length is inherited from the wrapped iterator via `size_hint`.
impl<'a, T: 'a, I: Iterator<Item = &'a mut T> + ExactSizeIterator> ExactSizeIterator
    for RefIterMut<'a, T, I>
{
}
/// Like `RefIterMut`, but for possibly-absent storage: yields
/// `Some(RefMut<T>)` per item when found, or exactly `count` `None`s when
/// missing.
#[derive(Debug)]
pub struct TryRefIterMut<'a, T: 'a, I: Iterator<Item = &'a mut T>> {
    inner: TryIter<Exclusive<'a>, I>,
}
impl<'a, T: 'a, I: Iterator<Item = &'a mut T>> TryRefIterMut<'a, T, I> {
    #[inline(always)]
    pub(crate) fn found(borrow: Exclusive<'a>, iter: I) -> Self {
        Self {
            inner: TryIter::Found { borrow, iter },
        }
    }
    #[inline(always)]
    pub(crate) fn missing(count: usize) -> Self {
        Self {
            inner: TryIter::Missing(count),
        }
    }
}
impl<'a, T: 'a, I: Iterator<Item = &'a mut T>> Iterator for TryRefIterMut<'a, T, I> {
    type Item = Option<RefMut<'a, T>>;
    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        Some(match self.inner {
            TryIter::Found {
                ref borrow,
                ref mut iter,
                ..
            // SAFETY: inner items are disjoint `&mut`s, so the duplicated
            // exclusive handle never aliases mutable data.
            } => Some(RefMut::new(unsafe { borrow.clone() }, iter.next()?)),
            TryIter::Missing(ref mut n) => {
                // Count down; `checked_sub` terminates after n `None`s.
                *n = n.checked_sub(1)?;
                None
            }
        })
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        match self.inner {
            TryIter::Found { ref iter, .. } => iter.size_hint(),
            TryIter::Missing(n) => (n, Some(n)),
        }
    }
}
impl<'a, T: 'a, I: Iterator<Item = &'a mut T> + ExactSizeIterator> ExactSizeIterator
    for TryRefIterMut<'a, T, I>
{
}

View file

@ -1,630 +0,0 @@
use crate::{
borrow::AtomicRefCell,
cons::{ConsAppend, ConsFlatten},
entity::{Entity, EntityAllocator},
filter::{ChunksetFilterData, Filter},
storage::{Component, ComponentTypeId, Tag, TagTypeId},
world::{
ComponentSource, ComponentTupleSet, IntoComponentSource, PreallocComponentSource,
TagLayout, TagSet, World, WorldId,
},
};
use derivative::Derivative;
use smallvec::SmallVec;
use std::ops::Range;
use std::{collections::VecDeque, iter::FromIterator, marker::PhantomData, sync::Arc};
use tracing::{span, Level};
/// This trait can be used to implement custom world writer types that can be directly
/// inserted into the command buffer, for more custom and complex world operations. This is analogous
/// to the `CommandBuffer::exec_mut` function type, but does not perform explicit archetype access.
pub trait WorldWritable {
    /// Destructs the writer and performs the write operations on the world.
    fn write(self: Arc<Self>, world: &mut World, cmd: &CommandBuffer);
    /// Returns the list of `ComponentTypeId` which are written by this command buffer. This is leveraged
    /// to allow parallel command buffer flushing.
    fn write_components(&self) -> Vec<ComponentTypeId>;
    /// Returns the list of `TagTypeId` which are written by this command buffer. This is leveraged
    /// to allow parallel command buffer flushing.
    fn write_tags(&self) -> Vec<TagTypeId>;
}
/// Queued insertion whose entities were preallocated by the command buffer
/// (referenced via the `entities` index range).
#[derive(Derivative)]
#[derivative(Debug(bound = ""))]
struct InsertBufferedCommand<T, C> {
    write_components: Vec<ComponentTypeId>,
    write_tags: Vec<TagTypeId>,
    #[derivative(Debug = "ignore")]
    tags: T,
    #[derivative(Debug = "ignore")]
    components: C,
    // Index range into `CommandBuffer::pending_insertion` naming the
    // preallocated entities this command fills in.
    entities: Range<usize>,
}
impl<T, C> WorldWritable for InsertBufferedCommand<T, C>
where
    T: TagSet + TagLayout + for<'a> Filter<ChunksetFilterData<'a>>,
    C: ComponentSource,
{
    fn write(self: Arc<Self>, world: &mut World, cmd: &CommandBuffer) {
        // Assumes this Arc is uniquely owned at flush time; panics otherwise.
        let consumed = Arc::try_unwrap(self).unwrap();
        world.insert(
            consumed.tags,
            PreallocComponentSource::new(
                cmd.pending_insertion[consumed.entities].iter().copied(),
                consumed.components,
            ),
        );
    }
    fn write_components(&self) -> Vec<ComponentTypeId> { self.write_components.clone() }
    fn write_tags(&self) -> Vec<TagTypeId> { self.write_tags.clone() }
}
/// Queued insertion of fresh entities built from `tags` + `components`.
#[derive(Derivative)]
#[derivative(Debug(bound = ""))]
struct InsertCommand<T, C> {
    write_components: Vec<ComponentTypeId>,
    write_tags: Vec<TagTypeId>,
    #[derivative(Debug = "ignore")]
    tags: T,
    #[derivative(Debug = "ignore")]
    components: C,
}
impl<T, C> WorldWritable for InsertCommand<T, C>
where
    T: TagSet + TagLayout + for<'a> Filter<ChunksetFilterData<'a>>,
    C: IntoComponentSource,
{
    fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) {
        // Assumes this Arc is uniquely owned at flush time; panics otherwise.
        let consumed = Arc::try_unwrap(self).unwrap();
        world.insert(consumed.tags, consumed.components);
    }
    fn write_components(&self) -> Vec<ComponentTypeId> { self.write_components.clone() }
    fn write_tags(&self) -> Vec<TagTypeId> { self.write_tags.clone() }
}
/// Queued deletion of a single entity.
#[derive(Derivative)]
#[derivative(Debug(bound = ""))]
struct DeleteEntityCommand(Entity);
impl WorldWritable for DeleteEntityCommand {
    fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) { world.delete(self.0); }
    // Deletion declares no component/tag writes for scheduling purposes.
    fn write_components(&self) -> Vec<ComponentTypeId> { Vec::with_capacity(0) }
    fn write_tags(&self) -> Vec<TagTypeId> { Vec::with_capacity(0) }
}
/// Queued addition of tag `T` to an existing entity.
#[derive(Derivative)]
#[derivative(Debug(bound = ""))]
struct AddTagCommand<T> {
    entity: Entity,
    #[derivative(Debug = "ignore")]
    tag: T,
}
impl<T> WorldWritable for AddTagCommand<T>
where
    T: Tag,
{
    fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) {
        // Assumes this Arc is uniquely owned at flush time; panics otherwise.
        let consumed = Arc::try_unwrap(self).unwrap();
        // Failure is logged rather than propagated (best-effort write).
        if let Err(err) = world.add_tag(consumed.entity, consumed.tag) {
            tracing::error!(error = %err, "error adding tag");
        }
    }
    fn write_components(&self) -> Vec<ComponentTypeId> { Vec::with_capacity(0) }
    fn write_tags(&self) -> Vec<TagTypeId> { vec![TagTypeId::of::<T>()] }
}
/// Queued removal of tag `T` from an existing entity.
#[derive(Derivative)]
#[derivative(Debug(bound = ""))]
struct RemoveTagCommand<T> {
    entity: Entity,
    // Only the type is needed; no tag value is stored.
    _marker: PhantomData<T>,
}
impl<T> WorldWritable for RemoveTagCommand<T>
where
    T: Tag,
{
    fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) {
        // Failure is logged rather than propagated (best-effort write).
        if let Err(err) = world.remove_tag::<T>(self.entity) {
            tracing::error!(error = %err, "error removing tag");
        }
    }
    fn write_components(&self) -> Vec<ComponentTypeId> { Vec::with_capacity(0) }
    fn write_tags(&self) -> Vec<TagTypeId> { vec![TagTypeId::of::<T>()] }
}
/// Queued addition of component `C` to an existing entity.
#[derive(Derivative)]
#[derivative(Debug(bound = ""))]
struct AddComponentCommand<C> {
    // NOTE(review): `Debug = "ignore"` on `entity` is inconsistent with the
    // sibling commands, which keep the entity visible in Debug output.
    #[derivative(Debug = "ignore")]
    entity: Entity,
    #[derivative(Debug = "ignore")]
    component: C,
}
impl<C> WorldWritable for AddComponentCommand<C>
where
    C: Component,
{
    fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) {
        // Assumes this Arc is uniquely owned at flush time; panics otherwise.
        let consumed = Arc::try_unwrap(self).unwrap();
        // Failure is logged rather than propagated (best-effort write).
        if let Err(err) = world.add_component::<C>(consumed.entity, consumed.component) {
            tracing::error!(error = %err, "error adding component");
        }
    }
    fn write_components(&self) -> Vec<ComponentTypeId> { vec![ComponentTypeId::of::<C>()] }
    fn write_tags(&self) -> Vec<TagTypeId> { Vec::with_capacity(0) }
}
/// Queued removal of component `C` from an existing entity.
#[derive(Derivative)]
#[derivative(Debug(bound = ""))]
struct RemoveComponentCommand<C> {
    entity: Entity,
    // Only the type is needed; no component value is stored.
    _marker: PhantomData<C>,
}
impl<C> WorldWritable for RemoveComponentCommand<C>
where
    C: Component,
{
    fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) {
        // Failure is logged rather than propagated (best-effort write).
        if let Err(err) = world.remove_component::<C>(self.entity) {
            tracing::error!(error = %err, "error removing component");
        }
    }
    fn write_components(&self) -> Vec<ComponentTypeId> { vec![ComponentTypeId::of::<C>()] }
    fn write_tags(&self) -> Vec<TagTypeId> { Vec::with_capacity(0) }
}
// A single queued operation held by a `CommandBuffer` until it is flushed via `write`.
#[allow(clippy::enum_variant_names)]
enum EntityCommand {
    // Apply a `WorldWritable` writer (insert/delete/add/remove commands).
    WriteWorld(Arc<dyn WorldWritable>),
    // Run an arbitrary closure with shared access to the world.
    // NOTE(review): no constructor for this variant is visible in this file — confirm callers.
    ExecWorld(Arc<dyn Fn(&World)>),
    // Run an arbitrary closure with exclusive access to the world (see `exec_mut`).
    ExecMutWorld(Arc<dyn Fn(&mut World)>),
}
/// A builder type which can be retrieved from the command buffer. This is the ideal use case for
/// inserting complex entities with multiple components and tags from a command buffer. Although
/// `add_component` will perform a new move operation on every addition, this allows the construction
/// of a single `insert` command for an entity, but without using the actual `insert` command
/// provided by the `CommandBuffer`
///
/// # Examples
///
/// Inserting an entity using the `EntityBuilder`:
///
/// ```
/// # use legion_core::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position(f32);
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Rotation(f32);
/// # let universe = Universe::new();
/// # let mut world = universe.create_world();
/// let mut command_buffer = CommandBuffer::new(&world);
/// command_buffer.start_entity()
///     .with_component(Position(123.0))
///     .with_component(Rotation(456.0))
///     .build();
/// command_buffer.write(&mut world);
/// ```
pub struct EntityBuilder<'a, TS = (), CS = ()> {
    cmd: &'a mut CommandBuffer,
    // Tags and components are accumulated as type-level cons lists (see `ConsAppend`/
    // `ConsFlatten`), so each `with_*` call returns a *new* builder type.
    tags: TS,
    components: CS,
}
impl<'a, TS, CS> EntityBuilder<'a, TS, CS>
where
    TS: 'static + Send + ConsFlatten,
    CS: 'static + Send + ConsFlatten,
{
    /// Adds a component to this builder, returning a new builder type containing that component type
    /// and its data.
    pub fn with_component<C: Component>(
        self,
        component: C,
    ) -> EntityBuilder<'a, TS, <CS as ConsAppend<C>>::Output>
    where
        CS: ConsAppend<C>,
        <CS as ConsAppend<C>>::Output: ConsFlatten,
    {
        EntityBuilder {
            cmd: self.cmd,
            // Append the component to the cons list; the tag list is carried through unchanged.
            components: ConsAppend::append(self.components, component),
            tags: self.tags,
        }
    }
    /// Adds a tag to this builder, returning a new builder type containing that tag type
    /// and its data.
    pub fn with_tag<T: Tag>(self, tag: T) -> EntityBuilder<'a, <TS as ConsAppend<T>>::Output, CS>
    where
        TS: ConsAppend<T>,
        <TS as ConsAppend<T>>::Output: ConsFlatten,
    {
        EntityBuilder {
            cmd: self.cmd,
            // Append the tag to the cons list; the component list is carried through unchanged.
            tags: ConsAppend::append(self.tags, tag),
            components: self.components,
        }
    }
    /// Finalizes this builder type and submits it to the `CommandBuffer`.
    /// Returns the entity that will be created when the buffer is flushed.
    pub fn build(self) -> Entity
    where
        <TS as ConsFlatten>::Output: TagSet + TagLayout + for<'b> Filter<ChunksetFilterData<'b>>,
        ComponentTupleSet<
            <CS as ConsFlatten>::Output,
            std::iter::Once<<CS as ConsFlatten>::Output>,
        >: ComponentSource,
    {
        // Flatten the cons lists into plain tuples and queue a single-entity insert;
        // `insert` returns the slice of entities created, of which there is exactly one.
        self.cmd.insert(
            self.tags.flatten(),
            std::iter::once(self.components.flatten()),
        )[0]
    }
}
/// A command buffer used to queue mutable changes to the world from a system. This buffer is automatically
/// flushed and refreshed at the beginning of every frame by `Schedule`. If `Schedule` is not used,
/// then the user needs to manually flush it by performing `CommandBuffer::write`.
///
/// This buffer operates as follows:
/// - All commands are queued as trait objects of type `WorldWritable`, to be executed when `CommandBuffer::write` is called.
/// - Entities are allocated at the time of `CommandBuffer::write` occurring, being directly allocated from the world
///   and cached internally in the system. This upper cache size can be changed via `SystemBuilder::with_command_buffer_size`
///   for specific systems, or globally via `World::set_command_buffer_size`. In the event the cached entity count is exceeded,
///   the cache will be refilled on demand from the world `EntityAllocator`.
///
/// This behavior exists because `EntityAllocator` is a shared lock within the world, so in order to reduce lock contention with many
/// systems running and adding entities, the `CommandBuffer` will cache the configured number of entities - reducing contention.
///
/// # Examples
///
/// Inserting an entity using the `CommandBuffer`:
///
/// ```
/// # use legion_core::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position(f32);
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Rotation(f32);
/// # let universe = Universe::new();
/// # let mut world = universe.create_world();
/// let mut command_buffer = CommandBuffer::new(&world);
/// let entity = command_buffer.start_entity().build();
///
/// command_buffer.add_component(entity, Position(123.0));
/// command_buffer.delete(entity);
///
/// command_buffer.write(&mut world);
/// ```
pub struct CommandBuffer {
    // The world this buffer was created from; `write` panics on a mismatch.
    world_id: WorldId,
    // FIFO queue: commands are pushed to the front and popped from the back at flush.
    commands: AtomicRefCell<VecDeque<EntityCommand>>,
    entity_allocator: Arc<EntityAllocator>,
    // Target size of `free_list`, restored by `resize` after every flush.
    preallocated_capacity: usize,
    // Entities pre-allocated from the world but not yet handed out.
    free_list: SmallVec<[Entity; 64]>,
    // Entities handed out this frame, awaiting the next `write`.
    pending_insertion: SmallVec<[Entity; 64]>,
}
// SAFETY: only one system in one execution ever accesses a given command buffer,
// and we guarantee the write operations of a command buffer occur in a safe manner
unsafe impl Send for CommandBuffer {}
unsafe impl Sync for CommandBuffer {}
impl CommandBuffer {
/// Creates a `CommandBuffer` with a custom capacity of cached Entity's to be collected every frame.
/// Allocating a command buffer in this manner will override `World::set_command_buffer_size` and
/// this system will always allocate the custom provide capacity of entities every frame.
///
/// This constructor will preallocate the first round of entities needed from the world.
pub fn new_with_capacity(world: &World, capacity: usize) -> Self {
// Pull free entities from the world.
let free_list =
SmallVec::from_iter((0..capacity).map(|_| world.entity_allocator.create_entity()));
Self {
world_id: world.id(),
free_list,
preallocated_capacity: capacity,
commands: Default::default(),
pending_insertion: SmallVec::new(),
entity_allocator: world.entity_allocator.clone(),
}
}
/// Creates a `CommandBuffer` with a custom capacity of cached Entity's to be collected every frame.
/// Allocating a command buffer in this manner will use the default `World::set_command_buffer_size`
/// value.
///
/// This constructor will preallocate the first round of entities needed from the world.
pub fn new(world: &World) -> Self {
let free_list = SmallVec::from_iter(
(0..world.command_buffer_size()).map(|_| world.entity_allocator.create_entity()),
);
Self {
world_id: world.id(),
free_list,
preallocated_capacity: world.command_buffer_size(),
commands: Default::default(),
pending_insertion: SmallVec::new(),
entity_allocator: world.entity_allocator.clone(),
}
}
/// Gets the ID of the world this command buffer belongs to.
pub fn world(&self) -> WorldId { self.world_id }
/// Changes the cached capacity of this `CommandBuffer` to the specified capacity. This includes shrinking
/// and growing the allocated entities, and possibly returning them to the entity allocator in the
/// case of a shrink.
///
/// This function does *NOT* set the `CommandBuffer::custom_capacity` override.
#[allow(clippy::comparison_chain)]
fn resize(&mut self) {
let allocator = &self.entity_allocator;
let free_list = &mut self.free_list;
let capacity = self.preallocated_capacity;
if free_list.len() < capacity {
for entity in allocator.create_entities().take(capacity - free_list.len()) {
free_list.push(entity);
}
} else if free_list.len() > capacity {
// Free the entities
(free_list.len() - capacity..capacity).for_each(|_| {
allocator.delete_entity(free_list.pop().unwrap());
});
}
}
/// Flushes this command buffer, draining all stored commands and writing them to the world.
///
/// Command flushes are performed in a FIFO manner, allowing for reliable, linear commands being
/// executed in the order they were provided.
pub fn write(&mut self, world: &mut World) {
let span = span!(Level::TRACE, "Draining command buffer");
let _guard = span.enter();
if self.world_id != world.id() {
panic!("command buffers may only write into their parent world");
}
while let Some(command) = self.commands.get_mut().pop_back() {
match command {
EntityCommand::WriteWorld(ptr) => ptr.write(world, self),
EntityCommand::ExecMutWorld(closure) => closure(world),
EntityCommand::ExecWorld(closure) => closure(world),
}
}
self.pending_insertion.clear();
// Refill our entity buffer from the world
self.resize();
}
/// Creates an entity builder for constructing a new entity.
pub fn start_entity(&mut self) -> EntityBuilder<(), ()> {
EntityBuilder {
cmd: self,
tags: (),
components: (),
}
}
/// Allocates a new entity.
fn allocate_entity(&mut self) -> Entity {
if self.free_list.is_empty() {
self.resize();
}
let entity = self
.free_list
.pop()
.unwrap_or_else(|| self.entity_allocator.create_entity());
self.pending_insertion.push(entity);
entity
}
/// Executes an arbitrary closure against the mutable world, allowing for queued exclusive
/// access to the world.
pub fn exec_mut<F>(&self, f: F)
where
F: 'static + Fn(&mut World),
{
self.commands
.get_mut()
.push_front(EntityCommand::ExecMutWorld(Arc::new(f)));
}
/// Inserts an arbitrary implementor of the `WorldWritable` trait into the command queue.
/// This can be leveraged for creating custom `WorldWritable` trait implementors, and is used
/// internally for the default writers.
fn insert_writer<W>(&self, writer: W)
where
W: 'static + WorldWritable,
{
self.commands
.get_mut()
.push_front(EntityCommand::WriteWorld(Arc::new(writer)));
}
/// Queues an insertion into the world. This command follows the same syntax as
/// the normal `World::insert`, returning the entities created for this command.
pub fn insert<T, C>(&mut self, tags: T, components: C) -> &[Entity]
where
T: 'static + TagSet + TagLayout + for<'a> Filter<ChunksetFilterData<'a>>,
C: 'static + IntoComponentSource,
{
let components = components.into();
let start = self.pending_insertion.len();
let count = components.len();
self.pending_insertion.reserve(count);
for _ in 0..count {
self.allocate_entity();
}
let range = start..self.pending_insertion.len();
self.commands
.get_mut()
.push_front(EntityCommand::WriteWorld(Arc::new(InsertBufferedCommand {
write_components: Vec::default(),
write_tags: Vec::default(),
tags,
components,
entities: range.clone(),
})));
&self.pending_insertion[range]
}
/// Queues the deletion of an entity in the command buffer. This writer calls `World::delete`
pub fn delete(&self, entity: Entity) { self.insert_writer(DeleteEntityCommand(entity)); }
/// Queues the addition of a component from an entity in the command buffer.
/// This writer calls `World::add_component`
pub fn add_component<C: Component>(&self, entity: Entity, component: C) {
self.insert_writer(AddComponentCommand { entity, component });
}
/// Queues the removal of a component from an entity in the command buffer.
/// This writer calls `World::remove_component`
pub fn remove_component<C: Component>(&self, entity: Entity) {
self.insert_writer(RemoveComponentCommand {
entity,
_marker: PhantomData::<C>::default(),
});
}
/// Queues the addition of a tag from an entity in the command buffer.
/// This writer calls `World::add_tag`
pub fn add_tag<T: Tag>(&self, entity: Entity, tag: T) {
self.insert_writer(AddTagCommand { entity, tag });
}
/// Queues the removal of a tag from an entity in the command buffer.
/// This writer calls `World::remove_tag`
pub fn remove_tag<T: Tag>(&self, entity: Entity) {
self.insert_writer(RemoveTagCommand {
entity,
_marker: PhantomData::<T>::default(),
});
}
/// Returns the current number of commands already queued in this `CommandBuffer` instance.
#[inline]
pub fn len(&self) -> usize { self.commands.get().len() }
/// Returns true if this `CommandBuffer` is currently empty and contains no writers.
#[inline]
pub fn is_empty(&self) -> bool { self.commands.get().len() == 0 }
}
impl Drop for CommandBuffer {
    /// Returns every entity still cached in this buffer — both the unused
    /// free list and any pending insertions — back to the world's allocator.
    fn drop(&mut self) {
        for list in [&mut self.free_list, &mut self.pending_insertion] {
            while let Some(entity) = list.pop() {
                self.entity_allocator.delete_entity(entity);
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::prelude::*;
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Pos(f32, f32, f32);
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Vel(f32, f32, f32);
    #[derive(Default)]
    struct TestResource(pub i32);
    // Verifies that entities built via `start_entity` + `add_component` become
    // queryable after the buffer is written to the world.
    #[test]
    fn create_entity_test() {
        let _ = tracing_subscriber::fmt::try_init();
        let universe = Universe::new();
        let mut world = universe.create_world();
        // Only the *length* of this vec is used below; the entities themselves
        // are built through the command buffer with matching `Pos` values.
        let components = vec![
            (Pos(1., 2., 3.), Vel(0.1, 0.2, 0.3)),
            (Pos(4., 5., 6.), Vel(0.4, 0.5, 0.6)),
        ];
        let components_len = components.len();
        //world.entity_allocator.get_block()
        let mut command = CommandBuffer::new(&world);
        let entity1 = command.start_entity().build();
        let entity2 = command.start_entity().build();
        command.add_component(entity1, Pos(1., 2., 3.));
        command.add_component(entity2, Pos(4., 5., 6.));
        command.write(&mut world);
        let query = Read::<Pos>::query();
        let mut count = 0;
        for _ in query.iter_entities(&world) {
            count += 1;
        }
        assert_eq!(components_len, count);
    }
    // Verifies that a buffered bulk `insert` lands all entities in the world.
    #[test]
    fn simple_write_test() {
        let _ = tracing_subscriber::fmt::try_init();
        let universe = Universe::new();
        let mut world = universe.create_world();
        let components = vec![
            (Pos(1., 2., 3.), Vel(0.1, 0.2, 0.3)),
            (Pos(4., 5., 6.), Vel(0.4, 0.5, 0.6)),
        ];
        let components_len = components.len();
        //world.entity_allocator.get_block()
        let mut command = CommandBuffer::new(&world);
        let _ = command.insert((), components);
        // Assert writing checks
        // TODO:
        //assert_eq!(
        //    vec![ComponentTypeId::of::<Pos>(), ComponentTypeId::of::<Vel>()],
        //    command.write_components()
        //);
        command.write(&mut world);
        let query = Read::<Pos>::query();
        let mut count = 0;
        for _ in query.iter_entities_mut(&mut world) {
            count += 1;
        }
        assert_eq!(components_len, count);
    }
}

View file

@ -1,155 +0,0 @@
// Things happen here, and they work.
// ,---.
// / |
// / |
// / |
// / |
// ___,' |
// < -' :
// `-.__..--'``-,_\_
// |o/ ` :,.)_`>
// :/ ` ||/)
// (_.).__,-` |\
// /( `.`` `| :
// \'`-.) ` ; ;
// | ` /-<
// | ` / `.
// ,-_-..____ /| ` :__..-'\
// ,'-.__\\ ``-./ :` ; \
//`\ `\ `\\ \ : ( ` / , `. \
// \` \ \\ | | ` : : .\ \
// \ `\_ )) : ; | | ): :
// (`-.-'\ || |\ \ ` ; ; | |
// \-_ `;;._ ( ` / /_ | |
// `-.-.// ,'`-._\__/_,' ; |
// \:: : / ` , / |
// || | ( ,' / / |
// || ,' / |
/// Prepend a value (and its type) onto the front of a cons list.
pub trait ConsPrepend<T> {
    /// The cons list type produced by the prepend.
    type Output;
    /// Prepends `value`, returning the new cons list.
    fn prepend(self, value: T) -> Self::Output;
}
/// Prepending to the empty list yields a single-element list.
impl<T> ConsPrepend<T> for () {
    type Output = (T, Self);
    fn prepend(self, value: T) -> Self::Output { (value, self) }
}
/// Prepending to a non-empty list pushes the value in front of the head.
impl<T, A, B> ConsPrepend<T> for (A, B) {
    type Output = (T, Self);
    fn prepend(self, value: T) -> Self::Output { (value, self) }
}
/// Append a new type onto the end of a cons list.
/// (DOCFIX: the original doc comments were copy-pasted from `ConsPrepend`
/// and incorrectly said "Prepend".)
pub trait ConsAppend<T> {
    /// Result of append
    type Output;
    /// Append to runtime cons value
    fn append(self, t: T) -> Self::Output;
}
impl<T> ConsAppend<T> for () {
    type Output = (T, Self);
    // Appending to the empty list yields a single-element list.
    fn append(self, t: T) -> Self::Output { (t, ()) }
}
impl<T, A, B: ConsAppend<T>> ConsAppend<T> for (A, B) {
    type Output = (A, <B as ConsAppend<T>>::Output);
    // Recurse down the tail until the terminating `()` is reached.
    fn append(self, t: T) -> Self::Output {
        let (a, b) = self;
        (a, b.append(t))
    }
}
/// Transform a right-nested cons list into a flat tuple.
pub trait ConsFlatten {
    /// The flat tuple type produced by flattening.
    type Output;
    /// Flattens the runtime cons value into a tuple.
    fn flatten(self) -> Self::Output;
}
/// The empty cons list flattens to the unit tuple.
impl ConsFlatten for () {
    type Output = ();
    fn flatten(self) -> Self::Output {}
}
// Builds a right-nested cons-list *type or value* from a flat token list:
// `cons!(a, b, c)` expands to `(a, (b, (c, ())))`.
macro_rules! cons {
    () => (
        ()
    );
    ($head:tt) => (
        ($head, ())
    );
    ($head:tt, $($tail:tt),*) => (
        ($head, cons!($($tail),*))
    );
}
// Implements `ConsFlatten` for every cons list up to the arity of the
// initial invocation: each recursion step (`@`) drops the head identifier
// and re-invokes the macro on the remaining tail.
macro_rules! impl_flatten {
    ($($items:ident),*) => {
        #[allow(unused_parens)] // This is added because the nightly compiler complains
        impl<$($items),*> ConsFlatten for cons!($($items),*)
        {
            type Output = ($($items),*);
            fn flatten(self) -> Self::Output {
                // Destructure the nested cons value and re-emit it as a flat tuple.
                #[allow(non_snake_case)]
                let cons!($($items),*) = self;
                ($($items),*)
            }
        }
        impl_flatten!(@ $($items),*);
    };
    (@ $head:ident, $($tail:ident),*) => {
        impl_flatten!($($tail),*);
    };
    (@ $head:ident) => {};
}
impl_flatten!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z);
// NOTE(review): unused empty stub with no visible callers — candidate for removal.
fn test_api() {}
#[cfg(test)]
mod tests {
    use super::*;
    // `cons!` should build right-nested pairs terminated by `()`.
    #[test]
    fn cons_macro() {
        #![allow(clippy::unit_cmp)]
        assert_eq!(cons!(), ());
        assert_eq!(cons!(1), (1, ()));
        assert_eq!(cons!(1, 2, 3, 4), (1, (2, (3, (4, ())))));
    }
    // `prepend` pushes values onto the *front* of the list.
    #[test]
    fn cons_prepend() {
        assert_eq!(().prepend(123), (123, ()));
        assert_eq!(
            cons!(1, 2, 3, 4, 5).prepend(123).prepend(15),
            cons!(15, 123, 1, 2, 3, 4, 5)
        );
    }
    // `append` pushes values onto the *back* of the list.
    #[test]
    fn cons_append() {
        assert_eq!(().append(123), (123, ()));
        assert_eq!(
            cons!(1, 2, 3, 4, 5).append(123).append(15),
            cons!(1, 2, 3, 4, 5, 123, 15)
        );
    }
    // `flatten` collapses the nesting into a plain tuple (unit and single-element included).
    #[test]
    fn cons_flatten() {
        #![allow(clippy::unit_cmp)]
        assert_eq!(().flatten(), ());
        assert_eq!((1, ()).flatten(), 1);
        assert_eq!(cons!(1, 2, 3, 4, 5).flatten(), (1, 2, 3, 4, 5));
    }
}

View file

@ -1,482 +0,0 @@
use crate::index::ArchetypeIndex;
use crate::index::ChunkIndex;
use crate::index::ComponentIndex;
use crate::index::SetIndex;
use parking_lot::{Mutex, RwLock, RwLockWriteGuard};
use std::fmt::Display;
use std::num::Wrapping;
use std::ops::Deref;
use std::ops::DerefMut;
use std::sync::Arc;
pub type EntityIndex = u32;
pub(crate) type EntityVersion = Wrapping<u32>;
/// A generational handle to an entity: a slot `index` plus the `version`
/// that slot had when the handle was issued. A stale handle (old version)
/// compares unequal to the slot's current handle.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct Entity {
    index: EntityIndex,
    version: EntityVersion,
}
impl Entity {
    pub(crate) fn new(index: EntityIndex, version: EntityVersion) -> Entity {
        Entity { index, version }
    }
    /// Returns the raw slot index of this handle.
    pub fn index(self) -> EntityIndex { self.index }
}
impl Display for Entity {
    /// Formats the handle as `index#version`.
    fn fmt(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        let Entity { index, version } = *self;
        write!(formatter, "{}#{}", index, version)
    }
}
// The full storage address of a live entity: which archetype, which chunkset
// within it, which chunk within that, and the slot within the chunk.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct EntityLocation {
    archetype_index: ArchetypeIndex,
    set_index: SetIndex,
    chunk_index: ChunkIndex,
    component_index: ComponentIndex,
}
impl EntityLocation {
    pub(crate) fn new(
        archetype_index: ArchetypeIndex,
        set_index: SetIndex,
        chunk_index: ChunkIndex,
        component_index: ComponentIndex,
    ) -> Self {
        EntityLocation {
            archetype_index,
            set_index,
            chunk_index,
            component_index,
        }
    }
    /// The archetype holding the entity.
    pub fn archetype(&self) -> ArchetypeIndex { self.archetype_index }
    /// The chunkset within the archetype.
    pub fn set(&self) -> SetIndex { self.set_index }
    /// The chunk within the chunkset.
    pub fn chunk(&self) -> ChunkIndex { self.chunk_index }
    /// The slot within the chunk.
    pub fn component(&self) -> ComponentIndex { self.component_index }
}
// Sparse map from entity index to its storage location, bucketed into
// lazily-allocated blocks of `BlockAllocator::BLOCK_SIZE` entries.
pub(crate) struct Locations {
    blocks: Vec<Option<Vec<EntityLocation>>>,
}
impl Locations {
    pub fn new() -> Self { Locations { blocks: Vec::new() } }
    // Splits an entity index into (block number, offset within block).
    fn index(entity: EntityIndex) -> (usize, usize) {
        let block = entity as usize / BlockAllocator::BLOCK_SIZE;
        let index = entity as usize - block * BlockAllocator::BLOCK_SIZE;
        (block, index)
    }
    // Looks up the stored location; `None` if the block was never allocated.
    // NOTE: a default (all-zero) location is returned for slots in an
    // allocated block that were never `set` — callers must only query live entities.
    pub fn get(&self, entity: Entity) -> Option<EntityLocation> {
        let (block, index) = Locations::index(entity.index());
        self.blocks
            .get(block)
            .map(|b| b.as_ref())
            .flatten()
            .map(|b| b[index])
    }
    // Records the location, growing the block list and materializing the
    // target block (filled with zero locations) on demand.
    pub fn set(&mut self, entity: Entity, location: EntityLocation) {
        let (block_index, index) = Locations::index(entity.index());
        if self.blocks.len() <= block_index {
            let fill = block_index - self.blocks.len() + 1;
            self.blocks.extend((0..fill).map(|_| None));
        }
        let block_opt = &mut self.blocks[block_index];
        let block = block_opt.get_or_insert_with(|| {
            std::iter::repeat(EntityLocation::new(
                ArchetypeIndex(0),
                SetIndex(0),
                ChunkIndex(0),
                ComponentIndex(0),
            ))
            .take(BlockAllocator::BLOCK_SIZE)
            .collect()
        });
        block[index] = location;
    }
}
/// Hands out fixed-size, non-overlapping ranges of entity indices, recycling
/// ranges that have been returned via `free`.
#[derive(Debug)]
pub(crate) struct BlockAllocator {
    // Total index space carved out so far; also the start of the next fresh block.
    allocated: usize,
    // Blocks returned by `free`, reused before any new range is carved.
    free: Vec<EntityBlock>,
}
impl BlockAllocator {
    const BLOCK_SIZE: usize = 1024;
    pub(crate) fn new() -> Self {
        BlockAllocator {
            allocated: 0,
            free: Vec::new(),
        }
    }
    /// Takes a recycled block if one is available, otherwise carves a fresh
    /// `BLOCK_SIZE` range off the end of the allocated index space.
    pub fn allocate(&mut self) -> EntityBlock {
        match self.free.pop() {
            Some(recycled) => recycled,
            None => {
                let start = self.allocated as EntityIndex;
                self.allocated += Self::BLOCK_SIZE;
                EntityBlock::new(start, Self::BLOCK_SIZE)
            }
        }
    }
    /// Returns a block to the pool for later reuse.
    pub fn free(&mut self, block: EntityBlock) { self.free.push(block); }
}
// A contiguous range [start, start + len) of entity slots. `versions` grows
// lazily as slots are first allocated (so `versions.len()` is the number of
// slots ever used), and `free` holds indices of currently-dead slots.
#[derive(Debug)]
pub struct EntityBlock {
    start: EntityIndex,
    len: usize,
    versions: Vec<EntityVersion>,
    free: Vec<EntityIndex>,
}
impl EntityBlock {
    pub fn new(start: EntityIndex, len: usize) -> EntityBlock {
        EntityBlock {
            start,
            len,
            versions: Vec::with_capacity(len),
            free: Vec::new(),
        }
    }
    // Converts a global entity index to an offset within this block.
    fn index(&self, index: EntityIndex) -> usize { (index - self.start) as usize }
    pub fn in_range(&self, index: EntityIndex) -> bool {
        index >= self.start && index < (self.start + self.len as u32)
    }
    // `None` when the index was never allocated by this block; otherwise
    // whether the handle's version matches the slot's current version.
    pub fn is_alive(&self, entity: Entity) -> Option<bool> {
        if entity.index >= self.start {
            let i = self.index(entity.index);
            self.versions.get(i).map(|v| *v == entity.version)
        } else {
            None
        }
    }
    // Reuses a freed slot if possible (keeping its bumped version), otherwise
    // brings a brand-new slot into use at version 1. `None` when the block is full.
    pub fn allocate(&mut self) -> Option<Entity> {
        if let Some(index) = self.free.pop() {
            let i = self.index(index);
            Some(Entity::new(index, self.versions[i]))
        } else if self.versions.len() < self.len {
            let index = self.start + self.versions.len() as EntityIndex;
            self.versions.push(Wrapping(1));
            Some(Entity::new(index, Wrapping(1)))
        } else {
            None
        }
    }
    // Kills a live entity: bumps the slot version (invalidating the handle)
    // and recycles the slot. Returns false if the handle was already dead.
    pub fn free(&mut self, entity: Entity) -> bool {
        if let Some(true) = self.is_alive(entity) {
            let i = self.index(entity.index);
            self.versions[i] += Wrapping(1);
            self.free.push(entity.index);
            true
        } else {
            false
        }
    }
}
// Sparse collection of entity blocks indexed by block number
// (entity index / BLOCK_SIZE); never-populated slots are `None`.
#[derive(Debug)]
struct Blocks {
    blocks: Vec<Option<EntityBlock>>,
}
impl Blocks {
    fn new() -> Self { Self { blocks: Vec::new() } }
    // Block number owning the given entity index.
    pub fn index(entity: EntityIndex) -> usize { entity as usize / BlockAllocator::BLOCK_SIZE }
    fn find(&self, entity: EntityIndex) -> Option<&EntityBlock> {
        let i = Blocks::index(entity);
        self.blocks.get(i).map(|b| b.as_ref()).flatten()
    }
    fn find_mut(&mut self, entity: EntityIndex) -> Option<&mut EntityBlock> {
        let i = Blocks::index(entity);
        self.blocks.get_mut(i).map(|b| b.as_mut()).flatten()
    }
    // Inserts a block at the slot determined by its start index, padding the
    // vec with `None` as needed. Returns the slot index.
    fn push(&mut self, block: EntityBlock) -> usize {
        let i = Blocks::index(block.start);
        if self.blocks.len() > i {
            self.blocks[i] = Some(block);
        } else {
            let fill = i - self.blocks.len();
            self.blocks.extend((0..fill).map(|_| None));
            self.blocks.push(Some(block));
        }
        i
    }
    // Moves every block out of `other` into `self` (used when merging allocators).
    fn append(&mut self, other: &mut Blocks) {
        for block in other.blocks.drain(..) {
            if let Some(block) = block {
                self.push(block);
            }
        }
    }
}
impl Deref for Blocks {
    type Target = [Option<EntityBlock>];
    fn deref(&self) -> &Self::Target { self.blocks.deref() }
}
impl DerefMut for Blocks {
    fn deref_mut(&mut self) -> &mut Self::Target { self.blocks.deref_mut() }
}
/// Manages the allocation and deletion of `Entity` IDs within a world.
#[derive(Debug)]
pub struct EntityAllocator {
    // Shared with sibling allocators (e.g. other worlds in the same universe)
    // so that entity index ranges never overlap across them.
    allocator: Arc<Mutex<BlockAllocator>>,
    blocks: RwLock<Blocks>,
}
impl EntityAllocator {
    pub(crate) fn new(allocator: Arc<Mutex<BlockAllocator>>) -> Self {
        EntityAllocator {
            allocator,
            blocks: RwLock::new(Blocks::new()),
        }
    }
    /// Determines if the given `Entity` is considered alive.
    pub fn is_alive(&self, entity: Entity) -> bool {
        self.blocks
            .read()
            .find(entity.index())
            .map(|b| b.is_alive(entity))
            .flatten()
            .unwrap_or(false)
    }
    /// Allocates a new unused `Entity` ID.
    pub fn create_entity(&self) -> Entity { self.create_entities().next().unwrap() }
    /// Creates an iterator which allocates new `Entity` IDs.
    /// NOTE: the iterator holds the blocks write lock for its entire lifetime.
    pub fn create_entities(&self) -> CreateEntityIter {
        CreateEntityIter {
            blocks: self.blocks.write(),
            allocator: &self.allocator,
            current_block: None,
        }
    }
    // Returns true if the entity was alive and is now freed.
    pub(crate) fn delete_entity(&self, entity: Entity) -> bool {
        self.blocks
            .write()
            .find_mut(entity.index())
            .map(|b| b.free(entity))
            .unwrap_or(false)
    }
    pub(crate) fn delete_all_entities(&self) {
        for block in self.blocks.write().blocks.drain(..) {
            if let Some(mut block) = block {
                // If any entity in the block is in an allocated state, clear
                // and repopulate the free list. This forces all entities into an
                // unallocated state. Bump versions of all entity indexes to
                // ensure that we don't reuse the same entity.
                if block.free.len() < block.versions.len() {
                    block.free.clear();
                    for (i, version) in block.versions.iter_mut().enumerate() {
                        *version += Wrapping(1);
                        block.free.push(i as u32 + block.start);
                    }
                }
                // Hand the emptied block back to the shared pool for reuse.
                self.allocator.lock().free(block);
            }
        }
    }
    // Absorbs another allocator's blocks; both must share the same block pool.
    pub(crate) fn merge(&self, other: EntityAllocator) {
        assert!(Arc::ptr_eq(&self.allocator, &other.allocator));
        self.blocks.write().append(&mut *other.blocks.write());
    }
}
impl Drop for EntityAllocator {
    // Returning blocks on drop keeps the shared pool from leaking index ranges.
    fn drop(&mut self) { self.delete_all_entities(); }
}
// Infinite iterator that allocates entities on demand. Holds the allocator's
// blocks write lock for its whole lifetime (see `create_entities`).
pub struct CreateEntityIter<'a> {
    // Slot index of the block that last produced an entity (fast path).
    current_block: Option<usize>,
    blocks: RwLockWriteGuard<'a, Blocks>,
    allocator: &'a Mutex<BlockAllocator>,
}
impl<'a> Iterator for CreateEntityIter<'a> {
    type Item = Entity;
    // Never returns `None`: a new block is requested from the shared pool
    // whenever all existing blocks are exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        // try and allocate from the block we last used
        if let Some(block) = self.current_block {
            if let Some(entity) = self.blocks[block].as_mut().unwrap().allocate() {
                return Some(entity);
            }
        }
        // search for a block with spare entities
        for (i, allocated) in self
            .blocks
            .iter_mut()
            .enumerate()
            .rev()
            .filter(|(_, b)| b.is_some())
            .map(|(i, b)| (i, b.as_mut().unwrap().allocate()))
        {
            if let Some(entity) = allocated {
                self.current_block = Some(i);
                return Some(entity);
            }
        }
        // allocate a new block
        let mut block = self.allocator.lock().allocate();
        // A fresh block always has at least one free slot.
        let entity = block.allocate().unwrap();
        self.current_block = Some(self.blocks.push(block));
        Some(entity)
    }
}
#[cfg(test)]
mod tests {
    use crate::entity::*;
    use std::collections::HashSet;
    #[test]
    fn create_entity() {
        let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
        allocator.create_entity();
    }
    // Fits inside a single 1024-slot block.
    #[test]
    fn create_entity_many() {
        let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
        for _ in 0..512 {
            allocator.create_entity();
        }
    }
    // Spans multiple blocks (3000 > BLOCK_SIZE).
    #[test]
    fn create_entity_many_blocks() {
        let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
        for _ in 0..3000 {
            allocator.create_entity();
        }
    }
    // Exercises slot reuse: allocate, free everything, repeat.
    #[test]
    fn create_entity_recreate() {
        let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
        for _ in 0..3 {
            let entities: Vec<Entity> = (0..512).map(|_| allocator.create_entity()).collect();
            for e in entities {
                allocator.delete_entity(e);
            }
        }
    }
    #[test]
    fn is_alive_allocated() {
        let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
        let entity = allocator.create_entity();
        assert_eq!(true, allocator.is_alive(entity));
    }
    // A handle fabricated out of thin air must not read as alive.
    #[test]
    fn is_alive_unallocated() {
        let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
        let entity = Entity::new(10 as EntityIndex, Wrapping(10));
        assert_eq!(false, allocator.is_alive(entity));
    }
    // Deletion bumps the slot version, invalidating the old handle.
    #[test]
    fn is_alive_killed() {
        let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
        let entity = allocator.create_entity();
        allocator.delete_entity(entity);
        assert_eq!(false, allocator.is_alive(entity));
    }
    #[test]
    fn delete_entity_was_alive() {
        let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
        let entity = allocator.create_entity();
        assert_eq!(true, allocator.delete_entity(entity));
    }
    // Double-delete must report failure the second time.
    #[test]
    fn delete_entity_was_dead() {
        let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
        let entity = allocator.create_entity();
        allocator.delete_entity(entity);
        assert_eq!(false, allocator.delete_entity(entity));
    }
    #[test]
    fn delete_entity_was_unallocated() {
        let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
        let entity = Entity::new(10 as EntityIndex, Wrapping(10));
        assert_eq!(false, allocator.delete_entity(entity));
    }
    // Allocators sharing one block pool must never hand out overlapping IDs,
    // and neither should consider the other's entities alive.
    #[test]
    fn multiple_allocators_unique_ids() {
        let blocks = Arc::from(Mutex::new(BlockAllocator::new()));
        let allocator_a = EntityAllocator::new(blocks.clone());
        let allocator_b = EntityAllocator::new(blocks);
        let mut entities_a = HashSet::<Entity>::default();
        let mut entities_b = HashSet::<Entity>::default();
        for _ in 0..5 {
            entities_a.extend((0..1500).map(|_| allocator_a.create_entity()));
            entities_b.extend((0..1500).map(|_| allocator_b.create_entity()));
        }
        assert_eq!(true, entities_a.is_disjoint(&entities_b));
        for e in entities_a {
            assert_eq!(true, allocator_a.is_alive(e));
            assert_eq!(false, allocator_b.is_alive(e));
        }
        for e in entities_b {
            assert_eq!(false, allocator_a.is_alive(e));
            assert_eq!(true, allocator_b.is_alive(e));
        }
    }
}

View file

@ -1,136 +0,0 @@
use crate::entity::Entity;
use crate::filter::{
ArchetypeFilterData, ChunkFilterData, ChunksetFilterData, EntityFilter, Filter, FilterResult,
};
use crate::index::ArchetypeIndex;
use crate::index::ChunkIndex;
use crate::index::SetIndex;
use crate::storage::ArchetypeId;
use crate::storage::ChunkId;
use crossbeam_channel::{Sender, TrySendError};
use std::sync::Arc;
/// Events emitted by a world to subscribers. See `World.subscribe(Sender, EntityFilter)`.
// `Clone` is required because one event is fanned out to every matching subscriber.
#[derive(Debug, Clone)]
pub enum Event {
    /// A new archetype has been created.
    ArchetypeCreated(ArchetypeId),
    /// A new chunk has been created.
    ChunkCreated(ChunkId),
    /// An entity has been inserted into a chunk.
    EntityInserted(Entity, ChunkId),
    /// An entity has been removed from a chunk.
    EntityRemoved(Entity, ChunkId),
}
// Object-safe filter interface used to decide which subscribers receive
// events for a given archetype / chunkset / chunk.
pub(crate) trait EventFilter: Send + Sync + 'static {
    fn matches_archetype(&self, data: ArchetypeFilterData, index: ArchetypeIndex) -> bool;
    fn matches_chunkset(&self, data: ChunksetFilterData, index: SetIndex) -> bool;
    fn matches_chunk(&self, data: ChunkFilterData, index: ChunkIndex) -> bool;
}
// Adapts a statically-typed `EntityFilter` to the object-safe `EventFilter`
// trait. Each method checks whether the element at the given index passes
// the corresponding stage (archetype/chunkset/chunk) of the wrapped filter.
pub(crate) struct EventFilterWrapper<T: EntityFilter + Sync + 'static>(pub T);
impl<T: EntityFilter + Sync + 'static> EventFilter for EventFilterWrapper<T> {
    fn matches_archetype(
        &self,
        data: ArchetypeFilterData,
        ArchetypeIndex(index): ArchetypeIndex,
    ) -> bool {
        let (filter, _, _) = self.0.filters();
        // Walk the filter's element stream to the target index and test it.
        if let Some(element) = filter.collect(data).nth(index) {
            return filter.is_match(&element).is_pass();
        }
        false
    }
    fn matches_chunkset(&self, data: ChunksetFilterData, SetIndex(index): SetIndex) -> bool {
        let (_, filter, _) = self.0.filters();
        if let Some(element) = filter.collect(data).nth(index) {
            return filter.is_match(&element).is_pass();
        }
        false
    }
    fn matches_chunk(&self, data: ChunkFilterData, ChunkIndex(index): ChunkIndex) -> bool {
        let (_, _, filter) = self.0.filters();
        if let Some(element) = filter.collect(data).nth(index) {
            return filter.is_match(&element).is_pass();
        }
        false
    }
}
// A single event subscription: the filter deciding which events apply,
// and the channel the matching events are delivered on.
#[derive(Clone)]
pub(crate) struct Subscriber {
    pub filter: Arc<dyn EventFilter>,
    pub sender: Sender<Event>,
}
impl Subscriber {
    pub fn new(filter: Arc<dyn EventFilter>, sender: Sender<Event>) -> Self {
        Self { filter, sender }
    }
}
/// The set of subscribers attached to some storage scope; supports fan-out
/// delivery and narrowing to the subset matching a storage element.
#[derive(Clone)]
pub(crate) struct Subscribers {
    subscribers: Vec<Subscriber>,
}
impl Subscribers {
    pub fn new() -> Self {
        Self {
            subscribers: Vec::new(),
        }
    }
    /// Registers a new subscriber.
    pub fn push(&mut self, subscriber: Subscriber) { self.subscribers.push(subscriber); }
    /// Sends `message` to every subscriber, pruning those whose channel has
    /// disconnected. NOTE: a full (bounded) channel drops the message for
    /// that subscriber without pruning it.
    pub fn send(&mut self, message: Event) {
        // Iterate in reverse so `swap_remove` (which back-fills from the end)
        // never moves an element we have not yet visited.
        for i in (0..self.subscribers.len()).rev() {
            if let Err(error) = self.subscribers[i].sender.try_send(message.clone()) {
                if let TrySendError::Disconnected(_) = error {
                    self.subscribers.swap_remove(i);
                }
            }
        }
    }
    // Shared implementation of the `matches_*` methods below (previously
    // triplicated): clones the subset of subscribers accepted by `predicate`.
    fn filtered(&self, predicate: impl Fn(&Subscriber) -> bool) -> Self {
        let subscribers = self
            .subscribers
            .iter()
            .filter(|sub| predicate(sub))
            .cloned()
            .collect();
        Self { subscribers }
    }
    /// Subscribers whose filter accepts the given archetype.
    pub fn matches_archetype(&self, data: ArchetypeFilterData, index: ArchetypeIndex) -> Self {
        self.filtered(|sub| sub.filter.matches_archetype(data, index))
    }
    /// Subscribers whose filter accepts the given chunkset.
    pub fn matches_chunkset(&self, data: ChunksetFilterData, index: SetIndex) -> Self {
        self.filtered(|sub| sub.filter.matches_chunkset(data, index))
    }
    /// Subscribers whose filter accepts the given chunk.
    pub fn matches_chunk(&self, data: ChunkFilterData, index: ChunkIndex) -> Self {
        self.filtered(|sub| sub.filter.matches_chunk(data, index))
    }
}
impl Default for Subscribers {
fn default() -> Self { Subscribers::new() }
}

File diff suppressed because it is too large Load diff

View file

@ -1,59 +0,0 @@
use crate::entity::Entity;
use crate::storage::ArchetypeData;
use crate::storage::Chunkset;
use crate::storage::ComponentStorage;
use std::fmt;
use std::ops::Deref;
use std::ops::Index;
use std::ops::IndexMut;
/// Typed `usize` index into a collection of `Chunkset`s (see `impl_index!` invocations below).
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct SetIndex(pub usize);
/// Typed `usize` index into a collection of `ComponentStorage` chunks.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct ChunkIndex(pub usize);
/// Typed `usize` index into a collection of `ArchetypeData`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct ArchetypeIndex(pub usize);
/// Typed `usize` index into a collection of `Entity`s.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct ComponentIndex(pub usize);
/// Implements slice/`Vec` indexing, `Deref`-to-`usize`, and `Display` for a
/// `usize` newtype index, so `v[SetIndex(3)]` and `*idx` work as expected.
macro_rules! impl_index {
    ($idx:ty: $out:ty) => {
        impl Index<$idx> for [$out] {
            type Output = $out;
            #[inline(always)]
            fn index(&self, index: $idx) -> &Self::Output { &self[index.0] }
        }
        impl IndexMut<$idx> for [$out] {
            #[inline(always)]
            fn index_mut(&mut self, index: $idx) -> &mut Self::Output { &mut self[index.0] }
        }
        impl Index<$idx> for Vec<$out> {
            type Output = $out;
            #[inline(always)]
            fn index(&self, index: $idx) -> &Self::Output { &self[index.0] }
        }
        impl IndexMut<$idx> for Vec<$out> {
            #[inline(always)]
            fn index_mut(&mut self, index: $idx) -> &mut Self::Output { &mut self[index.0] }
        }
        impl Deref for $idx {
            type Target = usize;
            #[inline(always)]
            fn deref(&self) -> &usize { &self.0 }
        }
        impl fmt::Display for $idx {
            #[inline]
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                // Render exactly like the underlying usize.
                fmt::Display::fmt(&**self, f)
            }
        }
    };
}
// Tie each index newtype to the storage type it indexes (slice/Vec indexing,
// Deref to usize, and Display all come from the macro above).
impl_index!(SetIndex: Chunkset);
impl_index!(ChunkIndex: ComponentStorage);
impl_index!(ArchetypeIndex: ArchetypeData);
impl_index!(ComponentIndex: Entity);

View file

@ -1,220 +0,0 @@
use std::iter::repeat;
use std::iter::ExactSizeIterator;
use std::iter::FusedIterator;
use std::iter::Repeat;
use std::iter::Take;
use std::slice::Iter;
/// Iterates over the logical sub-slices of a `SliceVec`, yielding one
/// `&[T]` per recorded count.
#[derive(Clone)]
pub struct SliceVecIter<'a, T> {
    pub(crate) data: &'a [T],
    pub(crate) counts: &'a [usize],
}
impl<'a, T> Iterator for SliceVecIter<'a, T> {
    type Item = &'a [T];
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Pop the next count, then carve that many elements off the front
        // of the remaining data.
        let (&len, remaining_counts) = self.counts.split_first()?;
        let (front, remaining_data) = self.data.split_at(len);
        self.counts = remaining_counts;
        self.data = remaining_data;
        Some(front)
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.counts.len();
        (remaining, Some(remaining))
    }
    #[inline]
    fn count(self) -> usize { self.len() }
}
impl<'a, T> ExactSizeIterator for SliceVecIter<'a, T> {}
impl<'a, T> FusedIterator for SliceVecIter<'a, T> {}
/// A trait for iterators that can be divided in roughly half, used to split
/// work among threads in the parallel iterator.
pub trait FissileIterator: Iterator + Sized {
    /// Divides one iterator into two, roughly in half.
    ///
    /// The split point does not have to be the exact midpoint, but the
    /// closer it is, the better the parallel iterator will behave.
    ///
    /// Returns the two halves and the number of elements in the first one.
    /// That returned size must be exact.
    fn split(self) -> (Self, Self, usize);
}
impl<'a, T> FissileIterator for Iter<'a, T> {
    fn split(self) -> (Self, Self, usize) {
        let remaining = self.as_slice();
        let mid = remaining.len() / 2;
        let (front, back) = remaining.split_at(mid);
        (front.iter(), back.iter(), mid)
    }
}
impl<'a, T> FissileIterator for SliceVecIter<'a, T> {
    fn split(self) -> (Self, Self, usize) {
        // Halve the counts, then split the data at the element boundary
        // implied by the left-hand counts.
        let mid = self.counts.len() / 2;
        let (left_counts, right_counts) = self.counts.split_at(mid);
        let left_len: usize = left_counts.iter().sum();
        let (left_data, right_data) = self.data.split_at(left_len);
        let left = Self {
            data: left_data,
            counts: left_counts,
        };
        let right = Self {
            data: right_data,
            counts: right_counts,
        };
        (left, right, mid)
    }
}
/// Enumerating adapter for `FissileIterator`s: yields `(index, item)` pairs
/// while remaining splittable (`std::iter::Enumerate` does not implement
/// `FissileIterator`).
pub(crate) struct FissileEnumerate<I: FissileIterator> {
    iter: I,
    // Index that will be paired with the next yielded item.
    count: usize,
}
impl<I: FissileIterator> FissileEnumerate<I> {
    pub(crate) fn new(iter: I) -> Self { Self { iter, count: 0 } }
}
impl<I: FissileIterator> Iterator for FissileEnumerate<I>
where
    I: Iterator,
{
    type Item = (usize, <I as Iterator>::Item);
    #[inline]
    fn next(&mut self) -> Option<(usize, <I as Iterator>::Item)> {
        let item = self.iter.next()?;
        let index = self.count;
        self.count += 1;
        Some((index, item))
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
    #[inline]
    fn nth(&mut self, n: usize) -> Option<(usize, I::Item)> {
        // Skipping n items advances the numbering by n as well.
        let item = self.iter.nth(n)?;
        let index = self.count + n;
        self.count = index + 1;
        Some((index, item))
    }
    #[inline]
    fn count(self) -> usize { self.iter.count() }
    #[inline]
    fn fold<Acc, Fold>(self, init: Acc, mut fold: Fold) -> Acc
    where
        Fold: FnMut(Acc, Self::Item) -> Acc,
    {
        // Thread the running index through the inner fold.
        let mut index = self.count;
        self.iter.fold(init, move |acc, item| {
            let next = fold(acc, (index, item));
            index += 1;
            next
        })
    }
}
impl<I: FissileIterator> FissileIterator for FissileEnumerate<I> {
    fn split(self) -> (Self, Self, usize) {
        let (front, back, front_len) = self.iter.split();
        let left = Self {
            iter: front,
            count: self.count,
        };
        // The right half continues numbering where the left half ends.
        let right = Self {
            iter: back,
            count: self.count + front_len,
        };
        (left, right, front_len)
    }
}
// `FissileEnumerate` is exact-size / fused whenever its inner iterator is.
impl<I: ExactSizeIterator + FissileIterator> ExactSizeIterator for FissileEnumerate<I> {
    fn len(&self) -> usize { self.iter.len() }
}
impl<I: FusedIterator + FissileIterator> FusedIterator for FissileEnumerate<I> {}
impl<T: Clone> FissileIterator for Take<Repeat<T>> {
    /// Splits a bounded `repeat(value).take(n)` iterator into two halves.
    fn split(mut self) -> (Self, Self, usize) {
        // Pull one element out to obtain the repeated value. After this,
        // `size_hint` reports the number of *remaining* elements (n - 1).
        if let Some(value) = self.next() {
            let (len, len_max) = self.size_hint();
            assert_eq!(Some(len), len_max);
            // Re-add the element consumed above so both halves together
            // contain as many elements as the original iterator. The
            // previous code split `len` (= n - 1), silently dropping one
            // element and desynchronizing split sizes from other fissile
            // iterators of the same length (FissileZip::split asserts that
            // both children split at the same point).
            let total = len + 1;
            let first_part = total / 2;
            let second_part = total - first_part;
            (
                repeat(value.clone()).take(first_part),
                repeat(value).take(second_part),
                first_part,
            )
        } else {
            // Empty iterator: both halves are empty.
            (self.clone(), self, 0)
        }
    }
}
/// A zip iterator whose halves can be split for parallel iteration.
///
/// Assumes its child iterators always split at the same position; `split`
/// panics when that assumption is violated.
pub struct FissileZip<A, B> {
    a: A,
    b: B,
}
impl<A, B> FissileZip<A, B> {
    pub(crate) fn new(a: A, b: B) -> Self { Self { a, b } }
}
impl<A: Iterator, B: Iterator> Iterator for FissileZip<A, B> {
    type Item = (A::Item, B::Item);
    fn next(&mut self) -> Option<(A::Item, B::Item)> {
        // Only advance `b` when `a` produced an item, so a pair is emitted
        // exactly when both sides yield.
        let x = self.a.next()?;
        let y = self.b.next()?;
        Some((x, y))
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        let (a_lower, a_upper) = self.a.size_hint();
        let (b_lower, b_upper) = self.b.size_hint();
        // The zip is bounded by the shorter side.
        let upper = match (a_upper, b_upper) {
            (Some(x), Some(y)) => Some(x.min(y)),
            (x, y) => x.or(y),
        };
        (a_lower.min(b_lower), upper)
    }
}
impl<A: FissileIterator, B: FissileIterator> FissileIterator for FissileZip<A, B> {
    fn split(self) -> (Self, Self, usize) {
        let (a_front, a_back, a_len) = self.a.split();
        let (b_front, b_back, b_len) = self.b.split();
        // Both children must split at the same index, otherwise the zipped
        // halves would pair up mismatched elements.
        assert_eq!(a_len, b_len);
        (
            Self::new(a_front, b_front),
            Self::new(a_back, b_back),
            a_len,
        )
    }
}

View file

@ -1,30 +0,0 @@
#![allow(dead_code)]
pub mod borrow;
pub mod command;
pub mod cons;
pub mod entity;
pub mod event;
pub mod filter;
pub mod index;
pub mod iterator;
pub mod query;
pub mod storage;
pub mod world;
#[cfg(feature = "serialize")]
pub mod serialize;
mod tuple;
mod zip;
pub mod prelude {
pub use crate::{
command::CommandBuffer,
entity::Entity,
event::Event,
filter::filter_fns::*,
query::{IntoQuery, Query, Read, Tagged, TryRead, TryWrite, Write},
world::{Universe, World},
};
}

File diff suppressed because it is too large Load diff

View file

@ -1,749 +0,0 @@
use crate::{
entity::{Entity, EntityAllocator},
index::{ArchetypeIndex, ChunkIndex, SetIndex},
storage::{
ArchetypeData, ArchetypeDescription, Chunkset, ComponentMeta, ComponentTypeId, TagMeta,
TagStorage, TagTypeId,
},
world::World,
};
use serde::{
self,
de::{self, DeserializeSeed, Visitor},
Deserialize, Deserializer,
};
use std::{cell::RefCell, collections::HashMap, ptr::NonNull};
/// Returns a type that implements `serde::DeserializeSeed`.
/// Pass the returned value to your `serde::Deserializer`.
/// The caller must provide an implementation for `WorldDeserializer`.
pub fn deserializable<'a, 'b, WD: WorldDeserializer>(
    world: &'a mut World,
    deserialize_impl: &'b WD,
) -> WorldDeserialize<'a, 'b, WD> {
    WorldDeserialize {
        user: deserialize_impl,
        world,
    }
}
/// Deserializes data into the provided World using the provided `serde::Deserializer`.
/// The caller must provide an implementation for `WorldDeserializer`.
pub fn deserialize<'dd, 'a, 'b, WD: WorldDeserializer, D: Deserializer<'dd>>(
    world: &'a mut World,
    deserialize_impl: &'b WD,
    deserializer: D,
) -> Result<(), <D as Deserializer<'dd>>::Error> {
    let seed = deserializable(world, deserialize_impl);
    seed.deserialize(deserializer)
}
/// User must implement this trait to deserialize a World.
/// The implementation must match that of the `WorldSerializer` provided
/// when serializing the data that is to be deserialized by this impl.
pub trait WorldDeserializer {
    /// Deserializes an ArchetypeDescription
    fn deserialize_archetype_description<'de, D: Deserializer<'de>>(
        &self,
        deserializer: D,
    ) -> Result<ArchetypeDescription, <D as Deserializer<'de>>::Error>;
    /// Deserializes component data.
    /// `get_next_storage_fn` will return Some(component_data_ptr, num_elements) until all
    /// reserved memory has been exhausted, whereupon it will return None.
    /// `component_data_ptr` are pointers to reserved memory in chunks
    /// that have been reserved to accommodate the number of entities that were previously deserialized
    /// by `deserialize_entities`.
    ///
    /// # Safety
    ///
    /// The implementation must ensure `get_next_storage_fn` is called until it returns
    /// None, and that all memory returned by `get_next_storage_fn` is properly initialized
    /// before this function returns.
    fn deserialize_components<'de, D: Deserializer<'de>>(
        &self,
        deserializer: D,
        component_type: &ComponentTypeId,
        component_meta: &ComponentMeta,
        get_next_storage_fn: &mut dyn FnMut() -> Option<(NonNull<u8>, usize)>,
    ) -> Result<(), <D as Deserializer<'de>>::Error>;
    /// Deserializes tag data into a TagStorage.
    fn deserialize_tags<'de, D: Deserializer<'de>>(
        &self,
        deserializer: D,
        tag_type: &TagTypeId,
        tag_meta: &TagMeta,
        tags: &mut TagStorage,
    ) -> Result<(), <D as Deserializer<'de>>::Error>;
    /// Deserializes entity identifiers into the provided buffer.
    fn deserialize_entities<'de, D: Deserializer<'de>>(
        &self,
        deserializer: D,
        entity_allocator: &EntityAllocator,
        entities: &mut Vec<Entity>,
    ) -> Result<(), <D as Deserializer<'de>>::Error>;
}
/// Implements `DeserializeSeed` and can be passed to a `serde::Deserializer`.
pub struct WorldDeserialize<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    world: &'a mut World,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for WorldDeserialize<'a, 'b, WD> {
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        // The world is wrapped in a RefCell so the per-archetype seed,
        // which is cloned for every sequence element, can borrow it
        // mutably one element at a time.
        let world_refcell = RefCell::new(self.world);
        deserializer.deserialize_seq(SeqDeserializer(ArchetypeDeserializer {
            user: self.user,
            world: &world_refcell,
        }))?;
        Ok(())
    }
}
// Field keys of the serialized `Archetype` struct.
#[derive(Deserialize, Debug)]
#[serde(field_identifier, rename_all = "snake_case")]
enum ArchetypeField {
    Description,
    Tags,
    ChunkSets,
}
/// Seed that deserializes one archetype (description, tag values, chunk
/// sets) into the world.
struct ArchetypeDeserializer<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    world: &'a RefCell<&'a mut World>,
}
// Manual impl: deriving `Clone` would add an unnecessary `WD: Clone` bound.
impl<'a, 'b, WD: WorldDeserializer> Clone for ArchetypeDeserializer<'a, 'b, WD> {
    fn clone(&self) -> Self {
        Self {
            user: self.user,
            world: self.world,
        }
    }
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
    for ArchetypeDeserializer<'a, 'b, WD>
{
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        // The seed acts as its own serde Visitor; the impl is declared
        // inline because it is not used anywhere else.
        impl<'a, 'b, 'de, WD: WorldDeserializer> Visitor<'de> for ArchetypeDeserializer<'a, 'b, WD> {
            type Value = ();
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("struct Archetype")
            }
            // Sequence form: fields arrive in declaration order —
            // description, then tags, then chunk sets.
            fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>
            where
                V: de::SeqAccess<'de>,
            {
                // Resolve (or allocate) the target archetype first; tags and
                // chunk sets are deserialized directly into it.
                let archetype_idx = seq
                    .next_element_seed(ArchetypeDescriptionDeserialize {
                        user: self.user,
                        world: self.world,
                    })?
                    .expect("expected description");
                let mut world = self.world.borrow_mut();
                let archetype_data = &mut world.storage_mut().archetypes_mut()[archetype_idx];
                // Deserializing tags yields the mapping from serialized
                // chunk set indices to world chunk set indices.
                let chunkset_map = seq
                    .next_element_seed(TagsDeserializer {
                        user: self.user,
                        archetype: archetype_data,
                    })?
                    .expect("expected tags");
                seq.next_element_seed(ChunkSetDeserializer {
                    user: self.user,
                    world: &mut *world,
                    archetype_idx,
                    chunkset_map: &chunkset_map,
                })?
                .expect("expected chunk_sets");
                Ok(())
            }
            // Map form: keys may technically arrive in any order, but
            // tags require the description first and chunk sets require
            // both — violations panic via `expect`.
            fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
            where
                V: de::MapAccess<'de>,
            {
                let mut archetype_idx = None;
                let mut chunkset_map = None;
                while let Some(key) = map.next_key()? {
                    match key {
                        ArchetypeField::Description => {
                            archetype_idx =
                                Some(map.next_value_seed(ArchetypeDescriptionDeserialize {
                                    user: self.user,
                                    world: self.world,
                                })?);
                        }
                        ArchetypeField::Tags => {
                            let archetype_idx =
                                archetype_idx.expect("expected archetype description before tags");
                            let mut world = self.world.borrow_mut();
                            let archetype_data =
                                &mut world.storage_mut().archetypes_mut()[archetype_idx];
                            chunkset_map = Some(map.next_value_seed(TagsDeserializer {
                                user: self.user,
                                archetype: archetype_data,
                            })?);
                        }
                        ArchetypeField::ChunkSets => {
                            let archetype_idx = archetype_idx
                                .expect("expected archetype description before chunksets");
                            let mut world = self.world.borrow_mut();
                            map.next_value_seed(ChunkSetDeserializer {
                                user: self.user,
                                world: &mut *world,
                                archetype_idx,
                                chunkset_map: chunkset_map
                                    .as_ref()
                                    .expect("expected tags before chunksets"),
                            })?;
                            // Chunk sets are the last field we care about.
                            return Ok(());
                        }
                    }
                }
                Err(de::Error::missing_field("data"))
            }
        }
        const FIELDS: &[&str] = &["description", "tags", "chunk_sets"];
        deserializer.deserialize_struct("Archetype", FIELDS, self)
    }
}
/// Drives a clonable seed over every element of a serialized sequence,
/// discarding the individual results.
pub struct SeqDeserializer<T>(T);
impl<'de, T: DeserializeSeed<'de> + Clone> DeserializeSeed<'de> for SeqDeserializer<T> {
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_seq(self)
    }
}
impl<'de, T: DeserializeSeed<'de> + Clone> Visitor<'de> for SeqDeserializer<T> {
    type Value = ();
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("sequence of objects")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: de::SeqAccess<'de>,
    {
        // Feed a clone of the inner seed to each element until exhausted.
        while seq.next_element_seed::<T>(self.0.clone())?.is_some() {}
        Ok(())
    }
}
/// Seed that deserializes an `ArchetypeDescription` and resolves it to an
/// archetype index in the world, allocating a new archetype if necessary.
struct ArchetypeDescriptionDeserialize<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    world: &'a RefCell<&'a mut World>,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
    for ArchetypeDescriptionDeserialize<'a, 'b, WD>
{
    type Value = ArchetypeIndex;
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
    where
        D: Deserializer<'de>,
    {
        let desc = self.user.deserialize_archetype_description::<D>(deserializer)?;
        let mut world = self.world.borrow_mut();
        let storage = world.storage_mut();
        // Reuse an existing archetype with an identical description when
        // one exists; otherwise allocate a fresh one.
        let existing = storage
            .archetypes()
            .iter()
            .position(|a| a.description() == &desc)
            .map(ArchetypeIndex);
        Ok(match existing {
            Some(idx) => idx,
            None => storage.alloc_archetype(desc).0,
        })
    }
}
/// Maps a chunk set's index in the serialized data to the index of the
/// matching (or newly allocated) chunk set in the world's archetype.
type ChunkSetMapping = HashMap<usize, SetIndex>;
/// Seed that deserializes an archetype's tag values and resolves which world
/// chunk set each serialized chunk set corresponds to.
struct TagsDeserializer<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    archetype: &'a mut ArchetypeData,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for TagsDeserializer<'a, 'b, WD> {
    type Value = ChunkSetMapping;
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
    where
        D: Deserializer<'de>,
    {
        // The Visitor impl below returns the temporary tag storages plus
        // ownership of `self` so we can keep using the archetype here.
        let (mut deserialized_tags, this) = deserializer.deserialize_seq(self)?;
        let tag_types = this.archetype.description().tags().to_vec();
        let mut chunkset_map = ChunkSetMapping::new();
        let tags = this.archetype.tags_mut();
        assert_eq!(tags.0.len(), tag_types.len());
        // To simplify later code, shuffle the &mut tag_storage indices to match tag_types
        let world_tag_storages = {
            let mut world_tag_storages: Vec<&mut TagStorage> = Vec::with_capacity(tag_types.len());
            for (tag_type, tag_storage) in tags.0.iter_mut() {
                let type_idx = tag_types
                    .iter()
                    .position(|(ty, _)| ty == tag_type)
                    .expect("tag type mismatch with Tags");
                // SAFETY: writes into the Vec's reserved, uninitialized
                // buffer. The lengths are equal (asserted above) and each
                // storage presumably maps to a distinct `tag_types`
                // position, so every slot is written before `set_len`.
                unsafe {
                    std::ptr::write(world_tag_storages.as_mut_ptr().add(type_idx), tag_storage);
                }
            }
            unsafe {
                world_tag_storages.set_len(tag_types.len());
            }
            world_tag_storages
        };
        // Tag storages appear to hold one value per chunk set; only the
        // first storage's length is inspected (None when there are no
        // tag storages at all).
        let num_world_values = world_tag_storages.iter().map(|ts| ts.len()).next();
        let num_tag_values = deserialized_tags
            .iter()
            .map(|ts| ts.len())
            .next()
            .unwrap_or(0);
        let mut chunksets_to_add = Vec::new();
        for i in 0..num_tag_values {
            // Look for an existing chunk set whose tag values all compare
            // equal to the i-th deserialized tag tuple.
            let mut matching_idx = None;
            if let Some(num_world_values) = num_world_values {
                for j in 0..num_world_values {
                    let mut is_matching = true;
                    for tag_idx in 0..tag_types.len() {
                        unsafe {
                            let (de_ptr, stride, _) = deserialized_tags[tag_idx].data_raw();
                            let (world_ptr, _, _) = world_tag_storages[tag_idx].data_raw();
                            let (_, tag_meta) = tag_types[tag_idx];
                            let de_offset = (i * stride) as isize;
                            let world_offset = (j * stride) as isize;
                            if !tag_meta.equals(
                                de_ptr.as_ptr().offset(de_offset),
                                world_ptr.as_ptr().offset(world_offset),
                            ) {
                                is_matching = false;
                                break;
                            }
                        }
                    }
                    if is_matching {
                        matching_idx = Some(j);
                        break;
                    }
                }
            }
            // If we have a matching tag set, we will drop our temporary values manually.
            // All temporary TagStorages in `deserialized_tags` will be forgotten later
            // because we move data into World when allocating a new chunkset
            if let Some(world_idx) = matching_idx {
                chunkset_map.insert(i, SetIndex(world_idx));
                for tag_idx in 0..tag_types.len() {
                    unsafe {
                        let (_, tag_meta) = tag_types[tag_idx];
                        let (de_ptr, stride, _) = deserialized_tags[tag_idx].data_raw();
                        let de_offset = (i * stride) as isize;
                        tag_meta.drop(de_ptr.as_ptr().offset(de_offset) as *mut u8);
                    }
                }
            } else {
                chunksets_to_add.push(i);
            }
        }
        // Allocate a new chunk set for every serialized tag tuple without a
        // match, moving the deserialized tag values into the world.
        for tag_value_idx in chunksets_to_add {
            let chunkset_idx = this.archetype.alloc_chunk_set(|tags| {
                for (tag_idx, (tag_type, _)) in tag_types.iter().enumerate() {
                    unsafe {
                        let (de_ptr, stride, _) = deserialized_tags[tag_idx].data_raw();
                        let de_offset = (tag_value_idx * stride) as isize;
                        let world_storage = tags
                            .get_mut(*tag_type)
                            .expect("tag_storage should be present after allocating chunk_set");
                        world_storage.push_raw(de_ptr.as_ptr().offset(de_offset));
                    }
                }
            });
            chunkset_map.insert(tag_value_idx, chunkset_idx);
        }
        // Every temporary value was either dropped or moved into the world
        // above, so only the raw buffers are forgotten here.
        for tag in deserialized_tags.drain(0..) {
            tag.forget_data();
        }
        // An archetype with no tags still gets one implicit chunk set.
        if num_tag_values == 0 {
            let chunkset_idx = this.archetype.alloc_chunk_set(|_| {});
            chunkset_map.insert(0, chunkset_idx);
        }
        Ok(chunkset_map)
    }
}
impl<'de, 'a, 'b, WD: WorldDeserializer> Visitor<'de> for TagsDeserializer<'a, 'b, WD> {
    // The freshly deserialized tag storages are returned together with the
    // seed itself so `DeserializeSeed::deserialize` above can keep using it.
    type Value = (Vec<TagStorage>, Self);
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("sequence of objects")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: de::SeqAccess<'de>,
    {
        let tag_types = self.archetype.description().tags();
        let mut deserialized_tags = Vec::new();
        // One temporary TagStorage per tag type, in description order.
        for (tag_type, tag_meta) in tag_types {
            let mut tag_storage = TagStorage::new(*tag_meta);
            if seq
                .next_element_seed(TagStorageDeserializer {
                    user: self.user,
                    tag_storage: &mut tag_storage,
                    tag_type: &tag_type,
                    tag_meta: &tag_meta,
                })?
                .is_none()
            {
                break;
            }
            deserialized_tags.push(tag_storage);
        }
        Ok((deserialized_tags, self))
    }
}
/// Seed that forwards deserialization of one tag type's values into the
/// given `TagStorage` via the user's `WorldDeserializer`.
struct TagStorageDeserializer<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    tag_storage: &'a mut TagStorage,
    tag_type: &'a TagTypeId,
    tag_meta: &'a TagMeta,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
    for TagStorageDeserializer<'a, 'b, WD>
{
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
    where
        D: Deserializer<'de>,
    {
        let TagStorageDeserializer {
            user,
            tag_storage,
            tag_type,
            tag_meta,
        } = self;
        user.deserialize_tags(deserializer, tag_type, tag_meta, tag_storage)?;
        Ok(())
    }
}
/// Seed that deserializes an archetype's chunk lists, one per chunk set.
struct ChunkSetDeserializer<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    world: &'a mut World,
    archetype_idx: ArchetypeIndex,
    chunkset_map: &'a ChunkSetMapping,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for ChunkSetDeserializer<'a, 'b, WD> {
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_seq(self)
    }
}
impl<'de, 'a, 'b, WD: WorldDeserializer> Visitor<'de> for ChunkSetDeserializer<'a, 'b, WD> {
    type Value = ();
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("sequence of objects")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: de::SeqAccess<'de>,
    {
        // `idx` is the chunk set's position in the serialized data; it is
        // translated to the world's chunk set index via `chunkset_map`.
        for idx in 0.. {
            let chunkset_idx = self.chunkset_map.get(&idx).cloned();
            if seq
                .next_element_seed(ChunkListDeserializer {
                    user: self.user,
                    world: self.world,
                    archetype_idx: self.archetype_idx,
                    chunkset_idx,
                })?
                .is_none()
            {
                break;
            }
        }
        Ok(())
    }
}
/// Seed that deserializes the list of chunks belonging to one chunk set.
struct ChunkListDeserializer<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    world: &'a mut World,
    archetype_idx: ArchetypeIndex,
    chunkset_idx: Option<SetIndex>,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
    for ChunkListDeserializer<'a, 'b, WD>
{
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_seq(self)
    }
}
impl<'de, 'a, 'b, WD: WorldDeserializer> Visitor<'de> for ChunkListDeserializer<'a, 'b, WD> {
    type Value = ();
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("sequence of struct Chunk")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: de::SeqAccess<'de>,
    {
        // Deserialize chunks until the sequence is exhausted.
        while seq
            .next_element_seed(ChunkDeserializer {
                user: self.user,
                world: self.world,
                archetype_idx: self.archetype_idx,
                chunkset_idx: self.chunkset_idx.expect("expected chunkset_idx"),
            })?
            .is_some()
        {}
        Ok(())
    }
}
// Field keys of the serialized `Chunk` struct.
#[derive(Deserialize, Debug)]
#[serde(field_identifier, rename_all = "lowercase")]
enum ChunkField {
    Entities,
    Components,
}
/// Seed that deserializes a single chunk (its entity list and component
/// data) into the given archetype and chunk set.
struct ChunkDeserializer<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    world: &'a mut World,
    archetype_idx: ArchetypeIndex,
    chunkset_idx: SetIndex,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for ChunkDeserializer<'a, 'b, WD> {
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_struct("Chunk", &["entities", "components"], self)
    }
}
impl<'de, 'a, 'b, WD: WorldDeserializer> Visitor<'de> for ChunkDeserializer<'a, 'b, WD> {
    type Value = ();
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("struct Chunk")
    }
    fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>
    where
        V: de::SeqAccess<'de>,
    {
        // Entities come first: deserializing them determines which chunks
        // (and how many slots per chunk) the component data goes into.
        let chunk_ranges = seq.next_element_seed(EntitiesDeserializer {
            user: self.user,
            world: self.world,
            archetype_idx: self.archetype_idx,
            chunkset_idx: self.chunkset_idx,
        })?;
        seq.next_element_seed(ComponentsDeserializer {
            user: self.user,
            world: self.world,
            archetype_idx: self.archetype_idx,
            chunkset_idx: self.chunkset_idx,
            chunk_ranges: chunk_ranges
                .as_ref()
                .expect("expected entities before components"),
        })?;
        Ok(())
    }
    // Map form: the entities field must still be seen before components,
    // otherwise the `expect` below panics.
    fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
    where
        V: de::MapAccess<'de>,
    {
        let mut chunk_ranges = None;
        while let Some(key) = map.next_key()? {
            match key {
                ChunkField::Entities => {
                    chunk_ranges = Some(map.next_value_seed(EntitiesDeserializer {
                        user: self.user,
                        world: self.world,
                        archetype_idx: self.archetype_idx,
                        chunkset_idx: self.chunkset_idx,
                    })?);
                }
                ChunkField::Components => {
                    map.next_value_seed(ComponentsDeserializer {
                        user: self.user,
                        world: self.world,
                        archetype_idx: self.archetype_idx,
                        chunkset_idx: self.chunkset_idx,
                        chunk_ranges: chunk_ranges
                            .as_ref()
                            .expect("expected entities before components"),
                    })?;
                }
            }
        }
        Ok(())
    }
}
/// Seed that deserializes a chunk's entity list, places the entities into
/// free chunks of the target chunk set, and returns which chunks were
/// filled and how many entities each received.
struct EntitiesDeserializer<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    world: &'a mut World,
    archetype_idx: ArchetypeIndex,
    chunkset_idx: SetIndex,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for EntitiesDeserializer<'a, 'b, WD> {
    // Pairs of (chunk index, number of entities placed in that chunk).
    type Value = Vec<(ChunkIndex, usize)>;
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
    where
        D: Deserializer<'de>,
    {
        let mut entities = Vec::new();
        self.user.deserialize_entities(
            deserializer,
            &self.world.entity_allocator,
            &mut entities,
        )?;
        let archetype = &mut self.world.storage_mut().archetypes_mut()[self.archetype_idx];
        let mut chunk_ranges = Vec::new();
        let mut chunk_idx = archetype.get_free_chunk(self.chunkset_idx, entities.len());
        let mut entities_in_chunk = 0;
        for entity in entities {
            let chunk = {
                let chunkset = &mut archetype.chunksets_mut()[self.chunkset_idx];
                let chunk = &mut chunkset[chunk_idx];
                if chunk.is_full() {
                    // Current chunk is exhausted: record its range and roll
                    // over to a fresh chunk.
                    chunk_ranges.push((chunk_idx, entities_in_chunk));
                    // BUGFIX: reset the per-chunk counter. It previously kept
                    // accumulating across chunks, so every range after the
                    // first over-reported its entity count and the component
                    // deserializer reserved too much memory for those chunks.
                    entities_in_chunk = 0;
                    chunk_idx = archetype.get_free_chunk(self.chunkset_idx, 1);
                    let chunkset = &mut archetype.chunksets_mut()[self.chunkset_idx];
                    &mut chunkset[chunk_idx]
                } else {
                    chunk
                }
            };
            chunk.writer().get().0.push(entity);
            entities_in_chunk += 1;
        }
        // Record the final (partial) chunk's range.
        if entities_in_chunk > 0 {
            chunk_ranges.push((chunk_idx, entities_in_chunk));
        }
        Ok(chunk_ranges)
    }
}
/// Seed that deserializes all component data belonging to one chunk, one
/// component type at a time in archetype-description order.
struct ComponentsDeserializer<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    world: &'a mut World,
    archetype_idx: ArchetypeIndex,
    chunkset_idx: SetIndex,
    chunk_ranges: &'a Vec<(ChunkIndex, usize)>,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
    for ComponentsDeserializer<'a, 'b, WD>
{
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_seq(self)
    }
}
impl<'de, 'a, 'b, WD: WorldDeserializer> Visitor<'de> for ComponentsDeserializer<'a, 'b, WD> {
    type Value = ();
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("sequence of objects")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: de::SeqAccess<'de>,
    {
        let archetype = &mut self.world.storage_mut().archetypes_mut()[self.archetype_idx];
        for comp_idx in 0..archetype.description().components().len() {
            let (comp_type, comp_meta) = archetype.description().components()[comp_idx];
            let chunkset = &mut archetype.chunksets_mut()[self.chunkset_idx];
            if seq
                .next_element_seed(ComponentDataDeserializer {
                    user: self.user,
                    comp_type: &comp_type,
                    comp_meta: &comp_meta,
                    chunkset,
                    chunk_ranges: self.chunk_ranges,
                })?
                .is_none()
            {
                break;
            }
        }
        Ok(())
    }
}
/// Seed that deserializes one component type's data directly into memory
/// reserved inside the chunk set's component storages.
struct ComponentDataDeserializer<'a, 'b, WD: WorldDeserializer> {
    user: &'b WD,
    comp_type: &'a ComponentTypeId,
    comp_meta: &'a ComponentMeta,
    chunkset: &'a mut Chunkset,
    chunk_ranges: &'a Vec<(ChunkIndex, usize)>,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
    for ComponentDataDeserializer<'a, 'b, WD>
{
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
    where
        D: Deserializer<'de>,
    {
        let mut range_idx = 0;
        self.user.deserialize_components(
            deserializer,
            self.comp_type,
            self.comp_meta,
            // Hands out one (pointer, len) pair of reserved, uninitialized
            // component memory per previously recorded chunk range, and None
            // once all ranges are exhausted. Per the `WorldDeserializer`
            // contract, the callee must initialize this memory.
            &mut || -> Option<(NonNull<u8>, usize)> {
                self.chunk_ranges.get(range_idx).map(|&(chunk, size)| {
                    range_idx += 1;
                    let chunk = &mut self.chunkset[chunk];
                    unsafe {
                        let comp_storage = (&mut *chunk.writer().get().1.get())
                            .get_mut(*self.comp_type)
                            .expect(
                                "expected ComponentResourceSet when deserializing component data",
                            );
                        (comp_storage.writer().reserve_raw(size), size)
                    }
                })
            },
        )?;
        Ok(())
    }
}

View file

@ -1,2 +0,0 @@
pub mod de;
pub mod ser;

View file

@ -1,309 +0,0 @@
use crate::{
entity::Entity,
storage::{
ArchetypeData, ArchetypeDescription, ComponentMeta, ComponentResourceSet, ComponentStorage,
ComponentTypeId, TagMeta, TagStorage, TagTypeId,
},
world::World,
};
use serde::{ser::SerializeStruct, Serialize, Serializer};
/// Implements `Serialize` and can be passed to a `serde::Serializer`.
pub struct WorldSerializable<'a, 'b, CS: WorldSerializer> {
    world_serializer: &'b CS,
    world: &'a World,
}
/// Returns a value that implements `Serialize` and can be passed to a `serde::Serializer`.
/// Borrows the world immutably for the lifetime of the returned value.
pub fn serializable_world<'a, 'b, CS: WorldSerializer>(
    world: &'a World,
    serialize_impl: &'b CS,
) -> WorldSerializable<'a, 'b, CS> {
    WorldSerializable {
        world,
        world_serializer: serialize_impl,
    }
}
/*
// Structure optimized for saving and loading:
[
(
// Description of archetype
archetype: {},
// Tag data arrays. One inner array per chunk set. Indices match chunk set indices
tags: [
// Tag values. One element per chunk set. Indices match chunk set indices
[TAG_DATA]
],
chunksets: [
// CHUNK SET. One array element per array of chunks in the chunkset
[
// CHUNK
(
// ENTITIES in the chunk
entities: [Entity],
// COMPONENT STORAGE: One array per component type, as per the archetype.
// Component type indices in archetype correspond to indices here
components: [
// COMPONENT RESOURCE SET: The actual component data. One element per entity
[COMPONENT_DATA],
...
],
),
...
],
...
],
),
...
]
*/
/// User must implement this trait to serialize a World.
/// The `can_serialize_*` methods act as a filter: tag and component types
/// they reject are omitted from the output entirely.
pub trait WorldSerializer {
    /// Returns whether this serializer can serialize a tag type or not.
    /// If not, the tag type will not be passed to `serialize_tags`.
    fn can_serialize_tag(&self, ty: &TagTypeId, meta: &TagMeta) -> bool;
    /// Returns whether this serializer can serialize a component type or not.
    /// If not, the component type will not be passed to `serialize_components`.
    fn can_serialize_component(&self, ty: &ComponentTypeId, meta: &ComponentMeta) -> bool;
    /// Serialize an `ArchetypeDescription`
    fn serialize_archetype_description<S: Serializer>(
        &self,
        serializer: S,
        archetype_desc: &ArchetypeDescription,
    ) -> Result<S::Ok, S::Error>;
    /// Serialize all data in a `ComponentResourceSet`.
    fn serialize_components<S: Serializer>(
        &self,
        serializer: S,
        component_type: &ComponentTypeId,
        component_meta: &ComponentMeta,
        components: &ComponentResourceSet,
    ) -> Result<S::Ok, S::Error>;
    /// Serialize all tags in a `TagStorage`.
    fn serialize_tags<S: Serializer>(
        &self,
        serializer: S,
        tag_type: &TagTypeId,
        tag_meta: &TagMeta,
        tags: &TagStorage,
    ) -> Result<S::Ok, S::Error>;
    /// Serialize entities in the provided slice.
    fn serialize_entities<S: Serializer>(
        &self,
        serializer: S,
        entities: &[Entity],
    ) -> Result<S::Ok, S::Error>;
}
impl<'a, 'b, CS: WorldSerializer> Serialize for WorldSerializable<'a, 'b, CS> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let storage = self.world.storage();
        serializer.collect_seq(
            storage
                .archetypes()
                .iter()
                .filter_map(|archetype| {
                    // Keep only the tag/component types the user serializer
                    // accepts, remembering each type's index within the
                    // archetype description.
                    let valid_tags = archetype
                        .description()
                        .tags()
                        .iter()
                        .enumerate()
                        .filter(|(_, (ty, meta))| self.world_serializer.can_serialize_tag(ty, meta))
                        .map(|(idx, (ty, meta))| (idx, ty, meta))
                        .collect::<Vec<_>>();
                    let valid_components = archetype
                        .description()
                        .components()
                        .iter()
                        .enumerate()
                        .filter(|(_, (ty, meta))| {
                            self.world_serializer.can_serialize_component(ty, meta)
                        })
                        .map(|(idx, (ty, meta))| (idx, ty, meta))
                        .collect::<Vec<_>>();
                    // Archetypes with nothing serializable are skipped.
                    if !valid_tags.is_empty() || !valid_components.is_empty() {
                        Some(ArchetypeSerializer {
                            world_serializer: self.world_serializer,
                            archetype,
                            valid_tags,
                            valid_components,
                        })
                    } else {
                        None
                    }
                })
                .collect::<Vec<_>>(),
        )
    }
}
/// Serializes one archetype — its description, tag values, and chunk sets —
/// restricted to the tag/component types the user serializer accepts.
struct ArchetypeSerializer<'a, 'b, CS: WorldSerializer> {
    world_serializer: &'b CS,
    archetype: &'a ArchetypeData,
    // (index within the description, type id, meta) for each accepted type.
    valid_tags: Vec<(usize, &'a TagTypeId, &'a TagMeta)>,
    valid_components: Vec<(usize, &'a ComponentTypeId, &'a ComponentMeta)>,
}
impl<'a, 'b, CS: WorldSerializer> Serialize for ArchetypeSerializer<'a, 'b, CS> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Field order matters: the deserializer's sequence form expects
        // description, tags, chunk_sets in exactly this order.
        let mut archetype = serializer.serialize_struct("Archetype", 3)?;
        let desc = self.archetype.description();
        archetype.serialize_field(
            "description",
            &ArchetypeDescriptionSerializer {
                world_serializer: self.world_serializer,
                desc,
            },
        )?;
        let tags: Vec<_> = self
            .valid_tags
            .iter()
            .map(|(_idx, ty, meta)| {
                let tag_storage = self
                    .archetype
                    .tags()
                    .get(**ty)
                    .expect("tag type in archetype but not in storage");
                TagSerializer {
                    world_serializer: self.world_serializer,
                    ty,
                    meta,
                    tag_storage,
                }
            })
            .collect();
        archetype.serialize_field("tags", &tags)?;
        // One inner Vec per chunk set; only occupied chunks are serialized.
        let chunksets: Vec<_> = self
            .archetype
            .chunksets()
            .iter()
            .map(|chunkset| {
                chunkset
                    .occupied()
                    .iter()
                    .map(|comp_storage| ChunkSerializer {
                        world_serializer: self.world_serializer,
                        desc,
                        comp_storage,
                        valid_components: &self.valid_components,
                    })
                    .collect::<Vec<_>>()
            })
            .collect();
        archetype.serialize_field("chunk_sets", &chunksets)?;
        archetype.end()
    }
}
/// Serializes an archetype's description (its tag/component type list) by
/// delegating to the user-provided world serializer.
struct ArchetypeDescriptionSerializer<'a, 'b, CS: WorldSerializer> {
    world_serializer: &'b CS,
    desc: &'a ArchetypeDescription,
}
impl<'a, 'b, CS: WorldSerializer> Serialize for ArchetypeDescriptionSerializer<'a, 'b, CS> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.world_serializer
.serialize_archetype_description(serializer, self.desc)
}
}
/// Serializes the values of one tag type within an archetype by delegating
/// to the user-provided world serializer.
struct TagSerializer<'a, 'b, CS: WorldSerializer> {
    world_serializer: &'b CS,
    ty: &'a TagTypeId,
    meta: &'a TagMeta,
    tag_storage: &'a TagStorage,
}
impl<'a, 'b, CS: WorldSerializer> Serialize for TagSerializer<'a, 'b, CS> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.world_serializer
.serialize_tags(serializer, self.ty, self.meta, self.tag_storage)
}
}
/// Serializes a single chunk: its entity list plus the values of every
/// serializable component type stored in the chunk.
struct ChunkSerializer<'a, 'b, CS: WorldSerializer> {
    world_serializer: &'b CS,
    desc: &'a ArchetypeDescription,
    comp_storage: &'a ComponentStorage,
    // Borrowed as a slice rather than `&Vec<_>` (the `&Vec` anti-pattern,
    // clippy::ptr_arg). The construction site passes `&self.valid_components`,
    // which deref-coerces to a slice, so this change is backward compatible.
    valid_components: &'a [(usize, &'a ComponentTypeId, &'a ComponentMeta)],
}
impl<'a, 'b, CS: WorldSerializer> Serialize for ChunkSerializer<'a, 'b, CS> {
    /// Emits a `Chunk` struct with two fields: the chunk's entities and the
    /// per-type component data.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut state = serializer.serialize_struct("Chunk", 2)?;
        // Field 1: every entity stored in this chunk.
        let entities = EntitySerializer {
            world_serializer: self.world_serializer,
            entities: self.comp_storage.entities(),
        };
        state.serialize_field("entities", &entities)?;
        // Field 2: one serializer per component type the world serializer
        // declared it can handle.
        let mut components = Vec::with_capacity(self.valid_components.len());
        for (_idx, ty, meta) in self.valid_components.iter() {
            let comp_resources = self
                .comp_storage
                .components(**ty)
                .expect("component type in archetype but not in storage");
            components.push(ComponentResourceSetSerializer {
                world_serializer: self.world_serializer,
                ty,
                meta,
                comp_resources,
            });
        }
        state.serialize_field("components", &components)?;
        state.end()
    }
}
/// Serializes the data of one component type within a chunk by delegating
/// to the user-provided world serializer.
struct ComponentResourceSetSerializer<'a, 'b, CS: WorldSerializer> {
    world_serializer: &'b CS,
    ty: &'a ComponentTypeId,
    meta: &'a ComponentMeta,
    comp_resources: &'a ComponentResourceSet,
}
impl<'a, 'b, CS: WorldSerializer> Serialize for ComponentResourceSetSerializer<'a, 'b, CS> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.world_serializer.serialize_components(
serializer,
self.ty,
self.meta,
self.comp_resources,
)
}
}
/// Serializes a chunk's entity list by delegating to the user-provided
/// world serializer.
struct EntitySerializer<'a, 'b, CS: WorldSerializer> {
    world_serializer: &'b CS,
    entities: &'a [Entity],
}
impl<'a, 'b, CS: WorldSerializer> Serialize for EntitySerializer<'a, 'b, CS> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.world_serializer
.serialize_entities(serializer, self.entities)
}
}

File diff suppressed because it is too large Load diff

View file

@ -1,442 +0,0 @@
// This is required to be copied in because PartialEq is only implemented up to 14 elements on a tuple.
// This implements our own Eq for up to 26 parameters (A-Z)
use std::cmp::*;
/// Element-wise equality for tuples of up to 26 elements.
///
/// The standard library only implements `PartialEq` for small tuple arities,
/// so legion provides its own trait for the larger tuples it generates.
pub trait TupleEq<T: ?Sized = Self> {
    // True when every element compares equal.
    fn legion_eq(&self, other: &T) -> bool;
    // True when any element compares unequal.
    fn legion_ne(&self, other: &T) -> bool;
}
// macro for implementing n-ary tuple functions and operations
// macro for implementing n-ary tuple functions and operations
//
// Each `$Tuple { ($idx) -> $T ... }` entry expands to a `TupleEq` impl for
// the tuple type `($T,)+`: `legion_eq` is the `&&`-chain of per-element `==`,
// and `legion_ne` is the `||`-chain of per-element `!=`.
macro_rules! tuple_impls {
    ($(
        $Tuple:ident {
            $(($idx:tt) -> $T:ident)+
        }
    )+) => {
        $(
            // `last_type!` names the final element type so the `?Sized`
            // relaxation applies to it, mirroring std's tuple impls.
            impl<$($T:PartialEq),+> TupleEq for ($($T,)+) where last_type!($($T,)+): ?Sized {
                #[inline]
                fn legion_eq(&self, other: &($($T,)+)) -> bool {
                    $(self.$idx == other.$idx)&&+
                }
                #[inline]
                fn legion_ne(&self, other: &($($T,)+)) -> bool {
                    $(self.$idx != other.$idx)||+
                }
            }
        )+
    }
}
// Recursively discards tuple type parameters until only the final one
// remains; used by `tuple_impls!` to bound the last element with `?Sized`.
macro_rules! last_type {
    ($a:ident,) => { $a };
    ($a:ident, $($rest_a:ident,)+) => { last_type!($($rest_a,)+) };
}
tuple_impls! {
Tuple1 {
(0) -> A
}
Tuple2 {
(0) -> A
(1) -> B
}
Tuple3 {
(0) -> A
(1) -> B
(2) -> C
}
Tuple4 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
}
Tuple5 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
}
Tuple6 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
}
Tuple7 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
}
Tuple8 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
}
Tuple9 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
}
Tuple10 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
}
Tuple11 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
}
Tuple12 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
}
Tuple13 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
}
Tuple14 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
}
Tuple15 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
}
Tuple16 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
}
Tuple17 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
(16) -> Q
}
Tuple18 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
(16) -> Q
(17) -> R
}
Tuple19 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
(16) -> Q
(17) -> R
(18) -> S
}
Tuple20 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
(16) -> Q
(17) -> R
(18) -> S
(19) -> T
}
Tuple21 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
(16) -> Q
(17) -> R
(18) -> S
(19) -> T
(20) -> U
}
Tuple22 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
(16) -> Q
(17) -> R
(18) -> S
(19) -> T
(20) -> U
(21) -> V
}
Tuple23 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
(16) -> Q
(17) -> R
(18) -> S
(19) -> T
(20) -> U
(21) -> V
(22) -> W
}
Tuple24 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
(16) -> Q
(17) -> R
(18) -> S
(19) -> T
(20) -> U
(21) -> V
(22) -> W
(23) -> X
}
Tuple25 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
(16) -> Q
(17) -> R
(18) -> S
(19) -> T
(20) -> U
(21) -> V
(22) -> W
(23) -> X
(24) -> Y
}
Tuple26 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
(12) -> M
(13) -> N
(14) -> O
(15) -> P
(16) -> Q
(17) -> R
(18) -> S
(19) -> T
(20) -> U
(21) -> V
(22) -> W
(23) -> X
(24) -> Y
(25) -> Z
}
}

File diff suppressed because it is too large Load diff

View file

@ -1,139 +0,0 @@
// This is copied in from itertools because they only implement zip for up to 8 elements.
// This implements our own zip up to 26 elements.
// Copyright (c) 2015
// https://github.com/rust-itertools/itertools
//
//Permission is hereby granted, free of charge, to any
//person obtaining a copy of this software and associated
//documentation files (the "Software"), to deal in the
//Software without restriction, including without
//limitation the rights to use, copy, modify, merge,
//publish, distribute, sublicense, and/or sell copies of
//the Software, and to permit persons to whom the Software
//is furnished to do so, subject to the following
//conditions:
//
//The above copyright notice and this permission notice
//shall be included in all copies or substantial portions
//of the Software.
//
//THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
//ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
//TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
//PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
//SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
//CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
//OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
//IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
//DEALINGS IN THE SOFTWARE.
/// Combines two iterator `size_hint`s into the hint for their zip: the lower
/// bound is the smaller of the lower bounds; the upper bound is the smaller
/// of the two upper bounds when both are known, otherwise whichever one is
/// known (or `None` when neither is).
#[inline]
pub fn min(a: (usize, Option<usize>), b: (usize, Option<usize>)) -> (usize, Option<usize>) {
    let lower = a.0.min(b.0);
    let upper = match (a.1, b.1) {
        (Some(u1), Some(u2)) => Some(u1.min(u2)),
        (u1, u2) => u1.or(u2),
    };
    (lower, upper)
}
/// An iterator that steps through up to 26 iterators in lock-step, yielding
/// tuples of their items and stopping at the shortest input.
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct Zip<T> {
    // Tuple of the underlying iterators.
    t: T,
}
/// Zips a tuple of iterables into a single `Zip` iterator over tuples.
///
/// Supported for tuple arities 1 through 26 (see the `impl_zip_iter!`
/// invocations below), unlike `itertools::multizip` which stops at 8.
pub fn multizip<T, U>(t: U) -> Zip<T>
where
    Zip<T>: From<U>,
    Zip<T>: Iterator,
{
    Zip::from(t)
}
// For one tuple arity, generates:
//  * `From<(A, B, ...)>` for `Zip<(A::IntoIter, B::IntoIter, ...)>`,
//  * `Iterator` yielding `(A::Item, B::Item, ...)`, stopping at the first
//    exhausted inner iterator,
//  * `ExactSizeIterator` when every inner iterator is exact-size.
macro_rules! impl_zip_iter {
    ($($B:ident),*) => (
        #[allow(non_snake_case)]
        impl<$($B: IntoIterator),*> From<($($B,)*)> for Zip<($($B::IntoIter,)*)> {
            fn from(t: ($($B,)*)) -> Self {
                let ($($B,)*) = t;
                Zip { t: ($($B.into_iter(),)*) }
            }
        }
        #[allow(non_snake_case)]
        #[allow(unused_assignments)]
        impl<$($B),*> Iterator for Zip<($($B,)*)>
        where
            $(
                $B: Iterator,
            )*
        {
            type Item = ($($B::Item,)*);
            fn next(&mut self) -> Option<Self::Item>
            {
                let ($(ref mut $B,)*) = self.t;
                // NOTE: Just like iter::Zip, we check the iterators
                // for None in order. We may finish unevenly (some
                // iterators gave n + 1 elements, some only n).
                $(
                    let $B = match $B.next() {
                        None => return None,
                        Some(elt) => elt
                    };
                )*
                Some(($($B,)*))
            }
            #[allow(clippy::let_and_return)]
            fn size_hint(&self) -> (usize, Option<usize>)
            {
                // Fold the inner hints together with `min` (defined above).
                let sh = (::std::usize::MAX, None);
                let ($(ref $B,)*) = self.t;
                $(
                    let sh = min($B.size_hint(), sh);
                )*
                sh
            }
        }
        #[allow(non_snake_case)]
        impl<$($B),*> ExactSizeIterator for Zip<($($B,)*)> where
            $(
                $B: ExactSizeIterator,
            )*
        { }
    );
}
// Generate `From`, `Iterator`, and `ExactSizeIterator` impls for tuples of
// 1 (A) through 26 (A..Z) iterators.
impl_zip_iter!(A);
impl_zip_iter!(A, B);
impl_zip_iter!(A, B, C);
impl_zip_iter!(A, B, C, D);
impl_zip_iter!(A, B, C, D, E);
impl_zip_iter!(A, B, C, D, E, F);
impl_zip_iter!(A, B, C, D, E, F, G);
impl_zip_iter!(A, B, C, D, E, F, G, H);
impl_zip_iter!(A, B, C, D, E, F, G, H, I);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y);
impl_zip_iter!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z);

View file

@ -1,35 +0,0 @@
[package]
name = "legion-systems"
version = "0.2.1"
description = "High performance entity component system (ECS) library"
authors = ["Thomas Gillen <thomas.gillen@googlemail.com>"]
repository = "https://github.com/TomGillen/legion"
keywords = ["ecs", "game"]
categories = ["game-engines", "data-structures"]
readme = "readme.md"
license = "MIT"
edition = "2018"
[badges]
travis-ci = { repository = "TomGillen/legion", branch = "master" }
[features]
par-iter = ["rayon", "legion-core/par-iter"]
par-schedule = ["rayon", "crossbeam-queue"]
[dependencies]
legion-core = { path = "../legion_core", version = "0.2.1", default-features = false }
downcast-rs = "1.0"
itertools = "0.8"
rayon = { version = "1.2", optional = true }
crossbeam-queue = { version = "0.2.0", optional = true }
crossbeam-channel = "0.4.0"
derivative = "1"
bit-set = "0.5"
paste = "0.1"
tracing = "0.1"
fxhash = "0.2"
[dev-dependencies]
tracing-subscriber = "0.2"

View file

@ -1,16 +0,0 @@
pub mod resource;
pub mod schedule;
mod system;
pub use bit_set;
pub use system::*;
/// Commonly used legion-systems types, intended for glob import
/// (typically alongside `legion_core::prelude`).
pub mod prelude {
    pub use crate::{
        bit_set::BitSet,
        resource::{ResourceSet, Resources},
        schedule::{Executor, Runnable, Schedulable, Schedule},
        System, SystemBuilder,
    };
}

View file

@ -1,465 +0,0 @@
use downcast_rs::{impl_downcast, Downcast};
use fxhash::FxHashMap;
use legion_core::borrow::DowncastTypename;
use legion_core::borrow::{AtomicRefCell, Ref, RefMut};
use legion_core::query::{Read, ReadOnly, Write};
use std::{
any::{type_name, Any},
marker::PhantomData,
ops::{Deref, DerefMut},
};
impl DowncastTypename for dyn Resource {
    /// Downcasts to `&mut T`, gated on `is_typename` agreeing the types match.
    #[inline(always)]
    fn downcast_typename_mut<T: Any>(&mut self) -> Option<&mut T> {
        if self.is_typename::<T>() {
            // SAFETY: just checked whether we are pointing to the correct type
            unsafe { Some(&mut *(self.as_any_mut() as *mut dyn Any as *mut T)) }
        } else {
            None
        }
    }
    /// Downcasts to `&T`, gated on `is_typename` agreeing the types match.
    #[inline(always)]
    fn downcast_typename_ref<T: Any>(&self) -> Option<&T> {
        if self.is_typename::<T>() {
            // SAFETY: just checked whether we are pointing to the correct type
            unsafe { Some(&*(self.as_any() as *const dyn Any as *const T)) }
        } else {
            None
        }
    }
    #[inline(always)]
    fn is_typename<T: Any>(&self) -> bool {
        // NOTE(review): this unconditionally returns `true`, so the "just
        // checked" guards in the downcasts above verify nothing — the raw
        // pointer casts are only sound when the caller's `T` really is the
        // stored type. Worth restoring a real check if the name mismatch
        // below can be resolved.
        true
        // TODO: it would be nice to add type safety here, but the type names don't match
        // println!("{} {}", type_name_of_val(self), type_name::<T>());
        // type_name_of_val(self) == type_name::<T>()
    }
}
#[cfg(not(feature = "ffi"))]
/// A type ID identifying a resource type.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ResourceTypeId(&'static str);
#[cfg(not(feature = "ffi"))]
impl ResourceTypeId {
    /// Gets the resource type ID that represents type `T`.
    pub fn of<T: Resource>() -> Self { Self(type_name::<T>()) }
}
#[cfg(feature = "ffi")]
/// A type ID identifying a resource type.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ResourceTypeId(&'static str, u32);
#[cfg(feature = "ffi")]
impl ResourceTypeId {
    /// Gets the resource type ID that represents type `T`.
    pub fn of<T: Resource>() -> Self { Self(type_name::<T>(), 0) }
}
/// Trait which is implemented for tuples of resources and singular resources. This abstracts
/// fetching resources to allow for ergonomic fetching.
///
/// # Example:
/// ```
///
/// struct TypeA(usize);
/// struct TypeB(usize);
///
/// use legion_core::prelude::*;
/// use legion_systems::prelude::*;
/// let mut resources = Resources::default();
/// resources.insert(TypeA(55));
/// resources.insert(TypeB(12));
///
/// {
///     let (a, mut b) = <(Read<TypeA>, Write<TypeB>)>::fetch_mut(&mut resources);
///     assert_ne!(a.0, b.0);
///     b.0 = a.0;
/// }
///
/// {
///     let (a, b) = <(Read<TypeA>, Read<TypeB>)>::fetch(&resources);
///     assert_eq!(a.0, b.0);
/// }
///
/// ```
pub trait ResourceSet: Send + Sync {
    /// The resource handles produced by a fetch (see the `Read`/`Write`
    /// impls below).
    type PreparedResources;
    /// Fetches all defined resources, without checking mutability.
    ///
    /// # Safety
    /// It is up to the end user to validate proper mutability rules across the resources being accessed.
    ///
    unsafe fn fetch_unchecked(resources: &Resources) -> Self::PreparedResources;
    /// Fetches all defined resources; the exclusive `&mut` borrow makes the
    /// unchecked fetch sound.
    fn fetch_mut(resources: &mut Resources) -> Self::PreparedResources {
        // safe because mutable borrow ensures exclusivity
        unsafe { Self::fetch_unchecked(resources) }
    }
    /// Fetches a read-only resource set; the `ReadOnly` bound guarantees no
    /// mutable access is produced.
    fn fetch(resources: &Resources) -> Self::PreparedResources
    where
        Self: ReadOnly,
    {
        unsafe { Self::fetch_unchecked(resources) }
    }
}
/// Blanket trait for resource types.
///
/// Automatically implemented for every `'static + Send + Sync` type, and
/// made downcastable so `Resources` can store values type-erased.
pub trait Resource: 'static + Downcast + Send + Sync {}
impl<T> Resource for T where T: 'static + Send + Sync {}
impl_downcast!(Resource);
/// Wrapper type for safe, lifetime-guaranteed immutable access to a resource of type `T`. This
/// is the wrapper type which is provided to the closure in a `System`, meaning it is only scoped
/// to that system execution.
///
/// # Safety
///
/// This type contains an immutable pointer to `T`, and must not outlive its lifetime
pub struct PreparedRead<T: Resource> {
    resource: *const T,
}
impl<T: Resource> PreparedRead<T> {
    // Safety: caller must guarantee `resource` outlives the wrapper (see type docs).
    pub(crate) unsafe fn new(resource: *const T) -> Self { Self { resource } }
}
impl<T: Resource> Deref for PreparedRead<T> {
    type Target = T;
    // SAFETY: per the construction contract, the pointer is valid while this wrapper lives.
    fn deref(&self) -> &Self::Target { unsafe { &*self.resource } }
}
unsafe impl<T: Resource> Send for PreparedRead<T> {}
unsafe impl<T: Resource> Sync for PreparedRead<T> {}
/// Wrapper type for safe, lifetime-guaranteed mutable access to a resource of type `T`. This
/// is the wrapper type which is provided to the closure in a `System`, meaning it is only scoped
/// to that system execution.
///
/// # Safety
///
/// This type contains a mutable pointer to `T`, and must not outlive its lifetime
pub struct PreparedWrite<T: Resource> {
    resource: *mut T,
}
impl<T: Resource> Deref for PreparedWrite<T> {
    type Target = T;
    // SAFETY: per the construction contract, the pointer is valid while this wrapper lives.
    fn deref(&self) -> &Self::Target { unsafe { &*self.resource } }
}
impl<T: Resource> DerefMut for PreparedWrite<T> {
    // SAFETY: per the construction contract, this wrapper has exclusive access.
    fn deref_mut(&mut self) -> &mut T { unsafe { &mut *self.resource } }
}
impl<T: Resource> PreparedWrite<T> {
    // Safety: caller must guarantee exclusive access to `resource` for the wrapper's lifetime.
    pub(crate) unsafe fn new(resource: *mut T) -> Self { Self { resource } }
}
unsafe impl<T: Resource> Send for PreparedWrite<T> {}
unsafe impl<T: Resource> Sync for PreparedWrite<T> {}
/// Ergonomic wrapper type which contains a `Ref` type.
pub struct Fetch<'a, T: 'a + Resource> {
    // Shared borrow guard over the type-erased resource box.
    inner: Ref<'a, Box<dyn Resource>>,
    _marker: PhantomData<T>,
}
impl<'a, T: Resource> Deref for Fetch<'a, T> {
    type Target = T;
    /// # Panics
    /// Panics if the stored resource cannot be downcast to `T`.
    #[inline]
    fn deref(&self) -> &Self::Target {
        self.inner.downcast_typename_ref::<T>().unwrap_or_else(|| {
            panic!(
                "Unable to downcast the resource!: {}",
                std::any::type_name::<T>()
            )
        })
    }
}
impl<'a, T: 'a + Resource + std::fmt::Debug> std::fmt::Debug for Fetch<'a, T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?}", self.deref())
    }
}
/// Ergonomic wrapper type which contains a `RefMut` type.
pub struct FetchMut<'a, T: Resource> {
    // Exclusive borrow guard over the type-erased resource box.
    inner: RefMut<'a, Box<dyn Resource>>,
    _marker: PhantomData<T>,
}
impl<'a, T: 'a + Resource> Deref for FetchMut<'a, T> {
    type Target = T;
    /// # Panics
    /// Panics if the stored resource cannot be downcast to `T`.
    #[inline]
    fn deref(&self) -> &Self::Target {
        self.inner.downcast_typename_ref::<T>().unwrap_or_else(|| {
            panic!(
                "Unable to downcast the resource!: {}",
                std::any::type_name::<T>()
            )
        })
    }
}
impl<'a, T: 'a + Resource> DerefMut for FetchMut<'a, T> {
    /// # Panics
    /// Panics if the stored resource cannot be downcast to `T`.
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        self.inner.downcast_typename_mut::<T>().unwrap_or_else(|| {
            panic!(
                "Unable to downcast the resource!: {}",
                std::any::type_name::<T>()
            )
        })
    }
}
impl<'a, T: 'a + Resource + std::fmt::Debug> std::fmt::Debug for FetchMut<'a, T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?}", self.deref())
    }
}
/// Resources container. This container stores its underlying resources in a `FxHashMap` keyed on
/// `ResourceTypeId`. This means that the IDs used in this storage will not persist between recompiles.
#[derive(Default)]
pub struct Resources {
    // One type-erased, interior-mutability cell per resource type.
    storage: FxHashMap<ResourceTypeId, AtomicRefCell<Box<dyn Resource>>>,
}
impl Resources {
    /// Returns `true` if type `T` exists in the store. Otherwise, returns `false`
    pub fn contains<T: Resource>(&self) -> bool {
        self.storage.contains_key(&ResourceTypeId::of::<T>())
    }
    /// Inserts the instance of `T` into the store. If the type already exists, it will be silently
    /// overwritten. If you would like to retain the instance of the resource that already exists,
    /// call `remove` first to retrieve it.
    pub fn insert<T: Resource>(&mut self, value: T) {
        self.storage.insert(
            ResourceTypeId::of::<T>(),
            AtomicRefCell::new(Box::new(value)),
        );
    }
    /// Removes the type `T` from this store if it exists.
    ///
    /// # Returns
    /// If the type `T` was stored, the inner instance of `T` is returned. Otherwise, `None`
    pub fn remove<T: Resource>(&mut self) -> Option<T> {
        Some(
            *self
                .storage
                .remove(&ResourceTypeId::of::<T>())?
                .into_inner()
                .downcast::<T>()
                .ok()?,
        )
    }
    /// Retrieve an immutable reference to `T` from the store if it exists. Otherwise, return `None`
    pub fn get<T: Resource>(&self) -> Option<Fetch<'_, T>> {
        Some(Fetch {
            inner: self.storage.get(&ResourceTypeId::of::<T>())?.get(),
            _marker: Default::default(),
        })
    }
    /// Retrieve a mutable reference to `T` from the store if it exists. Otherwise, return `None`
    pub fn get_mut<T: Resource>(&self) -> Option<FetchMut<'_, T>> {
        Some(FetchMut {
            inner: self.storage.get(&ResourceTypeId::of::<T>())?.get_mut(),
            _marker: Default::default(),
        })
    }
    /// Attempts to retrieve an immutable reference to `T` from the store. If it does not exist,
    /// the closure `f` is called to construct the object and it is then inserted into the store.
    pub fn get_or_insert_with<T: Resource, F: FnOnce() -> T>(
        &mut self,
        f: F,
    ) -> Option<Fetch<'_, T>> {
        // BUGFIX: previously this eagerly evaluated `f` (via
        // `self.get_or_insert((f)())`) even when `T` was already stored,
        // defeating the lazy `_with` form. The entry API only runs `f`
        // when the resource is actually missing.
        Some(Fetch {
            inner: self
                .storage
                .entry(ResourceTypeId::of::<T>())
                .or_insert_with(|| AtomicRefCell::new(Box::new(f())))
                .get(),
            _marker: Default::default(),
        })
    }
    /// Attempts to retrieve a mutable reference to `T` from the store. If it does not exist,
    /// the closure `f` is called to construct the object and it is then inserted into the store.
    pub fn get_mut_or_insert_with<T: Resource, F: FnOnce() -> T>(
        &mut self,
        f: F,
    ) -> Option<FetchMut<'_, T>> {
        // BUGFIX: same as `get_or_insert_with` — `f` is now only invoked
        // when the resource is missing.
        Some(FetchMut {
            inner: self
                .storage
                .entry(ResourceTypeId::of::<T>())
                .or_insert_with(|| AtomicRefCell::new(Box::new(f())))
                .get_mut(),
            _marker: Default::default(),
        })
    }
    /// Attempts to retrieve an immutable reference to `T` from the store. If it does not exist,
    /// the provided value is inserted and then a reference to it is returned.
    pub fn get_or_insert<T: Resource>(&mut self, value: T) -> Option<Fetch<'_, T>> {
        Some(Fetch {
            inner: self
                .storage
                .entry(ResourceTypeId::of::<T>())
                .or_insert_with(|| AtomicRefCell::new(Box::new(value)))
                .get(),
            _marker: Default::default(),
        })
    }
    /// Attempts to retrieve a mutable reference to `T` from the store. If it does not exist,
    /// the provided value is inserted and then a reference to it is returned.
    pub fn get_mut_or_insert<T: Resource>(&mut self, value: T) -> Option<FetchMut<'_, T>> {
        Some(FetchMut {
            inner: self
                .storage
                .entry(ResourceTypeId::of::<T>())
                .or_insert_with(|| AtomicRefCell::new(Box::new(value)))
                .get_mut(),
            _marker: Default::default(),
        })
    }
    /// Attempts to retrieve an immutable reference to `T` from the store. If it does not exist,
    /// the default constructor for `T` is called.
    ///
    /// `T` must implement `Default` for this method.
    pub fn get_or_default<T: Resource + Default>(&mut self) -> Option<Fetch<'_, T>> {
        Some(Fetch {
            inner: self
                .storage
                .entry(ResourceTypeId::of::<T>())
                .or_insert_with(|| AtomicRefCell::new(Box::new(T::default())))
                .get(),
            _marker: Default::default(),
        })
    }
    /// Attempts to retrieve a mutable reference to `T` from the store. If it does not exist,
    /// the default constructor for `T` is called.
    ///
    /// `T` must implement `Default` for this method.
    pub fn get_mut_or_default<T: Resource + Default>(&mut self) -> Option<FetchMut<'_, T>> {
        Some(FetchMut {
            inner: self
                .storage
                .entry(ResourceTypeId::of::<T>())
                .or_insert_with(|| AtomicRefCell::new(Box::new(T::default())))
                .get_mut(),
            _marker: Default::default(),
        })
    }
    /// Performs merging of two resource storages, which occurs during a world merge.
    /// This merge will retain any already-existant resources in the local world, while moving any
    /// new resources from the source world into this one, consuming the resources.
    pub fn merge(&mut self, mut other: Resources) {
        // Merge resources, retaining our local ones but moving in any non-existant ones
        for resource in other.storage.drain() {
            self.storage.entry(resource.0).or_insert(resource.1);
        }
    }
}
// The empty resource set: fetching is a no-op.
impl ResourceSet for () {
    type PreparedResources = ();
    unsafe fn fetch_unchecked(_: &Resources) {}
}
impl<T: Resource> ResourceSet for Read<T> {
    type PreparedResources = PreparedRead<T>;
    // Panics if `T` has not been inserted into `resources`.
    unsafe fn fetch_unchecked(resources: &Resources) -> Self::PreparedResources {
        let resource = resources
            .get::<T>()
            .unwrap_or_else(|| panic!("Failed to fetch resource!: {}", std::any::type_name::<T>()));
        PreparedRead::new(resource.deref() as *const T)
    }
}
impl<T: Resource> ResourceSet for Write<T> {
    type PreparedResources = PreparedWrite<T>;
    // Panics if `T` has not been inserted into `resources`.
    unsafe fn fetch_unchecked(resources: &Resources) -> Self::PreparedResources {
        let mut resource = resources
            .get_mut::<T>()
            .unwrap_or_else(|| panic!("Failed to fetch resource!: {}", std::any::type_name::<T>()));
        PreparedWrite::new(resource.deref_mut() as *mut T)
    }
}
// Implements `ResourceSet` for a tuple of resource sets by fetching each
// element in order and returning the tuple of prepared resources.
macro_rules! impl_resource_tuple {
    ( $( $ty: ident ),* ) => {
        #[allow(unused_parens, non_snake_case)]
        impl<$( $ty: ResourceSet ),*> ResourceSet for ($( $ty, )*)
        {
            type PreparedResources = ($( $ty::PreparedResources, )*);
            unsafe fn fetch_unchecked(resources: &Resources) -> Self::PreparedResources {
                ($( $ty::fetch_unchecked(resources), )*)
            }
        }
    };
}
// Generate `ResourceSet` impls for tuples of 1 (A) through 26 (A..Z) sets.
impl_resource_tuple!(A);
impl_resource_tuple!(A, B);
impl_resource_tuple!(A, B, C);
impl_resource_tuple!(A, B, C, D);
impl_resource_tuple!(A, B, C, D, E);
impl_resource_tuple!(A, B, C, D, E, F);
impl_resource_tuple!(A, B, C, D, E, F, G);
impl_resource_tuple!(A, B, C, D, E, F, G, H);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y);
impl_resource_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z);
#[cfg(test)]
mod tests {
    use super::*;
    // Smoke test: insert two resources, read them back, then remove one and
    // recover ownership of the inner value.
    #[test]
    fn simple_read_write_test() {
        let _ = tracing_subscriber::fmt::try_init();
        struct TestOne {
            value: String,
        }
        struct TestTwo {
            value: String,
        }
        let mut resources = Resources::default();
        resources.insert(TestOne {
            value: "poop".to_string(),
        });
        resources.insert(TestTwo {
            value: "balls".to_string(),
        });
        assert_eq!(resources.get::<TestOne>().unwrap().value, "poop");
        assert_eq!(resources.get::<TestTwo>().unwrap().value, "balls");
        // test re-ownership
        let owned = resources.remove::<TestTwo>();
        assert_eq!(owned.unwrap().value, "balls")
    }
}

View file

@ -1,637 +0,0 @@
use crate::{
resource::{ResourceTypeId, Resources},
system::SystemId,
};
use bit_set::BitSet;
use legion_core::{
borrow::RefMut,
command::CommandBuffer,
storage::ComponentTypeId,
world::{World, WorldId},
};
use std::cell::UnsafeCell;
#[cfg(feature = "par-schedule")]
use tracing::{span, trace, Level};
#[cfg(feature = "par-schedule")]
use std::sync::atomic::{AtomicUsize, Ordering};
#[cfg(feature = "par-schedule")]
use fxhash::{FxHashMap, FxHashSet};
#[cfg(feature = "par-schedule")]
use rayon::prelude::*;
#[cfg(feature = "par-schedule")]
use itertools::izip;
#[cfg(feature = "par-schedule")]
use std::iter::repeat;
/// Empty trait which defines a `System` as schedulable by the dispatcher - this requires that the
/// type is both `Send` and `Sync`.
///
/// This is automatically implemented for all types that implement `Runnable` which meet the requirements.
pub trait Schedulable: Runnable + Send + Sync {}
// Blanket impl: any `Send + Sync` `Runnable` is schedulable.
impl<T> Schedulable for T where T: Runnable + Send + Sync {}
/// Describes which archetypes a system declares access to.
pub enum ArchetypeAccess {
    /// All archetypes.
    All,
    /// Some archetypes.
    // The bitset is indexed by archetype; presumably the archetype's index
    // within world storage — confirm against `Runnable::prepare` impls.
    Some(BitSet),
}
impl ArchetypeAccess {
    /// Returns `true` when the two access declarations cannot touch the same
    /// archetype. `All` overlaps with everything, so disjointness is only
    /// possible between two explicit bitsets.
    pub fn is_disjoint(&self, other: &ArchetypeAccess) -> bool {
        match (self, other) {
            (Self::Some(mine), Self::Some(theirs)) => mine.is_disjoint(theirs),
            _ => false,
        }
    }
}
/// Trait describing a schedulable type. This is implemented by `System`
pub trait Runnable {
    /// Gets the name of the system.
    fn name(&self) -> &SystemId;
    /// Gets the resources and component types read by the system.
    fn reads(&self) -> (&[ResourceTypeId], &[ComponentTypeId]);
    /// Gets the resources and component types written by the system.
    fn writes(&self) -> (&[ResourceTypeId], &[ComponentTypeId]);
    /// Prepares the system for execution against a world.
    fn prepare(&mut self, world: &World);
    /// Gets the set of archetypes the system will access when run,
    /// as determined when the system was last prepared.
    fn accesses_archetypes(&self) -> &ArchetypeAccess;
    /// Runs the system.
    ///
    /// # Safety
    ///
    /// The shared references to world and resources may result in
    /// unsound mutable aliasing if other code is accessing the same components or
    /// resources as this system. Prefer to use `run` when possible.
    unsafe fn run_unsafe(&mut self, world: &World, resources: &Resources);
    /// Gets the system's command buffer.
    fn command_buffer_mut(&self, world: WorldId) -> Option<RefMut<CommandBuffer>>;
    /// Runs the system.
    ///
    /// Safe wrapper over `run_unsafe`: the exclusive `&mut` borrows rule out
    /// concurrent access to the same world or resources.
    fn run(&mut self, world: &mut World, resources: &mut Resources) {
        unsafe { self.run_unsafe(world, resources) };
    }
}
/// Executes a sequence of systems, potentially in parallel, and then commits their command buffers.
///
/// Systems are provided in execution order. When the `par-schedule` feature is enabled, the `Executor`
/// may run some systems in parallel. The order in which side-effects (e.g. writes to resources
/// or entities) are observed is maintained.
pub struct Executor {
    systems: Vec<SystemBox>,
    // Fields below exist only for the parallel scheduler. Per `Executor::new`
    // they record, for each system, the indices of later systems that depend
    // on it (via resource access statically, via archetype access
    // dynamically), plus atomic counters used while dispatching a run.
    #[cfg(feature = "par-schedule")]
    static_dependants: Vec<Vec<usize>>,
    #[cfg(feature = "par-schedule")]
    dynamic_dependants: Vec<Vec<usize>>,
    #[cfg(feature = "par-schedule")]
    static_dependency_counts: Vec<AtomicUsize>,
    #[cfg(feature = "par-schedule")]
    awaiting: Vec<AtomicUsize>,
}
// Interior-mutability cell holding one boxed system.
struct SystemBox(UnsafeCell<Box<dyn Schedulable>>);
// NOT SAFE:
// This type is only safe to use as Send and Sync within
// the constraints of how it is used inside Executor
unsafe impl Send for SystemBox {}
unsafe impl Sync for SystemBox {}
impl SystemBox {
    // Safety: caller must ensure no mutable access via `get_mut` is live
    // (upheld by the Executor's scheduling constraints).
    #[cfg(feature = "par-schedule")]
    unsafe fn get(&self) -> &dyn Schedulable { std::ops::Deref::deref(&*self.0.get()) }
    // Safety: caller must ensure this is the only live access to the system
    // (upheld by the Executor's scheduling constraints).
    #[allow(clippy::mut_from_ref)]
    unsafe fn get_mut(&self) -> &mut dyn Schedulable {
        std::ops::DerefMut::deref_mut(&mut *self.0.get())
    }
}
impl Executor {
    /// Constructs a new executor for all systems to be run in a single stage.
    ///
    /// Systems are provided in the order in which side-effects (e.g. writes to resources or entities)
    /// are to be observed.
    #[cfg(not(feature = "par-schedule"))]
    pub fn new(systems: Vec<Box<dyn Schedulable>>) -> Self {
        Self {
            systems: systems
                .into_iter()
                .map(|s| SystemBox(UnsafeCell::new(s)))
                .collect(),
        }
    }
    /// Constructs a new executor for all systems to be run in a single stage.
    ///
    /// Systems are provided in the order in which side-effects (e.g. writes to resources or entities)
    /// are to be observed.
    ///
    /// Builds the static dependency graph up front from each system's declared
    /// resource accesses; component-based dependencies are recorded separately
    /// and resolved against archetypes before each run.
    #[cfg(feature = "par-schedule")]
    #[allow(clippy::cognitive_complexity)]
    // TODO: we should break this up
    pub fn new(systems: Vec<Box<dyn Schedulable>>) -> Self {
        if systems.len() > 1 {
            let mut static_dependency_counts = Vec::with_capacity(systems.len());
            let mut static_dependants: Vec<Vec<_>> =
                repeat(Vec::with_capacity(64)).take(systems.len()).collect();
            let mut dynamic_dependants: Vec<Vec<_>> =
                repeat(Vec::with_capacity(64)).take(systems.len()).collect();
            // For each resource/component type, the index of the last system to
            // read or write it, so later systems depend on the correct system.
            let mut resource_last_mutated =
                FxHashMap::<ResourceTypeId, usize>::with_capacity_and_hasher(
                    64,
                    Default::default(),
                );
            let mut resource_last_read =
                FxHashMap::<ResourceTypeId, usize>::with_capacity_and_hasher(
                    64,
                    Default::default(),
                );
            let mut component_last_mutated =
                FxHashMap::<ComponentTypeId, usize>::with_capacity_and_hasher(
                    64,
                    Default::default(),
                );
            let mut component_last_read =
                FxHashMap::<ComponentTypeId, usize>::with_capacity_and_hasher(
                    64,
                    Default::default(),
                );
            for (i, system) in systems.iter().enumerate() {
                let span = span!(
                    Level::TRACE,
                    "Building system dependencies",
                    system = %system.name(),
                    index = i,
                );
                let _guard = span.enter();
                let (read_res, read_comp) = system.reads();
                let (write_res, write_comp) = system.writes();
                // find resource access dependencies
                let mut dependencies = FxHashSet::with_capacity_and_hasher(64, Default::default());
                for res in read_res {
                    trace!(resource = ?res, "Read resource");
                    if let Some(n) = resource_last_mutated.get(res) {
                        trace!(system_index = n, "Added write dependency");
                        dependencies.insert(*n);
                    }
                    resource_last_read.insert(*res, i);
                }
                for res in write_res {
                    trace!(resource = ?res, "Write resource");
                    // Writes have to be exclusive, so we are dependent on reads too
                    if let Some(n) = resource_last_read.get(res) {
                        trace!(system_index = n, "Added read dependency");
                        dependencies.insert(*n);
                    }
                    if let Some(n) = resource_last_mutated.get(res) {
                        trace!(system_index = n, "Added write dependency");
                        dependencies.insert(*n);
                    }
                    resource_last_mutated.insert(*res, i);
                }
                static_dependency_counts.push(AtomicUsize::from(dependencies.len()));
                trace!(dependants = ?dependencies, "Computed static dependants");
                for dep in dependencies {
                    static_dependants[dep].push(i);
                }
                // find component access dependencies
                let mut comp_dependencies = FxHashSet::default();
                for comp in write_comp {
                    // Writes have to be exclusive, so we are dependent on reads too
                    trace!(component = ?comp, "Write component");
                    if let Some(n) = component_last_read.get(comp) {
                        trace!(system_index = n, "Added read dependency");
                        comp_dependencies.insert(*n);
                    }
                    if let Some(n) = component_last_mutated.get(comp) {
                        trace!(system_index = n, "Added write dependency");
                        comp_dependencies.insert(*n);
                    }
                    component_last_mutated.insert(*comp, i);
                }
                // Do reads after writes to ensure we don't overwrite last_read
                for comp in read_comp {
                    trace!(component = ?comp, "Read component");
                    if let Some(n) = component_last_mutated.get(comp) {
                        trace!(system_index = n, "Added write dependency");
                        comp_dependencies.insert(*n);
                    }
                    component_last_read.insert(*comp, i);
                }
                trace!(dependants = ?comp_dependencies, "Computed dynamic dependants");
                for dep in comp_dependencies {
                    if dep != i {
                        // don't depend on ourselves
                        dynamic_dependants[dep].push(i);
                    }
                }
            }
            trace!(
                ?static_dependants,
                ?dynamic_dependants,
                "Computed system dependencies"
            );
            let mut awaiting = Vec::with_capacity(systems.len());
            systems
                .iter()
                .for_each(|_| awaiting.push(AtomicUsize::new(0)));
            Executor {
                awaiting,
                static_dependants,
                dynamic_dependants,
                static_dependency_counts,
                systems: systems
                    .into_iter()
                    .map(|s| SystemBox(UnsafeCell::new(s)))
                    .collect(),
            }
        } else {
            // Zero or one system: no dependency tracking is required.
            Executor {
                awaiting: Vec::with_capacity(0),
                static_dependants: Vec::with_capacity(0),
                dynamic_dependants: Vec::with_capacity(0),
                static_dependency_counts: Vec::with_capacity(0),
                systems: systems
                    .into_iter()
                    .map(|s| SystemBox(UnsafeCell::new(s)))
                    .collect(),
            }
        }
    }
    /// Converts this executor into a vector of its component systems.
    pub fn into_vec(self) -> Vec<Box<dyn Schedulable>> {
        self.systems.into_iter().map(|s| s.0.into_inner()).collect()
    }
    /// Executes all systems and then flushes their command buffers.
    pub fn execute(&mut self, world: &mut World, resources: &mut Resources) {
        self.run_systems(world, resources);
        self.flush_command_buffers(world);
    }
    /// Executes all systems sequentially.
    ///
    /// Only enabled when `par-schedule` is disabled.
    #[cfg(not(feature = "par-schedule"))]
    pub fn run_systems(&mut self, world: &mut World, resources: &mut Resources) {
        self.systems.iter_mut().for_each(|system| {
            // safety: systems are exclusive due to &mut self
            let system = unsafe { system.get_mut() };
            system.run(world, resources);
        });
    }
    /// Executes all systems, potentially in parallel.
    ///
    /// Ordering is retained in so far as the order of observed resource and component
    /// accesses is maintained.
    ///
    /// Call from within `rayon::ThreadPool::install()` to execute within a specific thread pool.
    #[cfg(feature = "par-schedule")]
    pub fn run_systems(&mut self, world: &mut World, resources: &mut Resources) {
        // `rayon::join` moves execution onto the rayon thread pool.
        rayon::join(
            || {},
            || {
                match self.systems.len() {
                    1 => {
                        // safety: we have exclusive access to all systems, world and resources here
                        unsafe { self.systems[0].get_mut().run(world, resources) };
                    }
                    _ => {
                        let systems = &mut self.systems;
                        let static_dependency_counts = &self.static_dependency_counts;
                        let awaiting = &mut self.awaiting;
                        // prepare all systems - archetype filters are pre-executed here
                        systems
                            .par_iter_mut()
                            .for_each(|sys| unsafe { sys.get_mut() }.prepare(world));
                        // determine dynamic dependencies
                        izip!(
                            systems.iter(),
                            self.static_dependants.iter_mut(),
                            self.dynamic_dependants.iter_mut()
                        )
                        .par_bridge()
                        .for_each(|(sys, static_dep, dyn_dep)| {
                            // safety: systems is held exclusively, and we are only reading each system
                            let archetypes = unsafe { sys.get() }.accesses_archetypes();
                            for i in (0..dyn_dep.len()).rev() {
                                let dep = dyn_dep[i];
                                let other = unsafe { systems[dep].get() };
                                // if the archetype sets intersect,
                                // then we can move the dynamic dependant into the static dependants set
                                if !other.accesses_archetypes().is_disjoint(archetypes) {
                                    static_dep.push(dep);
                                    dyn_dep.swap_remove(i);
                                    static_dependency_counts[dep].fetch_add(1, Ordering::Relaxed);
                                }
                            }
                        });
                        // initialize dependency tracking
                        for (i, count) in static_dependency_counts.iter().enumerate() {
                            awaiting[i].store(count.load(Ordering::Relaxed), Ordering::Relaxed);
                        }
                        let awaiting = &self.awaiting;
                        // execute all systems with no outstanding dependencies
                        (0..systems.len())
                            .filter(|i| awaiting[*i].load(Ordering::SeqCst) == 0)
                            .for_each(|i| {
                                // safety: we are at the root of the execution tree, so we know each
                                // index is exclusive here
                                unsafe { self.run_recursive(i, world, resources) };
                            });
                    }
                }
            },
        );
    }
    /// Flushes the recorded command buffers for all systems.
    pub fn flush_command_buffers(&mut self, world: &mut World) {
        self.systems.iter().for_each(|system| {
            // safety: systems are exclusive due to &mut self
            let system = unsafe { system.get_mut() };
            if let Some(mut cmd) = system.command_buffer_mut(world.id()) {
                cmd.write(world);
            }
        });
    }
    /// Recursively execute through the generated dependency cascade and exhaust it.
    ///
    /// # Safety
    ///
    /// Ensure the system indexed by `i` is only accessed once.
    #[cfg(feature = "par-schedule")]
    unsafe fn run_recursive(&self, i: usize, world: &World, resources: &Resources) {
        // safety: the caller ensures nothing else is accessing systems[i]
        self.systems[i].get_mut().run_unsafe(world, resources);
        self.static_dependants[i].par_iter().for_each(|dep| {
            // The completer that sees a remaining count of 1 claims the dependant
            // (marking it with usize::MAX) and runs it; all others just decrement.
            match self.awaiting[*dep].compare_exchange(
                1,
                std::usize::MAX,
                Ordering::Relaxed,
                Ordering::Relaxed,
            ) {
                Ok(_) => {
                    // safety: each dependency is unique, so run_recursive is safe to call
                    self.run_recursive(*dep, world, resources);
                }
                Err(_) => {
                    self.awaiting[*dep].fetch_sub(1, Ordering::Relaxed);
                }
            }
        });
    }
}
/// A factory for `Schedule`.
pub struct Builder {
    // Completed schedule steps (system batches, flushes, thread-local functions).
    steps: Vec<Step>,
    // Systems added since the last step boundary; they are batched into a
    // single `Executor` when the executor is finalized.
    accumulator: Vec<Box<dyn Schedulable>>,
}
impl Builder {
    /// Adds a system to the schedule.
    ///
    /// Systems added between flushes are batched into a single `Executor`.
    pub fn add_system<T: Into<Box<dyn Schedulable>>>(mut self, system: T) -> Self {
        self.accumulator.push(system.into());
        self
    }
    /// Waits for executing systems to complete, and then flushes all outstanding system
    /// command buffers.
    pub fn flush(mut self) -> Self {
        self.finalize_executor();
        self.steps.push(Step::FlushCmdBuffers);
        self
    }
    /// Moves any accumulated systems into a new `Executor` step.
    fn finalize_executor(&mut self) {
        if !self.accumulator.is_empty() {
            // Take the accumulated systems, leaving an empty Vec behind.
            let systems = std::mem::replace(&mut self.accumulator, Vec::new());
            let executor = Executor::new(systems);
            self.steps.push(Step::Systems(executor));
        }
    }
    /// Adds a thread local function to the schedule. This function will be executed on the main thread.
    pub fn add_thread_local_fn<F: FnMut(&mut World, &mut Resources) + 'static>(
        mut self,
        f: F,
    ) -> Self {
        // A thread-local step is a stage boundary: close off any pending executor first.
        self.finalize_executor();
        self.steps.push(Step::ThreadLocalFn(
            Box::new(f) as Box<dyn FnMut(&mut World, &mut Resources)>
        ));
        self
    }
    /// Adds a thread local system to the schedule. This system will be executed on the main thread.
    pub fn add_thread_local<S: Into<Box<dyn Runnable>>>(self, system: S) -> Self {
        let mut system = system.into();
        self.add_thread_local_fn(move |world, resources| system.run(world, resources))
    }
    /// Finalizes the builder into a `Schedule`.
    pub fn build(self) -> Schedule { self.into() }
}
impl Default for Builder {
fn default() -> Self {
Self {
steps: Vec::new(),
accumulator: Vec::new(),
}
}
}
/// A step in a schedule.
pub enum Step {
    /// A batch of systems, executed together as one stage.
    Systems(Executor),
    /// Flush system command buffers.
    FlushCmdBuffers,
    /// A thread local function, executed on the calling thread.
    ThreadLocalFn(Box<dyn FnMut(&mut World, &mut Resources)>),
}
/// A schedule of systems for execution.
///
/// # Examples
///
/// ```rust
/// # use legion_core::prelude::*;
/// # use legion_systems::prelude::*;
/// # let find_collisions = SystemBuilder::new("find_collisions").build(|_,_,_,_| {});
/// # let calculate_acceleration = SystemBuilder::new("calculate_acceleration").build(|_,_,_,_| {});
/// # let update_positions = SystemBuilder::new("update_positions").build(|_,_,_,_| {});
/// let mut world = World::new();
/// let mut resources = Resources::default();
/// let mut schedule = Schedule::builder()
///     .add_system(find_collisions)
///     .flush()
///     .add_system(calculate_acceleration)
///     .add_system(update_positions)
///     .build();
///
/// schedule.execute(&mut world, &mut resources);
/// ```
pub struct Schedule {
    // Steps in execution order, run sequentially by `execute`.
    steps: Vec<Step>,
}
impl Schedule {
    /// Creates a new schedule builder.
    pub fn builder() -> Builder { Builder::default() }
    /// Executes all of the steps in the schedule.
    pub fn execute(&mut self, world: &mut World, resources: &mut Resources) {
        // Executors whose command buffers have not yet been written to the world.
        let mut pending_flush: Vec<&mut Executor> = Vec::new();
        for step in self.steps.iter_mut() {
            match step {
                Step::Systems(executor) => {
                    executor.run_systems(world, resources);
                    pending_flush.push(executor);
                }
                Step::FlushCmdBuffers => {
                    for executor in pending_flush.drain(..) {
                        executor.flush_command_buffers(world);
                    }
                }
                Step::ThreadLocalFn(function) => function(world, resources),
            }
        }
    }
    /// Converts the schedule into a vector of steps.
    pub fn into_vec(self) -> Vec<Step> { self.steps }
}
impl From<Builder> for Schedule {
fn from(builder: Builder) -> Self {
Self {
steps: builder.flush().steps,
}
}
}
impl From<Vec<Step>> for Schedule {
fn from(steps: Vec<Step>) -> Self { Self { steps } }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::prelude::*;
    use itertools::sorted;
    use legion_core::prelude::*;
    use std::sync::{Arc, Mutex};
    // Systems that all write the same resource conflict, so the scheduler
    // must run them sequentially in submission order.
    #[test]
    fn execute_in_order() {
        let universe = Universe::new();
        let mut world = universe.create_world();
        #[derive(Default)]
        struct Resource;
        let mut resources = Resources::default();
        resources.insert(Resource);
        // Shared log recording the order in which systems actually ran.
        let order = Arc::new(Mutex::new(Vec::new()));
        let order_clone = order.clone();
        let system_one = SystemBuilder::new("one")
            .write_resource::<Resource>()
            .build(move |_, _, _, _| order_clone.lock().unwrap().push(1usize));
        let order_clone = order.clone();
        let system_two = SystemBuilder::new("two")
            .write_resource::<Resource>()
            .build(move |_, _, _, _| order_clone.lock().unwrap().push(2usize));
        let order_clone = order.clone();
        let system_three = SystemBuilder::new("three")
            .write_resource::<Resource>()
            .build(move |_, _, _, _| order_clone.lock().unwrap().push(3usize));
        let mut schedule = Schedule::builder()
            .add_system(system_one)
            .add_system(system_two)
            .add_system(system_three)
            .build();
        schedule.execute(&mut world, &mut resources);
        // The observed order must already be sorted, i.e. 1, 2, 3.
        let order = order.lock().unwrap();
        let sorted: Vec<usize> = sorted(order.clone()).collect();
        assert_eq!(*order, sorted);
    }
    // Command-buffer inserts must not be visible until a flush step runs:
    // system two (before flush) sees no entities, system three sees one.
    #[test]
    fn flush() {
        let universe = Universe::new();
        let mut world = universe.create_world();
        let mut resources = Resources::default();
        #[derive(Clone, Copy, Debug, PartialEq)]
        struct TestComp(f32, f32, f32);
        let system_one = SystemBuilder::new("one").build(move |cmd, _, _, _| {
            cmd.insert((), vec![(TestComp(0., 0., 0.),)]);
        });
        let system_two = SystemBuilder::new("two")
            .with_query(Write::<TestComp>::query())
            .build(move |_, world, _, query| assert_eq!(0, query.iter_mut(world).count()));
        let system_three = SystemBuilder::new("three")
            .with_query(Write::<TestComp>::query())
            .build(move |_, world, _, query| assert_eq!(1, query.iter_mut(world).count()));
        let mut schedule = Schedule::builder()
            .add_system(system_one)
            .add_system(system_two)
            .flush()
            .add_system(system_three)
            .build();
        schedule.execute(&mut world, &mut resources);
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,172 +0,0 @@
# Legion
[![Build Status][build_img]][build_lnk] [![Crates.io][crates_img]][crates_lnk] [![Docs.rs][doc_img]][doc_lnk]
[build_img]: https://github.com/TomGillen/legion/workflows/CI/badge.svg
[build_lnk]: https://github.com/TomGillen/legion/actions
[crates_img]: https://img.shields.io/crates/v/legion.svg
[crates_lnk]: https://crates.io/crates/legion
[doc_img]: https://docs.rs/legion/badge.svg
[doc_lnk]: https://docs.rs/legion
Legion aims to be a feature rich high performance ECS library for Rust game projects with minimal boilerplate.
## Bevy Fork Info
This is a fork that enables dynamic plugin loading in bevy.
Here are the changes made:
* ResourceTypeId, ComponentTypeId, TagTypeId use static str (std::any::type_name) instead of TypeId (std::any::TypeId is not constant across rust binaries)
* Implement "DowncastTypeName" to allow downcasting based on type name
## Benchmarks
Based on the [ecs_bench](https://github.com/lschmierer/ecs_bench) project.
![](bench.png)
## Getting Started
```rust
use legion::prelude::*;
// Define our entity data types
#[derive(Clone, Copy, Debug, PartialEq)]
struct Position {
x: f32,
y: f32,
}
#[derive(Clone, Copy, Debug, PartialEq)]
struct Velocity {
dx: f32,
dy: f32,
}
#[derive(Clone, Copy, Debug, PartialEq)]
struct Model(usize);
#[derive(Clone, Copy, Debug, PartialEq)]
struct Static;
// Create a world to store our entities
let universe = Universe::new();
let mut world = universe.create_world();
// Create entities with `Position` and `Velocity` data
world.insert(
(),
(0..999).map(|_| (Position { x: 0.0, y: 0.0 }, Velocity { dx: 0.0, dy: 0.0 }))
);
// Create entities with `Position` data and a shared `Model` data, tagged as `Static`
// Shared data values are shared across many entities,
// and enable further batch processing and filtering use cases
let entities: &[Entity] = world.insert(
(Model(5), Static),
(0..999).map(|_| (Position { x: 0.0, y: 0.0 },))
);
// Create a query which finds all `Position` and `Velocity` components
let query = <(Write<Position>, Read<Velocity>)>::query();
// Iterate through all entities that match the query in the world
for (mut pos, vel) in query.iter(&mut world) {
pos.x += vel.dx;
pos.y += vel.dy;
}
```
## Features
Legion aims to be a more feature-complete game-ready ECS than many of its predecessors.
### Advanced Query Filters
The query API can do much more than pull entity data out of the world.
Additional data type filters:
```rust
// It is possible to specify that entities must contain data beyond that being fetched
let query = Read::<Position>::query()
.filter(component::<Velocity>());
for position in query.iter(&mut world) {
// these entities also have `Velocity`
}
```
Filter boolean operations:
```rust
// Filters can be combined with boolean operators
let query = Read::<Position>::query()
.filter(tag::<Static>() | !component::<Velocity>());
for position in query.iter(&mut world) {
// these entities are also either marked as `Static`, or do *not* have a `Velocity`
}
```
Filter by shared data value:
```rust
// Filters can filter by specific shared data values
let query = Read::<Position>::query()
.filter(tag_value(&Model(3)));
for position in query.iter(&mut world) {
// these entities all have shared data value `Model(3)`
}
```
Change detection:
```rust
// Queries can perform coarse-grained change detection, rejecting entities whose data
// has not changed since the last time the query was iterated.
let query = <(Read<Position>, Shared<Model>)>::query()
.filter(changed::<Position>());
for (pos, model) in query.iter(&mut world) {
// entities who have changed position
}
```
### Content Streaming
Entities can be loaded and initialized in a background `World` on separate threads and then
when ready, merged into the main `World` near instantaneously.
```rust
let universe = Universe::new();
let mut world_a = universe.create_world();
let mut world_b = universe.create_world();
// Merge all entities from `world_b` into `world_a`
// Entity IDs are guaranteed to be unique across worlds and will
// remain unchanged across the merge.
world_a.merge(world_b);
```
### Chunk Iteration
Entity data is allocated in blocks called "chunks", each approximately containing 64KiB of data. The query API exposes each chunk via `iter_chunk`. As all entities in a chunk are guaranteed to contain the same set of entity data and shared data values, it is possible to do batch processing via the chunk API.
```rust
fn render_instanced(model: &Model, transforms: &[Transform]) {
// pass `transforms` pointer to graphics API to load into constant buffer
// issue instanced draw call with model data and transforms
}
let query = Read::<Transform>::query()
.filter(tag::<Model>());
for chunk in query.iter_chunks_mut(&mut world) {
// get the chunk's model
let model: &Model = chunk.tag().unwrap();
// get a (runtime borrow checked) slice of transforms
let transforms = chunk.components::<Transform>().unwrap();
// give the model and transform slice to our renderer
render_instanced(model, &transforms);
}
```

View file

@ -1 +0,0 @@
fn_single_line = true

View file

@ -1,236 +0,0 @@
//! Legion aims to be a feature rich high performance ECS library for Rust game projects with minimal boilerplate.
//!
//! # Getting Started
//!
//! ```rust
//! use legion::prelude::*;
//!
//! // Define our entity data types
//! #[derive(Clone, Copy, Debug, PartialEq)]
//! struct Position {
//! x: f32,
//! y: f32,
//! }
//!
//! #[derive(Clone, Copy, Debug, PartialEq)]
//! struct Velocity {
//! dx: f32,
//! dy: f32,
//! }
//!
//! #[derive(Clone, Copy, Debug, PartialEq)]
//! struct Model(usize);
//!
//! #[derive(Clone, Copy, Debug, PartialEq)]
//! struct Static;
//!
//! // Create a world to store our entities
//! let universe = Universe::new();
//! let mut world = universe.create_world();
//!
//! // Create entities with `Position` and `Velocity` data
//! world.insert(
//! (),
//! (0..999).map(|_| (Position { x: 0.0, y: 0.0 }, Velocity { dx: 0.0, dy: 0.0 }))
//! );
//!
//! // Create entities with `Position` data and a tagged with `Model` data and as `Static`
//! // Tags are shared across many entities, and enable further batch processing and filtering use cases
//! world.insert(
//! (Model(5), Static),
//! (0..999).map(|_| (Position { x: 0.0, y: 0.0 },))
//! );
//!
//! // Create a query which finds all `Position` and `Velocity` components
//! let mut query = <(Write<Position>, Read<Velocity>)>::query();
//!
//! // Iterate through all entities that match the query in the world
//! for (mut pos, vel) in query.iter_mut(&mut world) {
//! pos.x += vel.dx;
//! pos.y += vel.dy;
//! }
//! ```
//!
//! ### Advanced Query Filters
//!
//! The query API can do much more than pull entity data out of the world.
//!
//! Additional data type filters:
//!
//! ```rust
//! # use legion::prelude::*;
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Position {
//! # x: f32,
//! # y: f32,
//! # }
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Velocity {
//! # dx: f32,
//! # dy: f32,
//! # }
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Model(usize);
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Static;
//! # let universe = Universe::new();
//! # let mut world = universe.create_world();
//! // It is possible to specify that entities must contain data beyond that being fetched
//! let mut query = Read::<Position>::query()
//! .filter(component::<Velocity>());
//! for position in query.iter(&mut world) {
//! // these entities also have `Velocity`
//! }
//! ```
//!
//! Filter boolean operations:
//!
//! ```rust
//! # use legion::prelude::*;
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Position {
//! # x: f32,
//! # y: f32,
//! # }
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Velocity {
//! # dx: f32,
//! # dy: f32,
//! # }
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Model(usize);
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Static;
//! # let universe = Universe::new();
//! # let mut world = universe.create_world();
//! // Filters can be combined with boolean operators
//! let mut query = Read::<Position>::query()
//! .filter(tag::<Static>() | !component::<Velocity>());
//! for position in query.iter(&mut world) {
//! // these entities are also either marked as `Static`, or do *not* have a `Velocity`
//! }
//! ```
//!
//! Filter by tag data value:
//!
//! ```rust
//! # use legion::prelude::*;
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Position {
//! # x: f32,
//! # y: f32,
//! # }
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Velocity {
//! # dx: f32,
//! # dy: f32,
//! # }
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Model(usize);
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Static;
//! # let universe = Universe::new();
//! # let mut world = universe.create_world();
//! // Filters can filter by specific tag values
//! let mut query = Read::<Position>::query()
//! .filter(tag_value(&Model(3)));
//! for position in query.iter(&mut world) {
//! // these entities all have tag value `Model(3)`
//! }
//! ```
//!
//! Change detection:
//!
//! ```rust
//! # use legion::prelude::*;
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Position {
//! # x: f32,
//! # y: f32,
//! # }
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Velocity {
//! # dx: f32,
//! # dy: f32,
//! # }
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Model(usize);
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Static;
//! # let universe = Universe::new();
//! # let mut world = universe.create_world();
//! // Queries can perform coarse-grained change detection, rejecting entities whose data
//! // has not changed since the last time the query was iterated.
//! let mut query = <(Read<Position>, Tagged<Model>)>::query()
//! .filter(changed::<Position>());
//! for (pos, model) in query.iter(&mut world) {
//! // entities who have changed position
//! }
//! ```
//!
//! ### Content Streaming
//!
//! Entities can be loaded and initialized in a background `World` on separate threads and then
//! when ready, merged into the main `World` near instantaneously.
//!
//! ```rust
//! # use legion::prelude::*;
//! let universe = Universe::new();
//! let mut world_a = universe.create_world();
//! let mut world_b = universe.create_world();
//!
//! // Merge all entities from `world_b` into `world_a`
//! // Entity IDs are guaranteed to be unique across worlds and will
//! // remain unchanged across the merge.
//! world_a.merge(world_b);
//! ```
//!
//! ### Chunk Iteration
//!
//! Entity data is allocated in blocks called "chunks", each approximately containing 64KiB of data.
//! The query API exposes each chunk via 'iter_chunk'. As all entities in a chunk are guaranteed to contain the same set of entity
//! data and shared data values, it is possible to do batch processing via the chunk API.
//!
//! ```rust
//! # use legion::prelude::*;
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Transform;
//! # #[derive(Clone, Copy, Debug, PartialEq)]
//! # struct Model(usize);
//! # let universe = Universe::new();
//! # let mut world = universe.create_world();
//! fn render_instanced(model: &Model, transforms: &[Transform]) {
//! // pass `transforms` pointer to graphics API to load into constant buffer
//! // issue instanced draw call with model data and transforms
//! }
//!
//! let mut query = Read::<Transform>::query()
//! .filter(tag::<Model>());
//!
//! for chunk in query.iter_chunks_mut(&mut world) {
//! // get the chunk's model
//! let model: &Model = chunk.tag().unwrap();
//!
//! // get a (runtime borrow checked) slice of transforms
//! let transforms = chunk.components::<Transform>().unwrap();
//!
//! // give the model and transform slice to our renderer
//! render_instanced(model, &transforms);
//! }
//! ```
//!
//! # Feature Flags
//!
//! * `par-iter`: Enables parallel APIs on queries (enabled by default).
//! * `par-schedule`: Configures system schedulers to try and run systems in parallel where possible (enabled by default).
//! * `log`: Configures `tracing` to redirect events to the `log` crate. This is a convenience feature for applications
//! that use `log` and do not wish to interact with `tracing`.
#![allow(dead_code)]
// Re-export the core ECS types (world, entities, queries) at the crate root.
pub use legion_core::*;
// Expose the systems/scheduling crate under a `systems` namespace.
pub use legion_systems as systems;
/// Convenience prelude re-exporting the most commonly used items from both crates.
pub mod prelude {
    pub use legion_core::prelude::*;
    pub use legion_systems::prelude::*;
}

View file

@ -1,32 +0,0 @@
use legion::iterator::*;
use legion::storage::SliceVec;
#[test]
fn slice_vec_iterate() {
    // Push three slices of differing lengths and expect them back in order.
    let mut slices = SliceVec::default();
    slices.push(vec![1, 2, 3, 4]);
    slices.push(vec![5, 6, 7, 8]);
    slices.push(vec![9, 10]);
    let mut it = slices.iter();
    assert_eq!(it.next(), Some(&[1, 2, 3, 4][..]));
    assert_eq!(it.next(), Some(&[5, 6, 7, 8][..]));
    assert_eq!(it.next(), Some(&[9, 10][..]));
    assert_eq!(it.next(), None);
}
#[test]
fn slice_vec_iterator_split() {
    // Splitting the iterator should yield the first slice on the left
    // and the remaining two slices on the right.
    let mut slices = SliceVec::default();
    slices.push(vec![1, 2, 3, 4]);
    slices.push(vec![5, 6, 7, 8]);
    slices.push(vec![9, 10]);
    let (mut left, mut right, left_len) = slices.iter().split();
    assert_eq!(1, left_len);
    assert_eq!(left.next(), Some(&[1, 2, 3, 4][..]));
    assert_eq!(left.next(), None);
    assert_eq!(right.next(), Some(&[5, 6, 7, 8][..]));
    assert_eq!(right.next(), Some(&[9, 10][..]));
    assert_eq!(right.next(), None);
}

View file

@ -1,608 +0,0 @@
#![allow(clippy::map_clone)]
use legion::prelude::*;
use std::collections::HashMap;
#[cfg(feature = "par-iter")]
use std::sync::atomic::{AtomicUsize, Ordering};
// Plain-old-data component types used as entity data throughout these tests.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Pos(f32, f32, f32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct Rot(f32, f32, f32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct Scale(f32, f32, f32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct Vel(f32, f32, f32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct Accel(f32, f32, f32);
// Tag types shared across groups of entities (hence the Eq + Hash derives).
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
struct Model(u32);
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
struct Static;
// A read query must visit every inserted entity exactly once, yielding the
// component data it was inserted with.
#[test]
fn query_read_entity_data() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    // Map each inserted entity to the component values it was created with.
    let mut expected = HashMap::<Entity, (Pos, Rot)>::new();
    for (i, e) in world.insert(shared, components.clone()).iter().enumerate() {
        if let Some((pos, rot)) = components.get(i) {
            expected.insert(*e, (*pos, *rot));
        }
    }
    let query = Read::<Pos>::query();
    let mut count = 0;
    for (entity, pos) in query.iter_entities_mut(&mut world) {
        assert_eq!(expected.get(&entity).unwrap().0, *pos);
        count += 1;
    }
    assert_eq!(components.len(), count);
}
// `TryRead` yields `Some` for entities with the component and `None` for
// those without; only one of the two inserted entities has a `Rot`.
#[test]
fn query_try_read_entity_data() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    world.insert((), Some((Pos(1., 2., 3.),)));
    world.insert((), Some((Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6))));
    let query = TryRead::<Rot>::query();
    let rots = query
        .iter(&world)
        .map(|x| x.map(|x| *x))
        .collect::<Vec<_>>();
    // Exactly one entity lacks a `Rot` component.
    assert_eq!(rots.iter().filter(|x| x.is_none()).count(), 1);
    // `flatten` drops the `None`s, leaving only the real rotation.
    assert_eq!(
        rots.iter().cloned().flatten().collect::<Vec<_>>(),
        &[Rot(0.4, 0.5, 0.6)]
    );
}
// `TryWrite` mutates the component only where it is present; the entity
// without `Rot` is skipped and the other is overwritten.
#[test]
fn query_try_write_entity_data() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    world.insert((), Some((Pos(1., 2., 3.),)));
    let entity = world.insert((), Some((Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6))))[0];
    let query = TryWrite::<Rot>::query();
    // `flatten` skips entities that have no `Rot` component.
    for mut x in query.iter_mut(&mut world).flatten() {
        *x = Rot(9.0, 9.0, 9.0);
    }
    assert_eq!(
        world.get_component::<Rot>(entity).map(|x| *x),
        Some(Rot(9.0, 9.0, 9.0))
    );
}
// Same scenario as `query_read_entity_data`; retained for the cached-query
// variant (the `.cached()` call is currently commented out).
#[test]
fn query_cached_read_entity_data() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    // Map each inserted entity to the component values it was created with.
    let mut expected = HashMap::<Entity, (Pos, Rot)>::new();
    for (i, e) in world.insert(shared, components.clone()).iter().enumerate() {
        if let Some((pos, rot)) = components.get(i) {
            expected.insert(*e, (*pos, *rot));
        }
    }
    let query = Read::<Pos>::query(); //.cached();
    let mut count = 0;
    for (entity, pos) in query.iter_entities_mut(&mut world) {
        assert_eq!(expected.get(&entity).unwrap().0, *pos);
        count += 1;
    }
    assert_eq!(components.len(), count);
}
// Parallel chunk iteration must still visit every entity exactly once with
// the data it was inserted with.
#[test]
#[cfg(feature = "par-iter")]
fn query_read_entity_data_par() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    // Map each inserted entity to the component values it was created with.
    let mut expected = HashMap::<Entity, (Pos, Rot)>::new();
    for (i, e) in world.insert(shared, components.clone()).iter().enumerate() {
        if let Some((pos, rot)) = components.get(i) {
            expected.insert(*e, (*pos, *rot));
        }
    }
    // Atomic counter: the per-chunk closure may run on multiple threads.
    let count = AtomicUsize::new(0);
    let query = Read::<Pos>::query();
    query.par_for_each_chunk_mut(&mut world, |mut chunk| {
        for (entity, pos) in chunk.iter_entities_mut() {
            assert_eq!(expected.get(&entity).unwrap().0, *pos);
            count.fetch_add(1, Ordering::SeqCst);
        }
    });
    assert_eq!(components.len(), count.load(Ordering::SeqCst));
}
// Parallel per-element iteration must invoke the closure once per entity.
#[test]
#[cfg(feature = "par-iter")]
fn query_read_entity_data_par_foreach() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    // Unlike the chunk test, only the visit count is checked here, so no
    // entity -> component map is needed.
    world.insert(shared, components.clone());
    // Atomic counter: the closure may run on multiple threads.
    let count = AtomicUsize::new(0);
    let query = Read::<Pos>::query();
    query.par_for_each_mut(&mut world, |_pos| {
        count.fetch_add(1, Ordering::SeqCst);
    });
    assert_eq!(components.len(), count.load(Ordering::SeqCst));
}
// A tuple query fetches multiple components per entity in a single iteration.
#[test]
fn query_read_entity_data_tuple() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    // Map each inserted entity to the component values it was created with.
    let mut expected = HashMap::<Entity, (Pos, Rot)>::new();
    for (i, e) in world.insert(shared, components.clone()).iter().enumerate() {
        if let Some((pos, rot)) = components.get(i) {
            expected.insert(*e, (*pos, *rot));
        }
    }
    let query = <(Read<Pos>, Read<Rot>)>::query();
    let mut count = 0;
    for (entity, (pos, rot)) in query.iter_entities_mut(&mut world) {
        assert_eq!(expected.get(&entity).unwrap().0, *pos);
        assert_eq!(expected.get(&entity).unwrap().1, *rot);
        count += 1;
    }
    assert_eq!(components.len(), count);
}
#[test]
fn query_write_entity_data() {
    // A Write<Pos> query yields the inserted values and permits mutation
    // through the returned reference.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();

    let tags = (Static, Model(5));
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let expected: HashMap<Entity, (Pos, Rot)> = world
        .insert(tags, data.clone())
        .iter()
        .copied()
        .zip(data.iter().copied())
        .collect();

    let query = Write::<Pos>::query();
    let mut visited = 0;
    for (entity, mut pos) in query.iter_entities_mut(&mut world) {
        assert_eq!(expected[&entity].0, *pos);
        visited += 1;
        // Writing through the query reference must compile and not panic.
        pos.0 = 0.0;
    }
    assert_eq!(data.len(), visited);
}
#[test]
fn query_write_entity_data_tuple() {
    // A (Write<Pos>, Write<Rot>) tuple query yields both components mutably.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();

    let tags = (Static, Model(5));
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let expected: HashMap<Entity, (Pos, Rot)> = world
        .insert(tags, data.clone())
        .iter()
        .copied()
        .zip(data.iter().copied())
        .collect();

    let query = <(Write<Pos>, Write<Rot>)>::query();
    let mut visited = 0;
    for (entity, (mut pos, mut rot)) in query.iter_entities_mut(&mut world) {
        let (exp_pos, exp_rot) = expected[&entity];
        assert_eq!(exp_pos, *pos);
        assert_eq!(exp_rot, *rot);
        visited += 1;
        // Both references must be writable.
        pos.0 = 0.0;
        rot.0 = 0.0;
    }
    assert_eq!(data.len(), visited);
}
#[test]
fn query_mixed_entity_data_tuple() {
    // A query mixing Read and Write accessors yields both components:
    // Pos read-only, Rot writable.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();

    let tags = (Static, Model(5));
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let expected: HashMap<Entity, (Pos, Rot)> = world
        .insert(tags, data.clone())
        .iter()
        .copied()
        .zip(data.iter().copied())
        .collect();

    let query = <(Read<Pos>, Write<Rot>)>::query();
    let mut visited = 0;
    for (entity, (pos, mut rot)) in query.iter_entities_mut(&mut world) {
        let (exp_pos, exp_rot) = expected[&entity];
        assert_eq!(exp_pos, *pos);
        assert_eq!(exp_rot, *rot);
        visited += 1;
        rot.0 = 0.0;
    }
    assert_eq!(data.len(), visited);
}
// A tuple query must only match entities whose archetype contains *all*
// queried components; entities holding only a subset are skipped. The
// previous version of this test was a verbatim copy of
// `query_mixed_entity_data_tuple` and never inserted a partially-matching
// entity, so it did not actually exercise partial matching.
#[test]
fn query_partial_match() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    // Remember the values assigned to the fully-matching entities.
    let mut expected = HashMap::<Entity, (Pos, Rot)>::new();
    for (i, e) in world.insert(shared, components.clone()).iter().enumerate() {
        if let Some((pos, rot)) = components.get(i) {
            expected.insert(*e, (*pos, *rot));
        }
    }
    // Entities with Pos but *no* Rot: a (Pos, Rot) query must not yield these.
    world.insert(shared, vec![(Pos(7., 8., 9.),), (Pos(10., 11., 12.),)]);
    let query = <(Read<Pos>, Write<Rot>)>::query();
    let mut count = 0;
    for (entity, (pos, mut rot)) in query.iter_entities_mut(&mut world) {
        assert_eq!(expected.get(&entity).unwrap().0, *pos);
        assert_eq!(expected.get(&entity).unwrap().1, *rot);
        count += 1;
        rot.0 = 0.0;
    }
    // Only the entities carrying both components are visited.
    assert_eq!(components.len(), count);
    // Sanity check: the Pos-only entities do exist and match on Pos alone.
    assert_eq!(components.len() + 2, Read::<Pos>::query().iter(&world).count());
}
#[test]
fn query_read_shared_data() {
    // A Tagged::<Static> query yields the shared tag value once per entity
    // carrying it.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();

    let tags = (Static, Model(5));
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    world.insert(tags, data.clone());

    let query = Tagged::<Static>::query();
    let visited = query
        .iter(&world)
        .inspect(|marker| assert_eq!(Static, **marker))
        .count();
    assert_eq!(data.len(), visited);
}
// A `changed` filter must match everything on the very first pass: the
// final assert shows every freshly inserted entity is visited, i.e.
// insertion itself counts as a change.
#[test]
fn query_on_changed_first() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    // Remember which component values were assigned to each entity.
    let mut expected = HashMap::<Entity, (Pos, Rot)>::new();
    for (i, e) in world.insert(shared, components.clone()).iter().enumerate() {
        if let Some((pos, rot)) = components.get(i) {
            expected.insert(*e, (*pos, *rot));
        }
    }
    // OR of two changed-filters; fresh data satisfies either side.
    let query = Read::<Pos>::query().filter(changed::<Pos>() | changed::<Rot>());
    let mut count = 0;
    for (entity, pos) in query.iter_entities_mut(&mut world) {
        assert_eq!(expected.get(&entity).unwrap().0, *pos);
        count += 1;
    }
    // Every inserted entity is seen on the first iteration.
    assert_eq!(components.len(), count);
}
// A `changed` filter matches all freshly inserted data on the first pass
// and — per the final assert — nothing on a second pass when no component
// was written in between.
#[test]
fn query_on_changed_no_changes() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    // Remember the values each entity was created with.
    let mut expected = HashMap::<Entity, (Pos, Rot)>::new();
    for (i, e) in world.insert(shared, components.clone()).iter().enumerate() {
        if let Some((pos, rot)) = components.get(i) {
            expected.insert(*e, (*pos, *rot));
        }
    }
    let query = Read::<Pos>::query().filter(changed::<Pos>());
    // First pass: insertion counts as a change, so everything matches.
    let mut count = 0;
    for (entity, pos) in query.iter_entities_mut(&mut world) {
        assert_eq!(expected.get(&entity).unwrap().0, *pos);
        count += 1;
    }
    assert_eq!(components.len(), count);
    // Second pass: nothing was mutated, so the filter rejects every entity.
    count = 0;
    for (entity, pos) in query.iter_entities_mut(&mut world) {
        assert_eq!(expected.get(&entity).unwrap().0, *pos);
        count += 1;
    }
    assert_eq!(0, count);
}
// Writing through a Write query re-marks the component as changed, so — per
// the final asserts — a second pass with the same `changed` filter sees all
// entities again, holding the values written in the first pass.
#[test]
fn query_on_changed_self_changes() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    // Remember the values each entity was created with.
    let mut expected = HashMap::<Entity, (Pos, Rot)>::new();
    for (i, e) in world.insert(shared, components.clone()).iter().enumerate() {
        if let Some((pos, rot)) = components.get(i) {
            expected.insert(*e, (*pos, *rot));
        }
    }
    let query = Write::<Pos>::query().filter(changed::<Pos>());
    // First pass: all entities match (insertion counts as a change); each Pos
    // is overwritten, which itself registers as a new change.
    let mut count = 0;
    for (entity, mut pos) in query.iter_entities_mut(&mut world) {
        assert_eq!(expected.get(&entity).unwrap().0, *pos);
        *pos = Pos(1., 1., 1.);
        count += 1;
    }
    assert_eq!(components.len(), count);
    // Second pass: the writes above re-trigger the filter for every entity.
    count = 0;
    for pos in query.iter_mut(&mut world) {
        assert_eq!(Pos(1., 1., 1.), *pos);
        count += 1;
    }
    assert_eq!(components.len(), count);
}
// Exercises `changed` filters combined with optional (TryRead) components:
// the asserts show only entities whose A or B was touched since the previous
// pass match, and TryRead yields None for absent components.
#[test]
fn query_try_with_changed_filter() {
    let _ = tracing_subscriber::fmt::try_init();
    // Component types local to this test.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Sum(f32);
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct A(f32);
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct B(f32);
    let universe = Universe::new();
    let mut world = universe.create_world();
    // One entity per combination: neither A nor B, A only, B only, both.
    let sum_entity = world.insert((), Some((Sum(0.),)))[0];
    let a_entity = world.insert((), Some((Sum(0.), A(1.))))[0];
    let b_entity = world.insert((), Some((Sum(0.), B(2.))))[0];
    let a_b_entity = world.insert((), Some((Sum(0.), A(1.), B(2.))))[0];
    let query =
        <(Write<Sum>, TryRead<A>, TryRead<B>)>::query().filter(changed::<A>() | changed::<B>());
    // First pass: insertion counts as a change, so the three entities that
    // have A and/or B all match; sum_entity has neither and is skipped.
    let mut count = 0;
    for (mut sum, a, b) in query.iter_mut(&mut world) {
        sum.0 = a.map_or(0., |x| x.0) + b.map_or(0., |x| x.0);
        count += 1;
    }
    assert_eq!(3, count);
    // Sums reflect which optional components each entity carries.
    assert_eq!(
        world.get_component::<Sum>(sum_entity).map(|x| *x),
        Some(Sum(0.))
    );
    assert_eq!(
        world.get_component::<Sum>(a_entity).map(|x| *x),
        Some(Sum(1.))
    );
    assert_eq!(
        world.get_component::<Sum>(b_entity).map(|x| *x),
        Some(Sum(2.))
    );
    assert_eq!(
        world.get_component::<Sum>(a_b_entity).map(|x| *x),
        Some(Sum(3.))
    );
    // Second pass: no A or B was modified, so nothing matches. (Writing Sum
    // in the first pass does not satisfy a filter on A/B.)
    count = 0;
    for (mut sum, a, b) in query.iter_mut(&mut world) {
        sum.0 = a.map_or(0., |x| x.0) + b.map_or(0., |x| x.0);
        count += 1;
    }
    assert_eq!(0, count);
    // Mutating B on one entity re-triggers the filter for that entity only.
    *world.get_component_mut::<B>(a_b_entity).unwrap() = B(3.0);
    count = 0;
    for (mut sum, a, b) in query.iter_mut(&mut world) {
        sum.0 = a.map_or(0., |x| x.0) + b.map_or(0., |x| x.0);
        count += 1;
    }
    assert_eq!(1, count);
    assert_eq!(
        world.get_component::<Sum>(a_b_entity).map(|x| *x),
        Some(Sum(4.))
    );
}
#[test]
fn query_iter_chunks_tag() {
    // Each chunk yielded by iter_chunks carries the tag values shared by
    // every entity stored in it.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    for i in 0..3u32 {
        world.insert((Static, Model(i)), vec![()]);
    }
    let query = <(Tagged<Static>, Tagged<Model>)>::query();
    for chunk in query.iter_chunks(&world) {
        let model = chunk.tag::<Model>().cloned();
        for entity in chunk.entities() {
            // The chunk-level tag must agree with the per-entity lookup.
            assert_eq!(world.get_tag::<Model>(*entity), model.as_ref());
        }
    }
}
#[test]
fn query_iter_tag() {
    // Tags yielded during iteration match what each entity was inserted
    // with; the u32 component mirrors its Model id, letting us cross-check.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    for i in 0..3u32 {
        world.insert((Static, Model(i)), vec![(i,)]);
    }
    let query = <(Tagged<Static>, Tagged<Model>, Read<u32>)>::query();
    for (s, m, c) in query.iter(&world) {
        assert_eq!(&Static, s);
        assert_eq!(&Model(*c), m);
    }
}

View file

@ -1,550 +0,0 @@
use legion::prelude::*;
use std::collections::HashSet;
// Simple value types used as components throughout the tests below.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Pos(f32, f32, f32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct Rot(f32, f32, f32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct Scale(f32, f32, f32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct Vel(f32, f32, f32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct Accel(f32, f32, f32);
// Used as tags in these tests; note the extra Eq + Hash derives.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
struct Model(u32);
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
struct Static;
#[test]
fn insert() {
    // Inserting two component tuples yields exactly two entities.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let tags = (1usize, 2f32, 3u16);
    let rows = vec![(4f32, 5u64, 6u16); 2];
    assert_eq!(2, world.insert(tags, rows).len());
}
// Inserted component values must be readable back via World::get_component.
#[test]
fn get_component() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let mut entities: Vec<Entity> = Vec::new();
    for e in world.insert(shared, components.clone()) {
        entities.push(*e);
    }
    for (i, e) in entities.iter().enumerate() {
        // The component type requested from get_component is inferred from
        // the `&x as &Pos` / `&x as &Rot` coercions in the asserts below.
        match world.get_component(*e) {
            Some(x) => assert_eq!(components.get(i).map(|(x, _)| x), Some(&x as &Pos)),
            None => assert_eq!(components.get(i).map(|(x, _)| x), None),
        }
        match world.get_component(*e) {
            Some(x) => assert_eq!(components.get(i).map(|(_, x)| x), Some(&x as &Rot)),
            None => assert_eq!(components.get(i).map(|(_, x)| x), None),
        }
    }
}
#[test]
fn get_component_wrong_type() {
    // Requesting a component type the entity does not have returns None.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let entity = world.insert((), vec![(0f64,)])[0];
    assert!(world.get_component::<i32>(entity).is_none());
}
#[test]
fn get_shared() {
    // Every entity inserted under a tag set exposes those tags via get_tag.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let tags = (Static, Model(5));
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let entities: Vec<Entity> = world.insert(tags, data).to_vec();
    for e in &entities {
        assert_eq!(Some(&Static), world.get_tag(*e));
        assert_eq!(Some(&Model(5)), world.get_tag(*e));
    }
}
#[test]
fn get_shared_wrong_type() {
    // Asking for a tag type the entity was not created with returns None.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let entity = world.insert((Static,), vec![(0f64,)])[0];
    assert!(world.get_tag::<Model>(entity).is_none());
}
#[test]
fn delete() {
    // All inserted entities start alive; delete() kills each one.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let tags = (Static, Model(5));
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let entities = world.insert(tags, data).to_vec();
    assert!(entities.iter().all(|e| world.is_alive(*e)));
    for e in &entities {
        world.delete(*e);
        assert!(!world.is_alive(*e));
    }
}
#[test]
fn delete_all() {
    // delete_all() must kill every entity in both the entity allocator and
    // component storage.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let tags = (Static, Model(5));
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let entities = world.insert(tags, data).to_vec();

    // Known to the allocator and present in storage before the purge.
    assert!(entities.iter().all(|e| world.is_alive(*e)));
    assert_eq!(2, <(Read<Pos>, Read<Rot>)>::query().iter(&world).count());

    world.delete_all();

    // Gone from both the allocator and storage afterwards.
    assert!(entities.iter().all(|e| !world.is_alive(*e)));
    assert_eq!(0, <(Read<Pos>, Read<Rot>)>::query().iter(&world).count());
}
// Deleting the last-inserted entity must leave the earlier entities alive
// with their component data intact.
#[test]
fn delete_last() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let mut entities: Vec<Entity> = Vec::new();
    for e in world.insert(shared, components.clone()) {
        entities.push(*e);
    }
    let last = *entities.last().unwrap();
    world.delete(last);
    assert_eq!(false, world.is_alive(last));
    // Every entity except the deleted one keeps its original components.
    for (i, e) in entities.iter().take(entities.len() - 1).enumerate() {
        assert_eq!(true, world.is_alive(*e));
        // The component type fetched is inferred from the `&x as &Pos` /
        // `&x as &Rot` coercions below.
        match world.get_component(*e) {
            Some(x) => assert_eq!(components.get(i).map(|(x, _)| x), Some(&x as &Pos)),
            None => assert_eq!(components.get(i).map(|(x, _)| x), None),
        }
        match world.get_component(*e) {
            Some(x) => assert_eq!(components.get(i).map(|(_, x)| x), Some(&x as &Rot)),
            None => assert_eq!(components.get(i).map(|(_, x)| x), None),
        }
    }
}
// Deleting the first-inserted entity must leave the later entities alive
// with their component data intact.
#[test]
fn delete_first() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let mut entities: Vec<Entity> = Vec::new();
    for e in world.insert(shared, components.clone()) {
        entities.push(*e);
    }
    let first = *entities.first().unwrap();
    world.delete(first);
    assert_eq!(false, world.is_alive(first));
    // `skip(1)` drops the deleted entity, so entity index i corresponds to
    // component row i + 1.
    for (i, e) in entities.iter().skip(1).enumerate() {
        assert_eq!(true, world.is_alive(*e));
        // The component type fetched is inferred from the `&x as &Pos` /
        // `&x as &Rot` coercions below.
        match world.get_component(*e) {
            Some(x) => assert_eq!(components.get(i + 1).map(|(x, _)| x), Some(&x as &Pos)),
            None => assert_eq!(components.get(i + 1).map(|(x, _)| x), None),
        }
        match world.get_component(*e) {
            Some(x) => assert_eq!(components.get(i + 1).map(|(_, x)| x), Some(&x as &Rot)),
            None => assert_eq!(components.get(i + 1).map(|(_, x)| x), None),
        }
    }
}
// Merging world_2 into world_1: per the asserts below, world_2's entities
// remain addressable by their original ids in world_1 and keep their
// component values.
#[test]
fn merge() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world_1 = universe.create_world();
    let mut world_2 = universe.create_world();
    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let mut world_1_entities: Vec<Entity> = Vec::new();
    for e in world_1.insert(shared, components.clone()) {
        world_1_entities.push(*e);
    }
    let mut world_2_entities: Vec<Entity> = Vec::new();
    for e in world_2.insert(shared, components.clone()) {
        world_2_entities.push(*e);
    }
    // Consumes world_2, moving its contents into world_1.
    world_1.merge(world_2);
    for (i, e) in world_2_entities.iter().enumerate() {
        assert!(world_1.is_alive(*e));
        let (pos, rot) = components.get(i).unwrap();
        // The component type fetched is inferred from the `as &Pos` /
        // `as &Rot` coercions.
        assert_eq!(pos, &world_1.get_component(*e).unwrap() as &Pos);
        assert_eq!(rot, &world_1.get_component(*e).unwrap() as &Rot);
    }
}
#[test]
fn mutate_add_component() {
    // Adding a component moves the entity into an archetype matched by
    // queries that require the new component.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let tags = (Static, Model(5));
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let entities = world.insert(tags, data).to_vec();
    let without_scale = <(Read<Pos>, Read<Rot>)>::query();
    let with_scale = <(Read<Pos>, Read<Rot>, Read<Scale>)>::query();

    assert_eq!(3, without_scale.iter(&world).count());
    assert_eq!(0, with_scale.iter(&world).count());

    world
        .add_component(entities[1], Scale(0.5, 0.5, 0.5))
        .unwrap();

    assert_eq!(3, without_scale.iter(&world).count());
    assert_eq!(1, with_scale.iter(&world).count());
}
#[test]
fn mutate_remove_component() {
    // Removing a component moves the entity out of archetypes matched by
    // queries that require it.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let tags = (Static, Model(5));
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let entities = world.insert(tags, data).to_vec();
    let without_rot = Read::<Pos>::query().filter(!component::<Rot>());
    let with_rot = <(Read<Pos>, Read<Rot>)>::query();

    assert_eq!(0, without_rot.iter(&world).count());
    assert_eq!(3, with_rot.iter(&world).count());

    world.remove_component::<Rot>(entities[1]).unwrap();

    assert_eq!(1, without_rot.iter(&world).count());
    assert_eq!(2, with_rot.iter(&world).count());
}
#[test]
fn mutate_add_tag() {
    // Adding a tag moves the entity into archetypes matched by queries
    // requiring that tag.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let tags = (Model(5),);
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let entities = world.insert(tags, data).to_vec();
    let without_static = <(Read<Pos>, Read<Rot>)>::query();
    let with_static = <(Read<Pos>, Read<Rot>, Tagged<Static>)>::query();

    assert_eq!(3, without_static.iter(&world).count());
    assert_eq!(0, with_static.iter(&world).count());

    world.add_tag(entities[1], Static).unwrap();

    assert_eq!(3, without_static.iter(&world).count());
    assert_eq!(1, with_static.iter(&world).count());
}
#[test]
fn mutate_remove_tag() {
    // Removing a tag makes the entity visible to not-tagged filters and
    // hides it from tag-requiring queries.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let tags = (Model(5), Static);
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let entities = world.insert(tags, data).to_vec();
    let without_static = <(Read<Pos>, Read<Rot>)>::query().filter(!tag::<Static>());
    let with_static = <(Read<Pos>, Read<Rot>, Tagged<Static>)>::query();

    assert_eq!(0, without_static.iter(&world).count());
    assert_eq!(3, with_static.iter(&world).count());

    world.remove_tag::<Static>(entities[1]).unwrap();

    assert_eq!(1, without_static.iter(&world).count());
    assert_eq!(2, with_static.iter(&world).count());
}
#[test]
fn mutate_change_tag_minimum_test() {
    // Overwriting an existing tag value via add_tag must be observable
    // through get_tag afterwards.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let entities = world
        .insert((Model(5),), vec![(Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3))])
        .to_vec();
    tracing::trace!("STARTING CHANGE");
    world.add_tag(entities[0], Model(3)).unwrap();
    tracing::trace!("CHANGED\n");
    assert_eq!(*world.get_tag::<Model>(entities[0]).unwrap(), Model(3));
}
// Dropping a world must emit an EntityRemoved event for each inserted
// entity; this test subscribes before the drop and checks the events.
#[test]
fn delete_entities_on_drop() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let (tx, rx) = crossbeam_channel::unbounded::<legion::event::Event>();
    let shared = (Model(5),);
    let components = vec![(Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3))];
    // Insert the data and store resulting entities in a HashSet
    let mut entities = HashSet::new();
    for entity in world.insert(shared, components) {
        entities.insert(*entity);
    }
    world.subscribe(tx, legion::filter::filter_fns::any());
    std::mem::drop(world);
    // Drain *all* pending events. The previous `for e in rx.try_recv()`
    // iterated over a single `Result`, so at most one message was ever
    // examined; any removal event after the first was silently dropped.
    for e in rx.try_iter() {
        if let legion::event::Event::EntityRemoved(entity, _chunk_id) = e {
            assert!(entities.remove(&entity));
        }
    }
    // Verify that no extra entities are included
    assert!(entities.is_empty());
}
// Changing a tag's value moves an entity between tag groups: per the asserts
// below, the value-filtered query on the old value loses it and the query on
// the new value picks it up.
#[test]
#[allow(clippy::suspicious_map)]
fn mutate_change_tag() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Model(5),);
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    let entities = world.insert(shared, components).to_vec();
    let query_model_3 = <(Read<Pos>, Read<Rot>)>::query().filter(tag_value(&Model(3)));
    let query_model_5 = <(Read<Pos>, Read<Rot>)>::query().filter(tag_value(&Model(5)));
    assert_eq!(3, query_model_5.iter(&world).count());
    assert_eq!(0, query_model_3.iter(&world).count());
    tracing::trace!("STARTING CHANGE");
    // Overwrites the existing Model(5) tag on one entity with Model(3).
    world.add_tag(*entities.get(1).unwrap(), Model(3)).unwrap();
    tracing::trace!("CHANGED\n");
    // The map() only traces each matched entity — hence the clippy allow
    // above for `suspicious_map` (map used for side effects before count).
    assert_eq!(
        1,
        query_model_3
            .iter_entities_mut(&mut world)
            .map(|e| {
                tracing::trace!("iter: {:?}", e);
                e
            })
            .count()
    );
    assert_eq!(
        *world.get_tag::<Model>(*entities.get(1).unwrap()).unwrap(),
        Model(3)
    );
    assert_eq!(2, query_model_5.iter(&world).count());
}
// This test repeatedly creates a world with new entities and drops it, reproducing
// https://github.com/TomGillen/legion/issues/92
#[test]
fn lots_of_deletes() {
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    for _ in 0..10000 {
        let shared = (Model(5),);
        let components = vec![
            (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
            (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
            (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
        ];
        let mut world = universe.create_world();
        // The previous `.to_vec()` copied the returned entity slice into a
        // Vec that was immediately dropped — 10000 pointless allocations.
        world.insert(shared, components);
    }
}
#[test]
fn iter_entities() {
    // iter_entities() must yield exactly the set of inserted entities:
    // nothing missing, nothing extra.
    let _ = tracing_subscriber::fmt::try_init();
    let universe = Universe::new();
    let mut world = universe.create_world();
    let shared = (Model(5),);
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];
    // Track the inserted entities as a set of ids still awaiting a visit.
    let mut remaining: HashSet<Entity> =
        world.insert(shared, components).iter().copied().collect();
    // Every yielded entity must have been inserted (and only seen once)...
    for entity in world.iter_entities() {
        assert!(remaining.remove(&entity));
    }
    // ...and none may be left unvisited.
    assert!(remaining.is_empty());
}

View file

1
crates/bevy_legion Submodule

@ -0,0 +1 @@
Subproject commit c5b9628630d4f9fc54b6843b5ce02d0669434a61

Some files were not shown because too many files have changed in this diff Show more