upgrade bevy_legion / re-apply fork

This commit is contained in:
Carter Anderson 2020-03-09 00:46:38 -07:00
parent 9355a53980
commit 6ef1c099ff
47 changed files with 2180 additions and 1257 deletions

View file

@ -6,8 +6,7 @@ edition = "2018"
[dependencies]
# Modified to use std::any::type_name instead of std::any::TypeId
# legion = { path = "bevy_legion", features = ["serde-1"] }
legion = { git = "https://github.com/TomGillen/legion", rev = "c5b9628630d4f9fc54b6843b5ce02d0669434a61", features = ["serialize"] }
legion = { path = "bevy_legion", features = ["serialize"] }
wgpu = { git = "https://github.com/gfx-rs/wgpu-rs.git", rev = "a7b0d5ae5bc0934439ef559ed145e93f0117c39a"}
glam = "0.8.4"
winit = "0.21.0"

101
bevy_legion/.github/workflows/ci.yml vendored Normal file
View file

@ -0,0 +1,101 @@
name: CI
on:
push:
branches:
- master
pull_request:
jobs:
check:
name: Compile
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: actions-rs/cargo@v1
with:
command: generate-lockfile
- uses: actions/cache@v1.0.1
with:
path: target
key: ${{ runner.OS }}-build-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.OS }}-build-
- uses: actions-rs/cargo@v1
with:
command: test
args: --all
check-features:
name: Features
runs-on: ubuntu-latest
needs: [check]
strategy:
matrix:
features:
- --manifest-path=example/Cargo.toml
- --all-features --release
- --no-default-features --release
- --all-features
- --no-default-features
- --no-default-features --features par-iter
- --no-default-features --features par-schedule
- --no-default-features --features metrics
- --no-default-features --features ffi
steps:
- uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: actions-rs/cargo@v1
with:
command: generate-lockfile
- uses: actions/cache@v1.0.1
with:
path: target
key: ${{ runner.OS }}-build-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.OS }}-build-
- uses: actions-rs/cargo@v1
with:
command: test
args: --all ${{ matrix.features }}
fmt:
name: Rustfmt
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
override: true
components: rustfmt
- uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
clippy:
name: Clippy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: rustup component add clippy
- uses: actions-rs/cargo@v1
with:
command: clippy
args: --all -- -D warnings

3
bevy_legion/.gitignore vendored Normal file
View file

@ -0,0 +1,3 @@
target
**/*.rs.bk
Cargo.lock

2
bevy_legion/.idea/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
# Default ignored files
/workspace.xml

View file

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/examples" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/legion_systems/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/legion_systems/examples" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/legion_systems/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/legion_systems/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/legion_core/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/legion_core/examples" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/legion_core/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/legion_core/benches" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/legion_core/target" />
<excludeFolder url="file://$MODULE_DIR$/legion_systems/target" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View file

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
</project>

View file

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/legion.iml" filepath="$PROJECT_DIR$/.idea/legion.iml" />
</modules>
</component>
</project>

View file

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

View file

@ -1,21 +0,0 @@
language: rust
rust:
- stable
- beta
- nightly
matrix:
allow_failures:
- rust: nightly
fast_finish: true
cache: cargo
script:
- cargo test --all-features
- cargo test --no-default-features
- cargo test --no-default-features --features events
- cargo test --no-default-features --features par-iter
- cargo test --no-default-features --features ffi
- cargo test --release --all-features
- cargo test --release --no-default-features
- cargo test --release --no-default-features --features events
- cargo test --release --no-default-features --features par-iter
- cargo test --release --no-default-features --features ffi

View file

@ -14,40 +14,35 @@ edition = "2018"
travis-ci = { repository = "TomGillen/legion", branch = "master" }
[features]
default = ["par-iter", "par-schedule", "events", "ffi"]
par-iter = ["rayon"]
par-schedule = ["rayon", "crossbeam-queue"]
default = ["par-iter", "par-schedule", "ffi"]
par-iter = ["legion-core/par-iter", "legion-systems/par-iter"]
par-schedule = ["legion-systems/par-schedule"]
log = ["tracing/log", "tracing/log-always"]
ffi = []
serde-1 = ["serde"]
events = ["rayon"]
ffi = ["legion-core/ffi"]
serialize = ["legion-core/serialize"]
metrics = ["legion-core/metrics"]
[workspace]
members = [
"legion_core",
"legion_systems",
]
[dependencies]
parking_lot = "0.9"
downcast-rs = "1.0"
itertools = "0.8"
rayon = { version = "1.2", optional = true }
crossbeam-queue = { version = "0.2.0", optional = true }
crossbeam-channel = "0.4.0"
derivative = "1"
smallvec = "0.6"
bit-set = "0.5"
paste = "0.1"
tracing = "0.1"
metrics = { version = "0.12", optional = true }
serde = { version = "1", optional = true }
fxhash = "0.2"
legion-core = { path = "legion_core", version = "0.2.1", default-features = false }
legion-systems = { path = "legion_systems", version = "0.2.1", default-features = false }
[dev-dependencies]
criterion = "0.3"
cgmath = "0.17"
tracing-subscriber = "0.1.6"
legion = { features = ["serde-1"], path = "." }
serde_json = "1.0"
type-uuid = "0.1"
tracing-subscriber = "0.2"
erased-serde = "0.3"
serde = { version = "1", features = ["derive"]}
uuid = { version = "0.8", features = ["v4"] }
tracing = "0.1"
itertools = "0.8"
rayon = "1.2"
crossbeam-channel = "0.4.0"
[[bench]]
name = "benchmarks"
@ -60,3 +55,7 @@ harness = false
[[bench]]
name = "transform"
harness = false
[[bench]]
name = "insertion"
harness = false

View file

@ -54,16 +54,16 @@ fn add_background_entities(world: &mut World, count: usize) {
create_entities(
world,
&mut [
Box::new(|e, w| w.add_component(e, A(0.0))),
Box::new(|e, w| w.add_component(e, B(0.0))),
Box::new(|e, w| w.add_component(e, C(0.0))),
Box::new(|e, w| w.add_tag(e, Tag(0.0))),
Box::new(|e, w| w.add_component(e, D(0.0))),
Box::new(|e, w| w.add_tag(e, Tag(1.0))),
Box::new(|e, w| w.add_component(e, E(0.0))),
Box::new(|e, w| w.add_tag(e, Tag(2.0))),
Box::new(|e, w| w.add_component(e, F(0.0))),
Box::new(|e, w| w.add_tag(e, Tag(3.0))),
Box::new(|e, w| w.add_component(e, A(0.0)).unwrap()),
Box::new(|e, w| w.add_component(e, B(0.0)).unwrap()),
Box::new(|e, w| w.add_component(e, C(0.0)).unwrap()),
Box::new(|e, w| w.add_tag(e, Tag(0.0)).unwrap()),
Box::new(|e, w| w.add_component(e, D(0.0)).unwrap()),
Box::new(|e, w| w.add_tag(e, Tag(1.0)).unwrap()),
Box::new(|e, w| w.add_component(e, E(0.0)).unwrap()),
Box::new(|e, w| w.add_tag(e, Tag(2.0)).unwrap()),
Box::new(|e, w| w.add_component(e, F(0.0)).unwrap()),
Box::new(|e, w| w.add_tag(e, Tag(3.0)).unwrap()),
],
5,
count,

View file

@ -0,0 +1,69 @@
use criterion::*;
use legion::prelude::*;
/// Baseline benchmark: measures only the cost of allocating the component
/// vector, without creating a world or inserting anything.
///
/// Serves as the floor against which the insertion benchmarks are compared.
fn bench_insert_zero_baseline(c: &mut Criterion) {
    c.bench_function("insert_zero_baseline", |b| {
        b.iter(|| {
            // Deliberately no `Universe`/`World` here; this isolates the
            // allocation cost shared by every other benchmark in this file.
            let components: Vec<isize> = (0..10000).collect();
            criterion::black_box(components);
        });
    });
}
/// Baseline benchmark: world creation plus a single one-entity insert,
/// alongside the same component-vector allocation as the zero baseline.
///
/// The difference from `insert_zero_baseline` is the per-iteration cost of
/// constructing a `Universe`/`World` and performing one insert call.
fn bench_insert_one_baseline(c: &mut Criterion) {
    c.bench_function("insert_one_baseline", |b| {
        b.iter(|| {
            let universe = Universe::new();
            let mut world = universe.create_world();
            // Allocate the same vector as the other benchmarks so timings are
            // directly comparable; it is black-boxed, not inserted.
            let components: Vec<isize> = (0..10000).collect();
            criterion::black_box(components);
            world.insert((), vec![(1usize,)]);
        });
    });
}
/// Inserts 10,000 entities one call at a time, measuring the per-call
/// overhead of unbatched insertion (contrast with `bench_insert_batched`).
fn bench_insert_unbatched(c: &mut Criterion) {
    c.bench_function("insert_unbatched", |b| {
        b.iter(|| {
            let universe = Universe::new();
            let mut world = universe.create_world();
            let components: Vec<isize> = (0..10000).collect();
            // One `insert` call per component — the worst case for this API.
            for component in components {
                world.insert((), vec![(component,)]);
            }
        });
    });
}
/// Inserts entities in a single batched call, parameterized over batch
/// sizes from 1,000 to 10,000 in steps of 1,000.
fn bench_insert_batched(c: &mut Criterion) {
    // Batch sizes to sweep: 1000, 2000, ..., 10000.
    let sizes = (1..11).map(|i| i * 1000);
    let benchmark = ParameterizedBenchmark::new(
        "counts",
        |bencher, &count| {
            bencher.iter(|| {
                let universe = Universe::new();
                let mut world = universe.create_world();
                let components: Vec<(isize,)> = (0..count).map(|i| (i,)).collect();
                world.insert((), components);
            });
        },
        sizes,
    );
    c.bench("insert_batched", benchmark);
}
// Register all insertion benchmarks under the `basic` group and generate the
// benchmark-harness entry point (criterion supplies `main`).
criterion_group!(
basic,
bench_insert_zero_baseline,
bench_insert_one_baseline,
bench_insert_unbatched,
bench_insert_batched,
);
criterion_main!(basic);

View file

@ -55,23 +55,27 @@ fn setup(data: &[Variants]) -> World {
match i {
0 => world.insert(
(),
group.map(|x| {
if let Variants::AB(a, b) = x {
(*a, *b)
} else {
panic!();
}
}),
group
.map(|x| {
if let Variants::AB(a, b) = x {
(*a, *b)
} else {
panic!();
}
})
.collect::<Vec<_>>(),
),
_ => world.insert(
(),
group.map(|x| {
if let Variants::AC(a, c) = x {
(*a, *c)
} else {
panic!();
}
}),
group
.map(|x| {
if let Variants::AC(a, c) = x {
(*a, *c)
} else {
panic!();
}
})
.collect::<Vec<_>>(),
),
};
}

View file

@ -0,0 +1,6 @@
[workspace]
members = [
"hello_world",
"serialization",
]

View file

@ -0,0 +1,9 @@
[package]
name = "hello_world"
version = "0.1.0"
authors = ["Thomas Gillen <thomas.gillen@googlemail.com>"]
edition = "2018"
[dependencies]
legion = { path = "../.." }
tracing-subscriber = "0.2"

View file

@ -17,16 +17,13 @@ fn main() {
let universe = Universe::new();
let mut world = universe.create_world();
// Insert resources into the world
// Resources are also dynamically scheduled just like components, so the accessed
// declared within a SystemBuilder is correct.
// Create resources
// Resources are also dynamically scheduled just like components, so the accesses
// declared within a SystemBuilder are correct.
// Any resource accessed by systems *must be* manually inserted beforehand, otherwise it will panic.
world
.resources
.insert(ExampleResource1("ExampleResource1".to_string()));
world
.resources
.insert(ExampleResource2("ExampleResource2".to_string()));
let mut resources = Resources::default();
resources.insert(ExampleResource1("ExampleResource1".to_string()));
resources.insert(ExampleResource2("ExampleResource2".to_string()));
// create entities
// An insert call is used to insert matching entities into the world.
@ -89,7 +86,7 @@ fn main() {
);
});
let thread_local_example = Box::new(|world: &mut World| {
let thread_local_example = Box::new(|world: &mut World, _resources: &mut Resources| {
// This is an example of a thread local system which has full, exclusive mutable access to the world.
let query = <(Write<Pos>, Read<Vel>)>::query();
for (mut pos, vel) in query.iter_mut(world) {
@ -110,5 +107,5 @@ fn main() {
.build();
// Execute a frame of the schedule.
schedule.execute(&mut world);
schedule.execute(&mut world, &mut resources);
}

View file

@ -0,0 +1,14 @@
[package]
name = "serialization"
version = "0.1.0"
authors = ["Thomas Gillen <thomas.gillen@googlemail.com>"]
edition = "2018"
[dependencies]
legion = { path = "../..", features = ["serialize"] }
tracing-subscriber = "0.2"
serde_json = "1.0"
type-uuid = "0.1"
erased-serde = "0.3"
serde = { version = "1", features = ["derive"]}
uuid = { version = "0.8", features = ["v4"] }

View file

@ -75,12 +75,15 @@ impl<'de, 'a, T: for<'b> Deserialize<'b> + 'static> Visitor<'de>
Some((storage_ptr, storage_len)) => {
let storage_ptr = storage_ptr.as_ptr() as *mut T;
for idx in 0..storage_len {
let element_ptr = unsafe { storage_ptr.offset(idx as isize) };
let element_ptr = unsafe { storage_ptr.add(idx) };
if let None = seq.next_element_seed(ComponentDeserializer {
ptr: element_ptr,
_marker: PhantomData,
})? {
if seq
.next_element_seed(ComponentDeserializer {
ptr: element_ptr,
_marker: PhantomData,
})?
.is_none()
{
panic!(
"expected {} elements in chunk but only {} found",
storage_len, idx
@ -89,7 +92,7 @@ impl<'de, 'a, T: for<'b> Deserialize<'b> + 'static> Visitor<'de>
}
}
None => {
if let Some(_) = seq.next_element::<IgnoredAny>()? {
if seq.next_element::<IgnoredAny>()?.is_some() {
panic!("unexpected element when there was no storage space available");
} else {
// No more elements and no more storage - that's what we want!
@ -199,7 +202,7 @@ struct SerializeImpl {
comp_types: HashMap<TypeId, ComponentRegistration>,
entity_map: RefCell<HashMap<Entity, uuid::Bytes>>,
}
impl legion::ser::WorldSerializer for SerializeImpl {
impl legion::serialize::ser::WorldSerializer for SerializeImpl {
fn can_serialize_tag(&self, ty: &TagTypeId, _meta: &TagMeta) -> bool {
self.tag_types.get(&ty.0).is_some()
}
@ -302,7 +305,7 @@ struct DeserializeImpl {
comp_types_by_uuid: HashMap<type_uuid::Bytes, ComponentRegistration>,
entity_map: RefCell<HashMap<uuid::Bytes, Entity>>,
}
impl legion::de::WorldDeserializer for DeserializeImpl {
impl legion::serialize::de::WorldDeserializer for DeserializeImpl {
fn deserialize_archetype_description<'de, D: Deserializer<'de>>(
&self,
deserializer: D,
@ -417,7 +420,7 @@ fn main() {
entity_map: RefCell::new(HashMap::new()),
};
let serializable = legion::ser::serializable_world(&world, &ser_helper);
let serializable = legion::serialize::ser::serializable_world(&world, &ser_helper);
let serialized_data = serde_json::to_string(&serializable).unwrap();
let de_helper = DeserializeImpl {
tag_types_by_uuid: HashMap::from_iter(
@ -445,7 +448,8 @@ fn main() {
};
let mut deserialized_world = universe.create_world();
let mut deserializer = serde_json::Deserializer::from_str(&serialized_data);
legion::de::deserialize(&mut deserialized_world, &de_helper, &mut deserializer).unwrap();
legion::serialize::de::deserialize(&mut deserialized_world, &de_helper, &mut deserializer)
.unwrap();
let ser_helper = SerializeImpl {
tag_types: de_helper.tag_types,
comp_types: de_helper.comp_types,
@ -458,7 +462,7 @@ fn main() {
.map(|(uuid, e)| (e, uuid)),
)),
};
let serializable = legion::ser::serializable_world(&deserialized_world, &ser_helper);
let serializable = legion::serialize::ser::serializable_world(&deserialized_world, &ser_helper);
let roundtrip_data = serde_json::to_string(&serializable).unwrap();
assert_eq!(roundtrip_data, serialized_data);
}

View file

@ -0,0 +1,45 @@
[package]
name = "legion-core"
version = "0.2.1"
description = "High performance entity component system (ECS) library"
authors = ["Thomas Gillen <thomas.gillen@googlemail.com>"]
repository = "https://github.com/TomGillen/legion"
keywords = ["ecs", "game"]
categories = ["game-engines", "data-structures"]
readme = "readme.md"
license = "MIT"
edition = "2018"
[badges]
travis-ci = { repository = "TomGillen/legion", branch = "master" }
[features]
par-iter = ["rayon"]
ffi = []
serialize = ["serde"]
[dependencies]
parking_lot = "0.10"
downcast-rs = "1.0"
itertools = "0.8"
rayon = { version = "1.2", optional = true }
crossbeam-queue = { version = "0.2.0", optional = true }
crossbeam-channel = "0.4.0"
derivative = "1"
smallvec = "1.2"
tracing = "0.1"
metrics = { version = "0.12", optional = true }
serde = { version = "1", optional = true }
fxhash = "0.2"
thiserror = "1.0"
[dev-dependencies]
tracing-subscriber = "0.2"
serde_json = "1.0"
type-uuid = "0.1"
erased-serde = "0.3"
serde = { version = "1", features = ["derive"]}
uuid = { version = "0.8", features = ["v4"] }
tracing = "0.1"
itertools = "0.8"
rayon = "1.2"

View file

@ -7,62 +7,16 @@ use std::any::{Any, type_name};
use std::ops::Deref;
use std::ops::DerefMut;
use std::sync::atomic::AtomicIsize;
use crate::resource::Resource;
#[cfg(not(debug_assertions))]
use std::marker::PhantomData;
// #[inline(always)]
// pub fn downcast_typename_mut<U: Any>(value: &mut dyn Any) -> &mut U {
// unsafe { &mut *(value as *mut dyn Any as *mut U) }
// }
// #[inline(always)]
// pub fn downcast_typename_ref<U: Any>(value: &dyn Any) -> &U {
// unsafe { &*(value as *const dyn Any as *const U) }
// // if type_name::<T>() == type_name::<U>() {
// // unsafe { Some(&*(value as *const dyn Any as *const U)) }
// // } else {
// // None
// // }
// }
pub trait DowncastTypename {
fn downcast_typename_mut<T: Any>(&mut self) -> Option<&mut T>;
fn downcast_typename_ref<T: Any>(&self) -> Option<&T>;
fn is_typename<T: Any>(&self) -> bool;
}
impl DowncastTypename for dyn Resource {
#[inline(always)]
fn downcast_typename_mut<T: Any>(&mut self) -> Option<&mut T> {
if self.is_typename::<T>() {
// SAFETY: just checked whether we are pointing to the correct type
unsafe { Some(&mut *(self.as_any_mut() as *mut dyn Any as *mut T)) }
} else {
None
}
}
#[inline(always)]
fn downcast_typename_ref<T: Any>(&self) -> Option<&T> {
if self.is_typename::<T>() {
// SAFETY: just checked whether we are pointing to the correct type
unsafe { Some(&*(self.as_any() as *const dyn Any as *const T)) }
} else {
None
}
}
#[inline(always)]
fn is_typename<T: Any>(&self) -> bool {
true
// TODO: it would be nice to add type safety here, but the type names don't match
// println!("{} {}", type_name_of_val(self), type_name::<T>());
// type_name_of_val(self) == type_name::<T>()
}
}
pub fn type_name_of_val<T: ?Sized>(_val: &T) -> &'static str {
type_name::<T>()
}

View file

@ -4,11 +4,16 @@ use crate::{
entity::{Entity, EntityAllocator},
filter::{ChunksetFilterData, Filter},
storage::{Component, ComponentTypeId, Tag, TagTypeId},
world::{ComponentSource, ComponentTupleSet, IntoComponentSource, TagLayout, TagSet, World},
world::{
ComponentSource, ComponentTupleSet, IntoComponentSource, PreallocComponentSource,
TagLayout, TagSet, World, WorldId,
},
};
use derivative::Derivative;
use smallvec::SmallVec;
use std::ops::Range;
use std::{collections::VecDeque, iter::FromIterator, marker::PhantomData, sync::Arc};
use tracing::{span, Level};
/// This trait can be used to implement custom world writer types that can be directly
/// inserted into the command buffer, for more custom and complex world operations. This is analogous
@ -16,7 +21,7 @@ use std::{collections::VecDeque, iter::FromIterator, marker::PhantomData, sync::
/// access.
pub trait WorldWritable {
/// Destructs the writer and performs the write operations on the world.
fn write(self: Arc<Self>, world: &mut World);
fn write(self: Arc<Self>, world: &mut World, cmd: &CommandBuffer);
/// Returns the list of `ComponentTypeId` which are written by this command buffer. This is leveraged
/// to allow parallel command buffer flushing.
@ -38,17 +43,23 @@ struct InsertBufferedCommand<T, C> {
#[derivative(Debug = "ignore")]
components: C,
entities: Vec<Entity>,
entities: Range<usize>,
}
impl<T, C> WorldWritable for InsertBufferedCommand<T, C>
where
T: TagSet + TagLayout + for<'a> Filter<ChunksetFilterData<'a>>,
C: ComponentSource,
{
fn write(self: Arc<Self>, world: &mut World) {
fn write(self: Arc<Self>, world: &mut World, cmd: &CommandBuffer) {
let consumed = Arc::try_unwrap(self).unwrap();
world.insert_buffered(&consumed.entities, consumed.tags, consumed.components);
world.insert(
consumed.tags,
PreallocComponentSource::new(
cmd.pending_insertion[consumed.entities].iter().copied(),
consumed.components,
),
);
}
fn write_components(&self) -> Vec<ComponentTypeId> { self.write_components.clone() }
@ -71,7 +82,7 @@ where
T: TagSet + TagLayout + for<'a> Filter<ChunksetFilterData<'a>>,
C: IntoComponentSource,
{
fn write(self: Arc<Self>, world: &mut World) {
fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) {
let consumed = Arc::try_unwrap(self).unwrap();
world.insert(consumed.tags, consumed.components);
}
@ -84,7 +95,7 @@ where
#[derivative(Debug(bound = ""))]
struct DeleteEntityCommand(Entity);
impl WorldWritable for DeleteEntityCommand {
fn write(self: Arc<Self>, world: &mut World) { world.delete(self.0); }
fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) { world.delete(self.0); }
fn write_components(&self) -> Vec<ComponentTypeId> { Vec::with_capacity(0) }
fn write_tags(&self) -> Vec<TagTypeId> { Vec::with_capacity(0) }
@ -101,9 +112,11 @@ impl<T> WorldWritable for AddTagCommand<T>
where
T: Tag,
{
fn write(self: Arc<Self>, world: &mut World) {
fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) {
let consumed = Arc::try_unwrap(self).unwrap();
world.add_tag(consumed.entity, consumed.tag)
if let Err(err) = world.add_tag(consumed.entity, consumed.tag) {
tracing::error!(error = %err, "error adding tag");
}
}
fn write_components(&self) -> Vec<ComponentTypeId> { Vec::with_capacity(0) }
@ -120,7 +133,11 @@ impl<T> WorldWritable for RemoveTagCommand<T>
where
T: Tag,
{
fn write(self: Arc<Self>, world: &mut World) { world.remove_tag::<T>(self.entity) }
fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) {
if let Err(err) = world.remove_tag::<T>(self.entity) {
tracing::error!(error = %err, "error removing tag");
}
}
fn write_components(&self) -> Vec<ComponentTypeId> { Vec::with_capacity(0) }
fn write_tags(&self) -> Vec<TagTypeId> { vec![TagTypeId::of::<T>()] }
@ -138,11 +155,11 @@ impl<C> WorldWritable for AddComponentCommand<C>
where
C: Component,
{
fn write(self: Arc<Self>, world: &mut World) {
fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) {
let consumed = Arc::try_unwrap(self).unwrap();
world
.add_component::<C>(consumed.entity, consumed.component)
.unwrap();
if let Err(err) = world.add_component::<C>(consumed.entity, consumed.component) {
tracing::error!(error = %err, "error adding component");
}
}
fn write_components(&self) -> Vec<ComponentTypeId> { vec![ComponentTypeId::of::<C>()] }
@ -159,7 +176,11 @@ impl<C> WorldWritable for RemoveComponentCommand<C>
where
C: Component,
{
fn write(self: Arc<Self>, world: &mut World) { world.remove_component::<C>(self.entity) }
fn write(self: Arc<Self>, world: &mut World, _: &CommandBuffer) {
if let Err(err) = world.remove_component::<C>(self.entity) {
tracing::error!(error = %err, "error removing component");
}
}
fn write_components(&self) -> Vec<ComponentTypeId> { vec![ComponentTypeId::of::<C>()] }
fn write_tags(&self) -> Vec<TagTypeId> { Vec::with_capacity(0) }
@ -183,25 +204,27 @@ enum EntityCommand {
/// Inserting an entity using the `EntityBuilder`:
///
/// ```
/// # use legion::prelude::*;
/// # use legion_core::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position(f32);
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Rotation(f32);
/// # let universe = Universe::new();
/// # let mut world = universe.create_world();
/// let mut command_buffer = CommandBuffer::from_world(&mut world);
/// command_buffer.build_entity().unwrap()
/// let mut command_buffer = CommandBuffer::new(&world);
/// command_buffer.start_entity()
/// .with_component(Position(123.0))
/// .with_component(Rotation(456.0)).build(&mut command_buffer);
/// .with_component(Rotation(456.0))
/// .build();
/// command_buffer.write(&mut world);
/// ```
pub struct EntityBuilder<TS = (), CS = ()> {
entity: Entity,
pub struct EntityBuilder<'a, TS = (), CS = ()> {
cmd: &'a mut CommandBuffer,
tags: TS,
components: CS,
}
impl<TS, CS> EntityBuilder<TS, CS>
impl<'a, TS, CS> EntityBuilder<'a, TS, CS>
where
TS: 'static + Send + ConsFlatten,
CS: 'static + Send + ConsFlatten,
@ -211,71 +234,48 @@ where
pub fn with_component<C: Component>(
self,
component: C,
) -> EntityBuilder<TS, <CS as ConsAppend<C>>::Output>
) -> EntityBuilder<'a, TS, <CS as ConsAppend<C>>::Output>
where
CS: ConsAppend<C>,
<CS as ConsAppend<C>>::Output: ConsFlatten,
{
EntityBuilder {
cmd: self.cmd,
components: ConsAppend::append(self.components, component),
entity: self.entity,
tags: self.tags,
}
}
/// Adds a tag to this builder, returning a new builder type containing that component type
/// and its data.
pub fn with_tag<T: Tag>(self, tag: T) -> EntityBuilder<<TS as ConsAppend<T>>::Output, CS>
pub fn with_tag<T: Tag>(self, tag: T) -> EntityBuilder<'a, <TS as ConsAppend<T>>::Output, CS>
where
TS: ConsAppend<T>,
<TS as ConsAppend<T>>::Output: ConsFlatten,
{
EntityBuilder {
cmd: self.cmd,
tags: ConsAppend::append(self.tags, tag),
entity: self.entity,
components: self.components,
}
}
/// Finalizes this builder type and submits it to the `CommandBuffer` as a `WorldWritable` trait
/// object.
pub fn build(self, buffer: &mut CommandBuffer)
/// Finalizes this builder type and submits it to the `CommandBuffer`.
pub fn build(self) -> Entity
where
<TS as ConsFlatten>::Output: TagSet + TagLayout + for<'a> Filter<ChunksetFilterData<'a>>,
<TS as ConsFlatten>::Output: TagSet + TagLayout + for<'b> Filter<ChunksetFilterData<'b>>,
ComponentTupleSet<
<CS as ConsFlatten>::Output,
std::iter::Once<<CS as ConsFlatten>::Output>,
>: ComponentSource,
{
buffer
.commands
.get_mut()
.push_front(EntityCommand::WriteWorld(Arc::new(InsertBufferedCommand {
write_components: Vec::default(),
write_tags: Vec::default(),
tags: self.tags.flatten(),
components: IntoComponentSource::into(std::iter::once(self.components.flatten())),
entities: vec![self.entity],
})));
self.cmd.insert(
self.tags.flatten(),
std::iter::once(self.components.flatten()),
)[0]
}
}
/// Errors returned by the `CommandBuffer`
#[derive(Debug)]
pub enum CommandError {
/// The command buffers entity cache has been exhausted. This is defaulted to 64 at `World::DEFAULT_COMMAND_BUFFER_SIZE`.
/// This upper limit can be changed via `SystemBuilder::with_command_buffer_size` for specific systems,
/// or globally via `World::set_command_buffer_size`.
EntityBlockFull,
}
impl std::fmt::Display for CommandError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "CommandError") }
}
impl std::error::Error for CommandError {
fn cause(&self) -> Option<&dyn std::error::Error> { None }
}
/// A command buffer used to queue mutable changes to the world from a system. This buffer is automatically
/// flushed and refreshed at the beginning of every frame by `Schedule`. If `Schedule` is not used,
/// then the user needs to manually flush it by performing `CommandBuffer::write`.
@ -295,71 +295,54 @@ impl std::error::Error for CommandError {
/// Inserting an entity using the `CommandBuffer`:
///
/// ```
/// # use legion::prelude::*;
/// # use legion_core::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position(f32);
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Rotation(f32);
/// # let universe = Universe::new();
/// # let mut world = universe.create_world();
/// let mut command_buffer = CommandBuffer::from_world(&mut world);
/// let entity = command_buffer.create_entity().unwrap();
/// let mut command_buffer = CommandBuffer::new(&world);
/// let entity = command_buffer.start_entity().build();
///
/// command_buffer.add_component(entity, Position(123.0));
/// command_buffer.delete(entity);
///
/// command_buffer.write(&mut world);
/// ```
#[derive(Default)]
pub struct CommandBuffer {
world_id: WorldId,
commands: AtomicRefCell<VecDeque<EntityCommand>>,
entity_allocator: Option<Arc<EntityAllocator>>,
pub(crate) custom_capacity: Option<usize>,
pub(crate) free_list: SmallVec<[Entity; 64]>,
pub(crate) used_list: SmallVec<[Entity; 64]>,
entity_allocator: Arc<EntityAllocator>,
preallocated_capacity: usize,
free_list: SmallVec<[Entity; 64]>,
pending_insertion: SmallVec<[Entity; 64]>,
}
// This is safe because only 1 system in 1 execution is only ever accessing a command buffer
// and we garuntee the write operations of a command buffer occur in a safe manner
// and we guarantee the write operations of a command buffer occur in a safe manner
unsafe impl Send for CommandBuffer {}
unsafe impl Sync for CommandBuffer {}
impl CommandBuffer {
/// Creates a `CommandBuffer` with a custom capacity of cached Entity's to be collected every frame.
/// Allocating a command buffer in this manner will overwrite `World::set_command_buffer_size` and
/// this system will always allocate the custom provided capacity of entities every frame.
///
/// # Notes
/// This function does not perform any actual entity preallocation. `CommandBuffer::resize` or `CommandBuffer::write`
/// must be called before using the command buffer for the first time to make entities available.
pub fn with_capacity(capacity: usize) -> Self {
// Pull free entities from the world.
Self {
custom_capacity: Some(capacity),
free_list: SmallVec::with_capacity(capacity),
commands: Default::default(),
used_list: SmallVec::with_capacity(capacity),
entity_allocator: None,
}
}
/// Creates a `CommandBuffer` with a custom capacity of cached Entity's to be collected every frame.
/// Allocating a command buffer in this manner will overwrite `World::set_command_buffer_size` and
/// Allocating a command buffer in this manner will override `World::set_command_buffer_size` and
/// this system will always allocate the custom provided capacity of entities every frame.
///
/// This constructor will preallocate the first round of entities needed from the world.
pub fn from_world_with_capacity(world: &mut World, capacity: usize) -> Self {
pub fn new_with_capacity(world: &World, capacity: usize) -> Self {
// Pull free entities from the world.
let free_list =
SmallVec::from_iter((0..capacity).map(|_| world.entity_allocator.create_entity()));
Self {
world_id: world.id(),
free_list,
custom_capacity: Some(capacity),
preallocated_capacity: capacity,
commands: Default::default(),
used_list: SmallVec::with_capacity(capacity),
entity_allocator: Some(world.entity_allocator.clone()),
pending_insertion: SmallVec::new(),
entity_allocator: world.entity_allocator.clone(),
}
}
@ -368,43 +351,44 @@ impl CommandBuffer {
/// value.
///
/// This constructor will preallocate the first round of entities needed from the world.
pub fn from_world(world: &mut World) -> Self {
// Pull free entities from the world.
pub fn new(world: &World) -> Self {
let free_list = SmallVec::from_iter(
(0..world.command_buffer_size()).map(|_| world.entity_allocator.create_entity()),
);
Self {
world_id: world.id(),
free_list,
custom_capacity: None,
preallocated_capacity: world.command_buffer_size(),
commands: Default::default(),
used_list: SmallVec::with_capacity(world.command_buffer_size()),
entity_allocator: Some(world.entity_allocator.clone()),
pending_insertion: SmallVec::new(),
entity_allocator: world.entity_allocator.clone(),
}
}
/// Gets the ID of the world this command buffer belongs to.
pub fn world(&self) -> WorldId { self.world_id }
/// Changes the cached capacity of this `CommandBuffer` to the specified capacity. This includes shrinking
/// and growing the allocated entities, and possibly returning them to the entity allocator in the
/// case of a shrink.
///
/// This function does *NOT* set the `CommandBuffer::custom_capacity` override.
#[allow(clippy::comparison_chain)]
pub fn resize(&mut self, capacity: usize) {
fn resize(&mut self) {
let allocator = &self.entity_allocator;
let free_list = &mut self.free_list;
let capacity = self.preallocated_capacity;
if let Some(allocator) = allocator.as_ref() {
if free_list.len() < capacity {
(free_list.len()..capacity).for_each(|_| free_list.push(allocator.create_entity()));
} else if free_list.len() > capacity {
// Free the entities
(free_list.len() - capacity..capacity).for_each(|_| {
allocator.delete_entity(free_list.pop().unwrap());
});
if free_list.len() < capacity {
for entity in allocator.create_entities().take(capacity - free_list.len()) {
free_list.push(entity);
}
} else {
panic!("Entity allocator not assigned to command buffer")
} else if free_list.len() > capacity {
// Free the entities
(free_list.len() - capacity..capacity).for_each(|_| {
allocator.delete_entity(free_list.pop().unwrap());
});
}
}
@ -412,63 +396,47 @@ impl CommandBuffer {
///
/// Command flushes are performed in a FIFO manner, allowing for reliable, linear commands being
/// executed in the order they were provided.
///
/// This function also calls `CommandBuffer:resize`, performing any appropriate entity preallocation,
/// refilling the entity cache of any consumed entities.
pub fn write(&mut self, world: &mut World) {
tracing::trace!("Draining command buffer");
let span = span!(Level::TRACE, "Draining command buffer");
let _guard = span.enter();
if self.entity_allocator.is_none() {
self.entity_allocator = Some(world.entity_allocator.clone());
if self.world_id != world.id() {
panic!("command buffers may only write into their parent world");
}
let empty = Vec::from_iter((0..self.used_list.len()).map(|_| ()));
world.insert_buffered(
self.used_list.as_slice(),
(),
IntoComponentSource::into(empty),
);
self.used_list.clear();
while let Some(command) = self.commands.get_mut().pop_back() {
match command {
EntityCommand::WriteWorld(ptr) => ptr.write(world),
EntityCommand::WriteWorld(ptr) => ptr.write(world, self),
EntityCommand::ExecMutWorld(closure) => closure(world),
EntityCommand::ExecWorld(closure) => closure(world),
}
}
self.pending_insertion.clear();
// Refill our entity buffer from the world
if let Some(custom_capacity) = self.custom_capacity {
self.resize(custom_capacity);
} else {
self.resize(world.command_buffer_size());
}
self.resize();
}
/// Consumed an internally cached entity, returning an `EntityBuilder` using that entity.
pub fn build_entity(&mut self) -> Result<EntityBuilder<(), ()>, CommandError> {
let entity = self.create_entity()?;
Ok(EntityBuilder {
entity,
/// Creates an entity builder for constructing a new entity.
pub fn start_entity(&mut self) -> EntityBuilder<(), ()> {
EntityBuilder {
cmd: self,
tags: (),
components: (),
})
}
}
/// Consumed an internally cached entity, or returns `CommandError`
pub fn create_entity(&mut self) -> Result<Entity, CommandError> {
/// Allocates a new entity.
fn allocate_entity(&mut self) -> Entity {
if self.free_list.is_empty() {
self.resize(
self.custom_capacity
.unwrap_or(World::DEFAULT_COMMAND_BUFFER_SIZE),
);
self.resize();
}
let entity = self.free_list.pop().ok_or(CommandError::EntityBlockFull)?;
self.used_list.push(entity);
Ok(entity)
let entity = self
.free_list
.pop()
.unwrap_or_else(|| self.entity_allocator.create_entity());
self.pending_insertion.push(entity);
entity
}
/// Executes an arbitrary closure against the mutable world, allowing for queued exclusive
@ -485,7 +453,7 @@ impl CommandBuffer {
/// Inserts an arbitrary implementor of the `WorldWritable` trait into the command queue.
/// This can be leveraged for creating custom `WorldWritable` trait implementors, and is used
/// internally for the default writers.
pub fn insert_writer<W>(&self, writer: W)
fn insert_writer<W>(&self, writer: W)
where
W: 'static + WorldWritable,
{
@ -494,46 +462,23 @@ impl CommandBuffer {
.push_front(EntityCommand::WriteWorld(Arc::new(writer)));
}
/// Queues an *unbuffered* insertion into the world. This command follows the same syntax as
/// the normal `World::insert`, except for one caveat - entities are NOT returned by this
/// function, meaning that the internal entity cache and limits of this `CommandBuffer` are not
/// applicable to this function.
///
/// This function can be considered a "fire and forget" entity creation method which is not bound
/// by the standard command buffer size limits of the other entity insertion functions. This allows
/// for mass insertion of entities, exceeding the command buffer sizes, to occur in scenarios that
/// the entities do not need to be retrieved.
pub fn insert_unbuffered<T, C>(&mut self, tags: T, components: C)
where
T: 'static + TagSet + TagLayout + for<'a> Filter<ChunksetFilterData<'a>>,
C: 'static + IntoComponentSource,
{
self.commands
.get_mut()
.push_front(EntityCommand::WriteWorld(Arc::new(InsertCommand {
write_components: Vec::default(),
write_tags: Vec::default(),
tags,
components,
})));
}
/// Queues an insertion into the world. This command follows the same syntax as
/// the normal `World::insert`, returning the entities created for this command.
pub fn insert<T, C>(&mut self, tags: T, components: C) -> Result<Vec<Entity>, CommandError>
pub fn insert<T, C>(&mut self, tags: T, components: C) -> &[Entity]
where
T: 'static + TagSet + TagLayout + for<'a> Filter<ChunksetFilterData<'a>>,
C: 'static + IntoComponentSource,
{
let components = components.into();
if components.len() > self.free_list.len() {
return Err(CommandError::EntityBlockFull);
let start = self.pending_insertion.len();
let count = components.len();
self.pending_insertion.reserve(count);
for _ in 0..count {
self.allocate_entity();
}
let mut entities = Vec::with_capacity(components.len());
for _ in 0..components.len() {
entities.push(self.free_list.pop().ok_or(CommandError::EntityBlockFull)?);
}
let range = start..self.pending_insertion.len();
self.commands
.get_mut()
@ -542,65 +487,43 @@ impl CommandBuffer {
write_tags: Vec::default(),
tags,
components,
entities: entities.clone(),
entities: range.clone(),
})));
Ok(entities)
&self.pending_insertion[range]
}
/// Queues the deletion of an entity in the command buffer. This writer calls `World::delete`
pub fn delete(&self, entity: Entity) {
self.commands
.get_mut()
.push_front(EntityCommand::WriteWorld(Arc::new(DeleteEntityCommand(
entity,
))));
}
pub fn delete(&self, entity: Entity) { self.insert_writer(DeleteEntityCommand(entity)); }
/// Queues the addition of a component from an entity in the command buffer.
/// This writer calls `World::add_component`
pub fn add_component<C: Component>(&self, entity: Entity, component: C) {
self.commands
.get_mut()
.push_front(EntityCommand::WriteWorld(Arc::new(AddComponentCommand {
entity,
component,
})));
self.insert_writer(AddComponentCommand { entity, component });
}
/// Queues the removal of a component from an entity in the command buffer.
/// This writer calls `World::remove_component`
pub fn remove_component<C: Component>(&self, entity: Entity) {
self.commands
.get_mut()
.push_front(EntityCommand::WriteWorld(Arc::new(
RemoveComponentCommand {
entity,
_marker: PhantomData::<C>::default(),
},
)));
self.insert_writer(RemoveComponentCommand {
entity,
_marker: PhantomData::<C>::default(),
});
}
/// Queues the addition of a tag from an entity in the command buffer.
/// This writer calls `World::add_tag`
pub fn add_tag<T: Tag>(&self, entity: Entity, tag: T) {
self.commands
.get_mut()
.push_front(EntityCommand::WriteWorld(Arc::new(AddTagCommand {
entity,
tag,
})));
self.insert_writer(AddTagCommand { entity, tag });
}
/// Queues the removal of a tag from an entity in the command buffer.
/// This writer calls `World::remove_tag`
pub fn remove_tag<T: Tag>(&self, entity: Entity) {
self.commands
.get_mut()
.push_front(EntityCommand::WriteWorld(Arc::new(RemoveTagCommand {
entity,
_marker: PhantomData::<T>::default(),
})));
self.insert_writer(RemoveTagCommand {
entity,
_marker: PhantomData::<T>::default(),
});
}
/// Returns the current number of commands already queued in this `CommandBuffer` instance.
@ -612,6 +535,18 @@ impl CommandBuffer {
pub fn is_empty(&self) -> bool { self.commands.get().len() == 0 }
}
impl Drop for CommandBuffer {
    /// Returns every entity still cached by this command buffer to the
    /// allocator so its ID can be recycled, rather than leaking it.
    fn drop(&mut self) {
        // Drain the unused free list first, then any entities that were handed
        // out but whose insertion commands were never flushed to the world.
        while let Some(entity) = self.free_list.pop().or_else(|| self.pending_insertion.pop()) {
            self.entity_allocator.delete_entity(entity);
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
@ -625,7 +560,7 @@ mod tests {
struct TestResource(pub i32);
#[test]
fn create_entity_test() -> Result<(), CommandError> {
fn create_entity_test() {
let _ = tracing_subscriber::fmt::try_init();
let universe = Universe::new();
@ -638,9 +573,9 @@ mod tests {
let components_len = components.len();
//world.entity_allocator.get_block()
let mut command = CommandBuffer::from_world(&mut world);
let entity1 = command.create_entity()?;
let entity2 = command.create_entity()?;
let mut command = CommandBuffer::new(&world);
let entity1 = command.start_entity().build();
let entity2 = command.start_entity().build();
command.add_component(entity1, Pos(1., 2., 3.));
command.add_component(entity2, Pos(4., 5., 6.));
@ -650,17 +585,15 @@ mod tests {
let query = Read::<Pos>::query();
let mut count = 0;
for _ in query.iter_entities(&mut world) {
for _ in query.iter_entities(&world) {
count += 1;
}
assert_eq!(components_len, count);
Ok(())
}
#[test]
fn simple_write_test() -> Result<(), CommandError> {
fn simple_write_test() {
let _ = tracing_subscriber::fmt::try_init();
let universe = Universe::new();
@ -673,8 +606,8 @@ mod tests {
let components_len = components.len();
//world.entity_allocator.get_block()
let mut command = CommandBuffer::from_world(&mut world);
let _ = command.insert((), components)?;
let mut command = CommandBuffer::new(&world);
let _ = command.insert((), components);
// Assert writing checks
// TODO:
@ -693,7 +626,5 @@ mod tests {
}
assert_eq!(components_len, count);
Ok(())
}
}

View file

@ -1,153 +1,155 @@
// Things happen here, and they work.
// ,---.
// / |
// / |
// / |
// / |
// ___,' |
// < -' :
// `-.__..--'``-,_\_
// |o/ ` :,.)_`>
// :/ ` ||/)
// (_.).__,-` |\
// /( `.`` `| :
// \'`-.) ` ; ;
// | ` /-<
// | ` / `.
// ,-_-..____ /| ` :__..-'\
// ,'-.__\\ ``-./ :` ; \
//`\ `\ `\\ \ : ( ` / , `. \
// \` \ \\ | | ` : : .\ \
// \ `\_ )) : ; | | ): :
// (`-.-'\ || |\ \ ` ; ; | |
// \-_ `;;._ ( ` / /_ | |
// `-.-.// ,'`-._\__/_,' ; |
// \:: : / ` , / |
// || | ( ,' / / |
// || ,' / |
/// Prepend a new type into a cons list
pub trait ConsPrepend<T> {
/// Result of prepend
type Output;
/// Prepend to runtime cons value
fn prepend(self, t: T) -> Self::Output;
}
impl<T> ConsPrepend<T> for () {
type Output = (T, Self);
fn prepend(self, t: T) -> Self::Output { (t, self) }
}
impl<T, A, B> ConsPrepend<T> for (A, B) {
type Output = (T, Self);
fn prepend(self, t: T) -> Self::Output { (t, self) }
}
/// Prepend a new type into a cons list
pub trait ConsAppend<T> {
/// Result of append
type Output;
/// Prepend to runtime cons value
fn append(self, t: T) -> Self::Output;
}
impl<T> ConsAppend<T> for () {
type Output = (T, Self);
fn append(self, t: T) -> Self::Output { (t, ()) }
}
impl<T, A, B: ConsAppend<T>> ConsAppend<T> for (A, B) {
type Output = (A, <B as ConsAppend<T>>::Output);
fn append(self, t: T) -> Self::Output {
let (a, b) = self;
(a, b.append(t))
}
}
/// transform cons list into a flat tuple
pub trait ConsFlatten {
/// Flattened tuple
type Output;
/// Flatten runtime cons value
fn flatten(self) -> Self::Output;
}
impl ConsFlatten for () {
type Output = ();
fn flatten(self) -> Self::Output { self }
}
macro_rules! cons {
() => (
()
);
($head:tt) => (
($head, ())
);
($head:tt, $($tail:tt),*) => (
($head, cons!($($tail),*))
);
}
macro_rules! impl_flatten {
($($items:ident),*) => {
#[allow(unused_parens)] // This is added because the nightly compiler complains
impl<$($items),*> ConsFlatten for cons!($($items),*)
{
type Output = ($($items),*);
fn flatten(self) -> Self::Output {
#[allow(non_snake_case)]
let cons!($($items),*) = self;
($($items),*)
}
}
impl_flatten!(@ $($items),*);
};
(@ $head:ident, $($tail:ident),*) => {
impl_flatten!($($tail),*);
};
(@ $head:ident) => {};
}
impl_flatten!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z);
fn test_api() {}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn cons_macro() {
assert_eq!(cons!(), ());
assert_eq!(cons!(1), (1, ()));
assert_eq!(cons!(1, 2, 3, 4), (1, (2, (3, (4, ())))));
}
#[test]
fn cons_prepend() {
assert_eq!(().prepend(123), (123, ()));
assert_eq!(
cons!(1, 2, 3, 4, 5).prepend(123).prepend(15),
cons!(15, 123, 1, 2, 3, 4, 5)
);
}
#[test]
fn cons_append() {
assert_eq!(().append(123), (123, ()));
assert_eq!(
cons!(1, 2, 3, 4, 5).append(123).append(15),
cons!(1, 2, 3, 4, 5, 123, 15)
);
}
#[test]
fn cons_flatten() {
assert_eq!(().flatten(), ());
assert_eq!((1, ()).flatten(), 1);
assert_eq!(cons!(1, 2, 3, 4, 5).flatten(), (1, 2, 3, 4, 5));
}
}
// Things happen here, and they work.
// ,---.
// / |
// / |
// / |
// / |
// ___,' |
// < -' :
// `-.__..--'``-,_\_
// |o/ ` :,.)_`>
// :/ ` ||/)
// (_.).__,-` |\
// /( `.`` `| :
// \'`-.) ` ; ;
// | ` /-<
// | ` / `.
// ,-_-..____ /| ` :__..-'\
// ,'-.__\\ ``-./ :` ; \
//`\ `\ `\\ \ : ( ` / , `. \
// \` \ \\ | | ` : : .\ \
// \ `\_ )) : ; | | ): :
// (`-.-'\ || |\ \ ` ; ; | |
// \-_ `;;._ ( ` / /_ | |
// `-.-.// ,'`-._\__/_,' ; |
// \:: : / ` , / |
// || | ( ,' / / |
// || ,' / |
/// Pushes a value of type `T` onto the front (head) of a cons list.
pub trait ConsPrepend<T> {
    /// The cons list type produced by the prepend.
    type Output;
    /// Consumes the list and returns it with `value` attached at the head.
    fn prepend(self, value: T) -> Self::Output;
}

impl<T> ConsPrepend<T> for () {
    type Output = (T, Self);

    fn prepend(self, value: T) -> Self::Output {
        (value, self)
    }
}

impl<T, A, B> ConsPrepend<T> for (A, B) {
    type Output = (T, Self);

    fn prepend(self, value: T) -> Self::Output {
        (value, self)
    }
}
/// Attaches a value of type `T` at the tail (end) of a cons list.
pub trait ConsAppend<T> {
    /// The cons list type produced by the append.
    type Output;
    /// Consumes the list and returns it with `value` attached at the end.
    fn append(self, value: T) -> Self::Output;
}

impl<T> ConsAppend<T> for () {
    type Output = (T, Self);

    fn append(self, value: T) -> Self::Output {
        (value, ())
    }
}

impl<T, A, B: ConsAppend<T>> ConsAppend<T> for (A, B) {
    type Output = (A, <B as ConsAppend<T>>::Output);

    fn append(self, value: T) -> Self::Output {
        // Recurse down the tail until the `()` terminator is reached.
        let (head, tail) = self;
        (head, tail.append(value))
    }
}
/// Converts a right-nested cons list into a flat tuple.
pub trait ConsFlatten {
    /// The flat tuple type this list flattens into.
    type Output;
    /// Consumes the cons list and returns its elements as a flat tuple.
    fn flatten(self) -> Self::Output;
}

impl ConsFlatten for () {
    type Output = ();

    fn flatten(self) -> Self::Output {
        self
    }
}
/// Builds a right-nested cons list type/value from a flat token list,
/// e.g. `cons!(1, 2, 3)` expands to `(1, (2, (3, ())))`.
macro_rules! cons {
    // Empty list: the unit terminator.
    () => (
        ()
    );
    // Single element: pair it with the terminator.
    ($head:tt) => (
        ($head, ())
    );
    // Two or more elements: pair the head with the recursively-built tail.
    ($head:tt, $($tail:tt),*) => (
        ($head, cons!($($tail),*))
    );
}
/// Implements `ConsFlatten` for the cons list built from the given type
/// parameters, then recurses with one fewer parameter so that every shorter
/// suffix of the list is covered as well.
macro_rules! impl_flatten {
    ($($items:ident),*) => {
        #[allow(unused_parens)] // This is added because the nightly compiler complains
        impl<$($items),*> ConsFlatten for cons!($($items),*)
        {
            type Output = ($($items),*);
            fn flatten(self) -> Self::Output {
                // Destructure the nested cons value via the same macro that
                // built its type, then re-emit the bindings as a flat tuple.
                #[allow(non_snake_case)]
                let cons!($($items),*) = self;
                ($($items),*)
            }
        }
        // Recurse to generate the impls for all shorter parameter lists.
        impl_flatten!(@ $($items),*);
    };
    (@ $head:ident, $($tail:ident),*) => {
        impl_flatten!($($tail),*);
    };
    (@ $head:ident) => {};
}
// Generate `ConsFlatten` impls for cons lists of up to 26 elements.
impl_flatten!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z);

// Empty placeholder; appears unused — NOTE(review): candidate for removal, confirm no callers.
fn test_api() {}
/// Unit tests for the cons-list building blocks defined above.
#[cfg(test)]
mod tests {
    use super::*;

    // `cons!` should expand to right-nested pairs terminated by `()`.
    #[test]
    fn cons_macro() {
        #![allow(clippy::unit_cmp)]
        assert_eq!(cons!(), ());
        assert_eq!(cons!(1), (1, ()));
        assert_eq!(cons!(1, 2, 3, 4), (1, (2, (3, (4, ())))));
    }

    // Prepending pushes values onto the head of the list.
    #[test]
    fn cons_prepend() {
        assert_eq!(().prepend(123), (123, ()));
        assert_eq!(
            cons!(1, 2, 3, 4, 5).prepend(123).prepend(15),
            cons!(15, 123, 1, 2, 3, 4, 5)
        );
    }

    // Appending attaches values at the tail of the list.
    #[test]
    fn cons_append() {
        assert_eq!(().append(123), (123, ()));
        assert_eq!(
            cons!(1, 2, 3, 4, 5).append(123).append(15),
            cons!(1, 2, 3, 4, 5, 123, 15)
        );
    }

    // Flattening converts the nested representation back into a flat tuple.
    #[test]
    fn cons_flatten() {
        #![allow(clippy::unit_cmp)]
        assert_eq!(().flatten(), ());
        assert_eq!((1, ()).flatten(), 1);
        assert_eq!(cons!(1, 2, 3, 4, 5).flatten(), (1, 2, 3, 4, 5));
    }
}

View file

@ -1,9 +1,15 @@
use parking_lot::{Mutex, RwLock};
use crate::index::ArchetypeIndex;
use crate::index::ChunkIndex;
use crate::index::ComponentIndex;
use crate::index::SetIndex;
use parking_lot::{Mutex, RwLock, RwLockWriteGuard};
use std::fmt::Display;
use std::num::Wrapping;
use std::ops::Deref;
use std::ops::DerefMut;
use std::sync::Arc;
pub(crate) type EntityIndex = u32;
pub type EntityIndex = u32;
pub(crate) type EntityVersion = Wrapping<u32>;
/// A handle to an entity.
@ -18,7 +24,7 @@ impl Entity {
Entity { index, version }
}
pub(crate) fn index(self) -> EntityIndex { self.index }
pub fn index(self) -> EntityIndex { self.index }
}
impl Display for Entity {
@ -28,19 +34,19 @@ impl Display for Entity {
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub(crate) struct EntityLocation {
archetype_index: usize,
set_index: usize,
chunk_index: usize,
component_index: usize,
pub struct EntityLocation {
archetype_index: ArchetypeIndex,
set_index: SetIndex,
chunk_index: ChunkIndex,
component_index: ComponentIndex,
}
impl EntityLocation {
pub(crate) fn new(
archetype_index: usize,
set_index: usize,
chunk_index: usize,
component_index: usize,
archetype_index: ArchetypeIndex,
set_index: SetIndex,
chunk_index: ChunkIndex,
component_index: ComponentIndex,
) -> Self {
EntityLocation {
archetype_index,
@ -50,13 +56,58 @@ impl EntityLocation {
}
}
pub(crate) fn archetype(&self) -> usize { self.archetype_index }
pub fn archetype(&self) -> ArchetypeIndex { self.archetype_index }
pub(crate) fn set(&self) -> usize { self.set_index }
pub fn set(&self) -> SetIndex { self.set_index }
pub(crate) fn chunk(&self) -> usize { self.chunk_index }
pub fn chunk(&self) -> ChunkIndex { self.chunk_index }
pub(crate) fn component(&self) -> usize { self.component_index }
pub fn component(&self) -> ComponentIndex { self.component_index }
}
/// A sparse map from entity index to its storage `EntityLocation`, organized
/// into fixed-size blocks of `BlockAllocator::BLOCK_SIZE` entries.
pub(crate) struct Locations {
    // Outer Vec is indexed by block index; `None` marks blocks never written to.
    blocks: Vec<Option<Vec<EntityLocation>>>,
}

impl Locations {
    /// Creates an empty location map.
    pub fn new() -> Self { Locations { blocks: Vec::new() } }

    /// Splits an entity index into `(block index, offset within block)`.
    fn index(entity: EntityIndex) -> (usize, usize) {
        let block = entity as usize / BlockAllocator::BLOCK_SIZE;
        let index = entity as usize % BlockAllocator::BLOCK_SIZE;
        (block, index)
    }

    /// Returns the recorded location for `entity`, or `None` when its block
    /// has never been populated.
    pub fn get(&self, entity: Entity) -> Option<EntityLocation> {
        let (block, index) = Locations::index(entity.index());
        self.blocks
            .get(block)
            .and_then(|b| b.as_ref())
            .map(|b| b[index])
    }

    /// Records `location` for `entity`, growing the block list and lazily
    /// initializing the containing block on first touch.
    pub fn set(&mut self, entity: Entity, location: EntityLocation) {
        let (block_index, index) = Locations::index(entity.index());
        // Grow the outer block list so that slot `block_index` is addressable.
        if self.blocks.len() <= block_index {
            self.blocks.resize_with(block_index + 1, || None);
        }
        // Fill the block with zeroed placeholder locations on first use.
        let block = self.blocks[block_index].get_or_insert_with(|| {
            vec![
                EntityLocation::new(
                    ArchetypeIndex(0),
                    SetIndex(0),
                    ChunkIndex(0),
                    ComponentIndex(0),
                );
                BlockAllocator::BLOCK_SIZE
            ]
        });
        block[index] = location;
    }
}
#[derive(Debug)]
@ -89,12 +140,11 @@ impl BlockAllocator {
}
#[derive(Debug)]
pub(crate) struct EntityBlock {
pub struct EntityBlock {
start: EntityIndex,
len: usize,
versions: Vec<EntityVersion>,
free: Vec<EntityIndex>,
locations: Vec<EntityLocation>,
}
impl EntityBlock {
@ -104,9 +154,6 @@ impl EntityBlock {
len,
versions: Vec::with_capacity(len),
free: Vec::new(),
locations: std::iter::repeat(EntityLocation::new(0, 0, 0, 0))
.take(len)
.collect(),
}
}
@ -138,45 +185,80 @@ impl EntityBlock {
}
}
pub fn free(&mut self, entity: Entity) -> Option<EntityLocation> {
pub fn free(&mut self, entity: Entity) -> bool {
if let Some(true) = self.is_alive(entity) {
let i = self.index(entity.index);
self.versions[i] += Wrapping(1);
self.free.push(entity.index);
self.get_location(entity.index)
true
} else {
None
false
}
}
pub fn set_location(&mut self, entity: EntityIndex, location: EntityLocation) {
assert!(entity >= self.start);
let index = (entity - self.start) as usize;
*self.locations.get_mut(index).unwrap() = location;
}
pub fn get_location(&self, entity: EntityIndex) -> Option<EntityLocation> {
if entity < self.start {
return None;
}
let index = (entity - self.start) as usize;
self.locations.get(index).copied()
}
}
/// Sparse collection of `EntityBlock`s, indexed by block index
/// (`entity_index / BlockAllocator::BLOCK_SIZE`).
#[derive(Debug)]
struct Blocks {
    blocks: Vec<Option<EntityBlock>>,
}

impl Blocks {
    fn new() -> Self { Self { blocks: Vec::new() } }

    /// Computes the block index which owns the given entity index.
    pub fn index(entity: EntityIndex) -> usize { entity as usize / BlockAllocator::BLOCK_SIZE }

    /// Finds the block containing `entity`, if one has been allocated.
    fn find(&self, entity: EntityIndex) -> Option<&EntityBlock> {
        self.blocks.get(Blocks::index(entity)).and_then(|b| b.as_ref())
    }

    /// Mutable variant of [`Blocks::find`].
    fn find_mut(&mut self, entity: EntityIndex) -> Option<&mut EntityBlock> {
        self.blocks.get_mut(Blocks::index(entity)).and_then(|b| b.as_mut())
    }

    /// Inserts `block` at the slot derived from its starting entity index,
    /// padding the collection with empty slots as needed.
    /// Returns the slot index the block was stored at.
    fn push(&mut self, block: EntityBlock) -> usize {
        let i = Blocks::index(block.start);
        if self.blocks.len() <= i {
            // Pad with `None` so that slot `i` exists before writing to it.
            self.blocks.resize_with(i + 1, || None);
        }
        self.blocks[i] = Some(block);
        i
    }

    /// Moves all allocated blocks out of `other` into `self`.
    fn append(&mut self, other: &mut Blocks) {
        for block in other.blocks.drain(..).flatten() {
            self.push(block);
        }
    }
}

impl Deref for Blocks {
    type Target = [Option<EntityBlock>];

    fn deref(&self) -> &Self::Target { self.blocks.deref() }
}

impl DerefMut for Blocks {
    fn deref_mut(&mut self) -> &mut Self::Target { self.blocks.deref_mut() }
}
/// Manages the allocation and deletion of `Entity` IDs within a world.
#[derive(Debug, Clone)]
#[derive(Debug)]
pub struct EntityAllocator {
allocator: Arc<Mutex<BlockAllocator>>,
blocks: Arc<RwLock<Vec<EntityBlock>>>,
blocks: RwLock<Blocks>,
}
impl EntityAllocator {
pub(crate) fn new(allocator: Arc<Mutex<BlockAllocator>>) -> Self {
EntityAllocator {
allocator,
blocks: Arc::new(RwLock::new(Vec::new())),
blocks: RwLock::new(Blocks::new()),
}
}
@ -184,59 +266,99 @@ impl EntityAllocator {
pub fn is_alive(&self, entity: Entity) -> bool {
self.blocks
.read()
.iter()
.filter_map(|b| b.is_alive(entity))
.nth(0)
.find(entity.index())
.map(|b| b.is_alive(entity))
.flatten()
.unwrap_or(false)
}
/// Allocates a new unused `Entity` ID.
pub fn create_entity(&self) -> Entity {
let mut blocks = self.blocks.write();
pub fn create_entity(&self) -> Entity { self.create_entities().next().unwrap() }
if let Some(entity) = blocks.iter_mut().rev().filter_map(|b| b.allocate()).nth(0) {
entity
} else {
let mut block = self.allocator.lock().allocate();
let entity = block.allocate().unwrap();
blocks.push(block);
entity
/// Creates an iterator which allocates new `Entity` IDs.
pub fn create_entities(&self) -> CreateEntityIter {
CreateEntityIter {
blocks: self.blocks.write(),
allocator: &self.allocator,
current_block: None,
}
}
pub(crate) fn delete_entity(&self, entity: Entity) -> Option<EntityLocation> {
self.blocks.write().iter_mut().find_map(|b| b.free(entity))
}
pub(crate) fn set_location(&self, entity: EntityIndex, location: EntityLocation) {
pub(crate) fn delete_entity(&self, entity: Entity) -> bool {
self.blocks
.write()
.iter_mut()
.rev()
.find(|b| b.in_range(entity))
.unwrap()
.set_location(entity, location);
.find_mut(entity.index())
.map(|b| b.free(entity))
.unwrap_or(false)
}
pub(crate) fn get_location(&self, entity: EntityIndex) -> Option<EntityLocation> {
self.blocks
.read()
.iter()
.find(|b| b.in_range(entity))
.and_then(|b| b.get_location(entity))
pub(crate) fn delete_all_entities(&self) {
for block in self.blocks.write().blocks.drain(..) {
if let Some(mut block) = block {
// If any entity in the block is in an allocated state, clear
// and repopulate the free list. This forces all entities into an
// unallocated state. Bump versions of all entity indexes to
// ensure that we don't reuse the same entity.
if block.free.len() < block.versions.len() {
block.free.clear();
for (i, version) in block.versions.iter_mut().enumerate() {
*version += Wrapping(1);
block.free.push(i as u32 + block.start);
}
}
self.allocator.lock().free(block);
}
}
}
pub(crate) fn merge(&self, other: EntityAllocator) {
assert!(Arc::ptr_eq(&self.allocator, &other.allocator));
self.blocks.write().append(&mut other.blocks.write());
self.blocks.write().append(&mut *other.blocks.write());
}
}
impl Drop for EntityAllocator {
fn drop(&mut self) {
for block in self.blocks.write().drain(..) {
self.allocator.lock().free(block);
fn drop(&mut self) { self.delete_all_entities(); }
}
/// An iterator which allocates new `Entity` IDs on demand, holding the
/// allocator's block list write-locked for the iterator's lifetime.
pub struct CreateEntityIter<'a> {
    // Index of the block that most recently yielded an entity; tried first.
    current_block: Option<usize>,
    blocks: RwLockWriteGuard<'a, Blocks>,
    allocator: &'a Mutex<BlockAllocator>,
}

impl<'a> Iterator for CreateEntityIter<'a> {
    type Item = Entity;

    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: try the block we last allocated from.
        if let Some(block) = self.current_block {
            // `current_block` is only ever set to an occupied slot.
            if let Some(entity) = self.blocks[block].as_mut().unwrap().allocate() {
                return Some(entity);
            }
        }

        // Slow path: scan the existing blocks (newest first) for spare capacity.
        for (i, allocated) in self
            .blocks
            .iter_mut()
            .enumerate()
            .rev()
            .filter_map(|(i, b)| b.as_mut().map(|block| (i, block.allocate())))
        {
            if let Some(entity) = allocated {
                self.current_block = Some(i);
                return Some(entity);
            }
        }

        // Every block is full; grab a fresh block from the global allocator.
        let mut block = self.allocator.lock().allocate();
        let entity = block.allocate().unwrap();
        self.current_block = Some(self.blocks.push(block));
        Some(entity)
    }
}
@ -311,7 +433,7 @@ mod tests {
let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
let entity = allocator.create_entity();
assert_eq!(true, allocator.delete_entity(entity).is_some());
assert_eq!(true, allocator.delete_entity(entity));
}
#[test]
@ -320,7 +442,7 @@ mod tests {
let entity = allocator.create_entity();
allocator.delete_entity(entity);
assert_eq!(None, allocator.delete_entity(entity));
assert_eq!(false, allocator.delete_entity(entity));
}
#[test]
@ -328,14 +450,14 @@ mod tests {
let allocator = EntityAllocator::new(Arc::from(Mutex::new(BlockAllocator::new())));
let entity = Entity::new(10 as EntityIndex, Wrapping(10));
assert_eq!(None, allocator.delete_entity(entity));
assert_eq!(false, allocator.delete_entity(entity));
}
#[test]
fn multiple_allocators_unique_ids() {
let blocks = Arc::from(Mutex::new(BlockAllocator::new()));
let allocator_a = EntityAllocator::new(blocks.clone());
let allocator_b = EntityAllocator::new(blocks.clone());
let allocator_b = EntityAllocator::new(blocks);
let mut entities_a = HashSet::<Entity>::default();
let mut entities_b = HashSet::<Entity>::default();

View file

@ -2,6 +2,9 @@ use crate::entity::Entity;
use crate::filter::{
ArchetypeFilterData, ChunkFilterData, ChunksetFilterData, EntityFilter, Filter, FilterResult,
};
use crate::index::ArchetypeIndex;
use crate::index::ChunkIndex;
use crate::index::SetIndex;
use crate::storage::ArchetypeId;
use crate::storage::ChunkId;
use crossbeam_channel::{Sender, TrySendError};
@ -21,15 +24,19 @@ pub enum Event {
}
pub(crate) trait EventFilter: Send + Sync + 'static {
fn matches_archetype(&self, data: ArchetypeFilterData, index: usize) -> bool;
fn matches_chunkset(&self, data: ChunksetFilterData, index: usize) -> bool;
fn matches_chunk(&self, data: ChunkFilterData, index: usize) -> bool;
fn matches_archetype(&self, data: ArchetypeFilterData, index: ArchetypeIndex) -> bool;
fn matches_chunkset(&self, data: ChunksetFilterData, index: SetIndex) -> bool;
fn matches_chunk(&self, data: ChunkFilterData, index: ChunkIndex) -> bool;
}
pub(crate) struct EventFilterWrapper<T: EntityFilter + Sync + 'static>(pub T);
impl<T: EntityFilter + Sync + 'static> EventFilter for EventFilterWrapper<T> {
fn matches_archetype(&self, data: ArchetypeFilterData, index: usize) -> bool {
fn matches_archetype(
&self,
data: ArchetypeFilterData,
ArchetypeIndex(index): ArchetypeIndex,
) -> bool {
let (filter, _, _) = self.0.filters();
if let Some(element) = filter.collect(data).nth(index) {
return filter.is_match(&element).is_pass();
@ -38,7 +45,7 @@ impl<T: EntityFilter + Sync + 'static> EventFilter for EventFilterWrapper<T> {
false
}
fn matches_chunkset(&self, data: ChunksetFilterData, index: usize) -> bool {
fn matches_chunkset(&self, data: ChunksetFilterData, SetIndex(index): SetIndex) -> bool {
let (_, filter, _) = self.0.filters();
if let Some(element) = filter.collect(data).nth(index) {
return filter.is_match(&element).is_pass();
@ -47,7 +54,7 @@ impl<T: EntityFilter + Sync + 'static> EventFilter for EventFilterWrapper<T> {
false
}
fn matches_chunk(&self, data: ChunkFilterData, index: usize) -> bool {
fn matches_chunk(&self, data: ChunkFilterData, ChunkIndex(index): ChunkIndex) -> bool {
let (_, _, filter) = self.0.filters();
if let Some(element) = filter.collect(data).nth(index) {
return filter.is_match(&element).is_pass();
@ -93,7 +100,7 @@ impl Subscribers {
}
}
pub fn matches_archetype(&self, data: ArchetypeFilterData, index: usize) -> Self {
pub fn matches_archetype(&self, data: ArchetypeFilterData, index: ArchetypeIndex) -> Self {
let subscribers = self
.subscribers
.iter()
@ -103,7 +110,7 @@ impl Subscribers {
Self { subscribers }
}
pub fn matches_chunkset(&self, data: ChunksetFilterData, index: usize) -> Self {
pub fn matches_chunkset(&self, data: ChunksetFilterData, index: SetIndex) -> Self {
let subscribers = self
.subscribers
.iter()
@ -113,7 +120,7 @@ impl Subscribers {
Self { subscribers }
}
pub fn matches_chunk(&self, data: ChunkFilterData, index: usize) -> Self {
pub fn matches_chunk(&self, data: ChunkFilterData, index: ChunkIndex) -> Self {
let subscribers = self
.subscribers
.iter()

View file

@ -1,3 +1,6 @@
use crate::index::ArchetypeIndex;
use crate::index::ChunkIndex;
use crate::index::SetIndex;
use crate::iterator::FissileZip;
use crate::iterator::SliceVecIter;
use crate::storage::ArchetypeData;
@ -364,12 +367,12 @@ pub struct FilterArchIter<'a, 'b, F: Filter<ArchetypeFilterData<'a>>> {
}
impl<'a, 'b, F: Filter<ArchetypeFilterData<'a>>> Iterator for FilterArchIter<'a, 'b, F> {
type Item = usize;
type Item = ArchetypeIndex;
fn next(&mut self) -> Option<Self::Item> {
if let Some((i, data)) = self.archetypes.next() {
if self.filter.is_match(&data).is_pass() {
return Some(i);
return Some(ArchetypeIndex(i));
}
}
@ -384,12 +387,12 @@ pub struct FilterChunkIter<'a, 'b, F: Filter<ChunksetFilterData<'a>>> {
}
impl<'a, 'b, F: Filter<ChunksetFilterData<'a>>> Iterator for FilterChunkIter<'a, 'b, F> {
type Item = usize;
type Item = SetIndex;
fn next(&mut self) -> Option<Self::Item> {
if let Some((i, data)) = self.chunks.next() {
if self.filter.is_match(&data).is_pass() {
return Some(i);
return Some(SetIndex(i));
}
}
@ -414,14 +417,14 @@ pub struct FilterEntityIter<
impl<'a, 'b, Arch: Filter<ArchetypeFilterData<'a>>, Chunk: Filter<ChunksetFilterData<'a>>> Iterator
for FilterEntityIter<'a, 'b, Arch, Chunk>
{
type Item = (ArchetypeId, usize);
type Item = (ArchetypeId, ChunkIndex);
fn next(&mut self) -> Option<Self::Item> {
loop {
if let Some((arch_id, ref mut chunks)) = self.chunks {
for (chunk_index, chunk_data) in chunks {
if self.chunk_filter.is_match(&chunk_data).is_pass() {
return Some((arch_id, chunk_index));
return Some((arch_id, ChunkIndex(chunk_index)));
}
}
}
@ -794,6 +797,12 @@ impl_and_filter!(A => a, B => b, C => c);
impl_and_filter!(A => a, B => b, C => c, D => d);
impl_and_filter!(A => a, B => b, C => c, D => d, E => e);
impl_and_filter!(A => a, B => b, C => c, D => d, E => e, F => f);
impl_and_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g);
impl_and_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h);
impl_and_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i);
impl_and_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i, J => j);
impl_and_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i, J => j, K => k);
impl_and_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i, J => j, K => k, L => l);
/// A filter which requires that any filter within `T` match.
#[derive(Debug, Clone)]
@ -889,6 +898,12 @@ impl_or_filter!(A => a, B => b, C => c);
impl_or_filter!(A => a, B => b, C => c, D => d);
impl_or_filter!(A => a, B => b, C => c, D => d, E => e);
impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f);
impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g);
impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h);
impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i);
impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i, J => j);
impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i, J => j, K => k);
impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i, J => j, K => k, L => l);
/// A filter which requires that all chunks contain entity data components of type `T`.
#[derive(Debug)]

View file

@ -0,0 +1,59 @@
use crate::entity::Entity;
use crate::storage::ArchetypeData;
use crate::storage::Chunkset;
use crate::storage::ComponentStorage;
use std::fmt;
use std::ops::Deref;
use std::ops::Index;
use std::ops::IndexMut;
/// Typed index of a `Chunkset` within an archetype (see `impl_index!(SetIndex: Chunkset)`).
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct SetIndex(pub usize);
/// Typed index of a `ComponentStorage` chunk within a chunkset.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct ChunkIndex(pub usize);
/// Typed index of an `ArchetypeData` within world storage.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct ArchetypeIndex(pub usize);
/// Typed index of an `Entity` entry within a chunk's entity slice.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct ComponentIndex(pub usize);
/// Wires a strongly-typed index newtype (a tuple struct wrapping `usize`)
/// to the collection element type it selects:
/// - `[T]` and `Vec<T>` become indexable (immutably and mutably) by the newtype,
/// - the newtype gains `Deref<Target = usize>` for easy access to the raw index,
/// - and `Display` renders exactly like the wrapped `usize`.
macro_rules! impl_index {
    ($idx:ty: $elem:ty) => {
        impl Deref for $idx {
            type Target = usize;

            #[inline(always)]
            fn deref(&self) -> &usize { &self.0 }
        }

        impl fmt::Display for $idx {
            #[inline]
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                // Delegate to the inner usize so output is identical to it.
                fmt::Display::fmt(&self.0, f)
            }
        }

        impl Index<$idx> for [$elem] {
            type Output = $elem;

            #[inline(always)]
            fn index(&self, i: $idx) -> &Self::Output { &self[i.0] }
        }

        impl IndexMut<$idx> for [$elem] {
            #[inline(always)]
            fn index_mut(&mut self, i: $idx) -> &mut Self::Output { &mut self[i.0] }
        }

        impl Index<$idx> for Vec<$elem> {
            type Output = $elem;

            #[inline(always)]
            fn index(&self, i: $idx) -> &Self::Output { &self[i.0] }
        }

        impl IndexMut<$idx> for Vec<$elem> {
            #[inline(always)]
            fn index_mut(&mut self, i: $idx) -> &mut Self::Output { &mut self[i.0] }
        }
    };
}
// Wire each index newtype to the collection element type it selects:
// chunkset-in-archetype, chunk-in-chunkset, archetype-in-storage,
// and entity-in-chunk respectively.
impl_index!(SetIndex: Chunkset);
impl_index!(ChunkIndex: ComponentStorage);
impl_index!(ArchetypeIndex: ArchetypeData);
impl_index!(ComponentIndex: Entity);

View file

@ -0,0 +1,30 @@
#![allow(dead_code)]
pub mod borrow;
pub mod command;
pub mod cons;
pub mod entity;
pub mod event;
pub mod filter;
pub mod index;
pub mod iterator;
pub mod query;
pub mod storage;
pub mod world;
// Serialization support is optional, gated behind the `serialize` feature.
#[cfg(feature = "serialize")]
pub mod serialize;
mod tuple;
mod zip;
// Re-exports of the most commonly used types, intended for glob import
// (`use legion_core::prelude::*;` as used in the query doc examples).
pub mod prelude {
    pub use crate::{
        command::CommandBuffer,
        entity::Entity,
        event::Event,
        filter::filter_fns::*,
        query::{IntoQuery, Query, Read, Tagged, TryRead, TryWrite, Write},
        world::{Universe, World},
    };
}

View file

@ -16,6 +16,8 @@ use crate::filter::Filter;
use crate::filter::FilterResult;
use crate::filter::Passthrough;
use crate::filter::TagFilter;
use crate::index::ChunkIndex;
use crate::index::SetIndex;
#[cfg(feature = "par-iter")]
use crate::iterator::{FissileEnumerate, FissileIterator};
use crate::storage::ArchetypeData;
@ -51,7 +53,8 @@ pub trait View<'a>: Sized + Send + Sync + 'static {
fn fetch(
archetype: &'a ArchetypeData,
chunk: &'a ComponentStorage,
chunk_index: usize,
chunk_index: ChunkIndex,
set_index: SetIndex,
) -> Self::Iter;
/// Validates that the view does not break any component borrowing rules.
@ -126,7 +129,12 @@ impl<'a, T: Component> DefaultFilter for Read<T> {
impl<'a, T: Component> View<'a> for Read<T> {
type Iter = RefIter<'a, T, Iter<'a, T>>;
fn fetch(_: &'a ArchetypeData, chunk: &'a ComponentStorage, _: usize) -> Self::Iter {
fn fetch(
_: &'a ArchetypeData,
chunk: &'a ComponentStorage,
_: ChunkIndex,
_: SetIndex,
) -> Self::Iter {
let (slice_borrow, slice) = unsafe {
chunk
.components(ComponentTypeId::of::<T>())
@ -178,7 +186,12 @@ impl<'a, T: Component> DefaultFilter for TryRead<T> {
impl<'a, T: Component> View<'a> for TryRead<T> {
type Iter = TryRefIter<'a, T, Iter<'a, T>>;
fn fetch(_: &'a ArchetypeData, chunk: &'a ComponentStorage, _: usize) -> Self::Iter {
fn fetch(
_: &'a ArchetypeData,
chunk: &'a ComponentStorage,
_: ChunkIndex,
_: SetIndex,
) -> Self::Iter {
unsafe {
chunk
.components(ComponentTypeId::of::<T>())
@ -225,7 +238,12 @@ impl<'a, T: Component> View<'a> for Write<T> {
type Iter = RefIterMut<'a, T, IterMut<'a, T>>;
#[inline]
fn fetch(_: &'a ArchetypeData, chunk: &'a ComponentStorage, _: usize) -> Self::Iter {
fn fetch(
_: &'a ArchetypeData,
chunk: &'a ComponentStorage,
_: ChunkIndex,
_: SetIndex,
) -> Self::Iter {
let (slice_borrow, slice) = unsafe {
chunk
.components(ComponentTypeId::of::<T>())
@ -280,7 +298,12 @@ impl<'a, T: Component> DefaultFilter for TryWrite<T> {
impl<'a, T: Component> View<'a> for TryWrite<T> {
type Iter = TryRefIterMut<'a, T, IterMut<'a, T>>;
fn fetch(_: &'a ArchetypeData, chunk: &'a ComponentStorage, _: usize) -> Self::Iter {
fn fetch(
_: &'a ArchetypeData,
chunk: &'a ComponentStorage,
_: ChunkIndex,
_: SetIndex,
) -> Self::Iter {
unsafe {
chunk
.components(ComponentTypeId::of::<T>())
@ -335,7 +358,8 @@ impl<'a, T: Tag> View<'a> for Tagged<T> {
fn fetch(
archetype: &'a ArchetypeData,
chunk: &'a ComponentStorage,
chunk_index: usize,
_: ChunkIndex,
SetIndex(set_index): SetIndex,
) -> Self::Iter {
let data = unsafe {
archetype
@ -348,7 +372,7 @@ impl<'a, T: Tag> View<'a> for Tagged<T> {
)
})
.data_slice::<T>()
.get_unchecked(chunk_index)
.get_unchecked(set_index)
};
std::iter::repeat(data).take(chunk.len())
}
@ -395,6 +419,10 @@ macro_rules! impl_view_tuple {
impl<$( $ty: ReadOnly ),* > ReadOnly for ($( $ty, )*) {}
impl<$( $ty: ViewElement ),*> ViewElement for ($( $ty, )*) {
type Component = ($( $ty::Component, )*);
}
impl<'a, $( $ty: ViewElement + View<'a> ),* > View<'a> for ($( $ty, )*) {
type Iter = crate::zip::Zip<($( $ty::Iter, )*)>;
@ -402,9 +430,10 @@ macro_rules! impl_view_tuple {
fn fetch(
archetype: &'a ArchetypeData,
chunk: &'a ComponentStorage,
chunk_index: usize,
chunk_index: ChunkIndex,
set_index: SetIndex,
) -> Self::Iter {
crate::zip::multizip(($( $ty::fetch(archetype.clone(), chunk.clone(), chunk_index), )*))
crate::zip::multizip(($( $ty::fetch(archetype.clone(), chunk.clone(), chunk_index, set_index), )*))
}
fn validate() -> bool {
@ -449,26 +478,33 @@ impl_view_tuple!(A, B, C);
impl_view_tuple!(A, B, C, D);
impl_view_tuple!(A, B, C, D, E);
impl_view_tuple!(A, B, C, D, E, F);
impl_view_tuple!(A, B, C, D, E, F, G);
impl_view_tuple!(A, B, C, D, E, F, G, H);
impl_view_tuple!(A, B, C, D, E, F, G, H, I);
impl_view_tuple!(A, B, C, D, E, F, G, H, I, J);
impl_view_tuple!(A, B, C, D, E, F, G, H, I, J, K);
impl_view_tuple!(A, B, C, D, E, F, G, H, I, J, K, L);
/// A type-safe view of a chunk of entities all of the same data layout.
pub struct Chunk<'a, V: for<'b> View<'b>> {
archetype: &'a ArchetypeData,
components: &'a ComponentStorage,
index: usize,
chunk_index: ChunkIndex,
set_index: SetIndex,
view: PhantomData<V>,
}
impl<'a, V: for<'b> View<'b>> Chunk<'a, V> {
pub fn new(archetype: &'a ArchetypeData, set: usize, index: usize) -> Self {
pub fn new(archetype: &'a ArchetypeData, set_index: SetIndex, chunk_index: ChunkIndex) -> Self {
Self {
components: unsafe {
archetype
.chunksets()
.get_unchecked(set)
.get_unchecked(index)
.chunkset_unchecked(set_index)
.chunk_unchecked(chunk_index)
},
archetype,
index: set,
chunk_index,
set_index,
view: PhantomData,
}
}
@ -480,7 +516,12 @@ impl<'a, V: for<'b> View<'b>> Chunk<'a, V> {
/// Get an iterator of all data contained within the chunk.
#[inline]
pub fn iter(&mut self) -> <V as View<'a>>::Iter {
V::fetch(self.archetype, self.components, self.index)
V::fetch(
self.archetype,
self.components,
self.chunk_index,
self.set_index,
)
}
/// Get an iterator of all data and entity IDs contained within the chunk.
@ -488,7 +529,12 @@ impl<'a, V: for<'b> View<'b>> Chunk<'a, V> {
pub fn iter_entities_mut(&mut self) -> ZipEntities<'a, V> {
ZipEntities {
entities: self.entities(),
data: V::fetch(self.archetype, self.components, self.index),
data: V::fetch(
self.archetype,
self.components,
self.chunk_index,
self.set_index,
),
index: 0,
view: PhantomData,
}
@ -500,7 +546,7 @@ impl<'a, V: for<'b> View<'b>> Chunk<'a, V> {
.tags()
.get(TagTypeId::of::<T>())
.map(|tags| unsafe { tags.data_slice::<T>() })
.map(|slice| unsafe { slice.get_unchecked(self.index) })
.map(|slice| unsafe { slice.get_unchecked(*self.set_index) })
}
/// Get a slice of component data.
@ -578,7 +624,11 @@ where
chunk_filter: &'filter FChunk,
archetypes: Enumerate<FArch::Iter>,
set_frontier: Option<(&'data ArchetypeData, Take<Enumerate<FChunkset::Iter>>)>,
chunk_frontier: Option<(&'data ArchetypeData, usize, Take<Enumerate<FChunk::Iter>>)>,
chunk_frontier: Option<(
&'data ArchetypeData,
SetIndex,
Take<Enumerate<FChunk::Iter>>,
)>,
}
impl<'data, 'filter, V, FArch, FChunkset, FChunk>
@ -589,13 +639,13 @@ where
FChunkset: Filter<ChunksetFilterData<'data>>,
FChunk: Filter<ChunkFilterData<'data>>,
{
fn next_set(&mut self) -> Option<(&'data ArchetypeData, usize)> {
fn next_set(&mut self) -> Option<(&'data ArchetypeData, SetIndex)> {
loop {
// if we are looping through an archetype, find the next set
if let Some((ref arch, ref mut chunks)) = self.set_frontier {
for (set_index, filter_data) in chunks {
if self.chunkset_filter.is_match(&filter_data).is_pass() {
return Some((arch, set_index));
return Some((arch, SetIndex(set_index)));
}
}
}
@ -648,14 +698,14 @@ where
if let Some((ref arch, set_index, ref mut set)) = self.chunk_frontier {
for (chunk_index, filter_data) in set {
if self.chunk_filter.is_match(&filter_data).is_pass() {
return Some(Chunk::new(arch, set_index, chunk_index));
return Some(Chunk::new(arch, set_index, ChunkIndex(chunk_index)));
}
}
}
// we have completed the set, find the next
if let Some((ref arch, set_index)) = self.next_set() {
let chunks = unsafe { arch.chunksets().get_unchecked(set_index) }.occupied();
let chunks = unsafe { arch.chunkset_unchecked(set_index) }.occupied();
self.chunk_frontier = Some((
arch,
set_index,
@ -746,7 +796,7 @@ where
/// Queries can be constructed from any `View` type, including tuples of `View`s.
///
/// ```rust
/// # use legion::prelude::*;
/// # use legion_core::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
@ -763,7 +813,7 @@ where
/// The view determines what data is accessed, and whether it is accessed mutably or not.
///
/// ```rust
/// # use legion::prelude::*;
/// # use legion_core::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
@ -779,7 +829,7 @@ where
/// types accessed by the view. However, additional filters can be specified if needed:
///
/// ```rust
/// # use legion::prelude::*;
/// # use legion_core::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
@ -796,7 +846,7 @@ where
/// Filters can be combined with bitwise operators:
///
/// ```rust
/// # use legion::prelude::*;
/// # use legion_core::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
@ -815,7 +865,7 @@ where
/// Filters can be iterated through to pull data out of a `World`:
///
/// ```rust
/// # use legion::prelude::*;
/// # use legion_core::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
@ -840,7 +890,7 @@ where
/// This allows you to run code for each tag value, or to retrieve a contiguous data slice.
///
/// ```rust
/// # use legion::prelude::*;
/// # use legion_core::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
@ -865,7 +915,7 @@ where
#[derivative(Clone(bound = "F: Clone"))]
pub struct Query<V: for<'a> View<'a>, F: EntityFilter> {
view: PhantomData<V>,
pub(crate) filter: F,
pub filter: F,
}
impl<V, F> Query<V, F>
@ -1370,7 +1420,7 @@ where
)>,
chunk_frontier: Option<(
&'data ArchetypeData,
usize,
SetIndex,
FissileEnumerate<FChunk::Iter>,
usize,
)>,
@ -1388,7 +1438,7 @@ where
FChunkset::Iter: FissileIterator,
FChunk::Iter: FissileIterator,
{
fn next_set(&mut self) -> Option<(&'data ArchetypeData, usize)> {
fn next_set(&mut self) -> Option<(&'data ArchetypeData, SetIndex)> {
loop {
// if we are looping through an archetype, find the next set
if let Some((ref arch, ref mut chunks, index_bound)) = self.set_frontier {
@ -1396,7 +1446,7 @@ where
if set_index < index_bound
&& self.chunkset_filter.is_match(&filter_data).is_pass()
{
return Some((arch, set_index));
return Some((arch, SetIndex(set_index)));
}
}
}
@ -1453,14 +1503,14 @@ where
if chunk_index < index_bound
&& self.chunk_filter.is_match(&filter_data).is_pass()
{
return Some(Chunk::new(arch, set_index, chunk_index));
return Some(Chunk::new(arch, set_index, ChunkIndex(chunk_index)));
}
}
}
// we have completed the set, find the next
if let Some((ref arch, set_index)) = self.next_set() {
let chunks = unsafe { arch.chunksets().get_unchecked(set_index) }.occupied();
let chunks = unsafe { arch.chunkset_unchecked(set_index) }.occupied();
self.chunk_frontier = Some((
arch,
set_index,

View file

@ -1,5 +1,6 @@
use crate::{
entity::{Entity, EntityAllocator},
index::{ArchetypeIndex, ChunkIndex, SetIndex},
storage::{
ArchetypeData, ArchetypeDescription, Chunkset, ComponentMeta, ComponentTypeId, TagMeta,
TagStorage, TagTypeId,
@ -246,7 +247,7 @@ struct ArchetypeDescriptionDeserialize<'a, 'b, WD: WorldDeserializer> {
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
for ArchetypeDescriptionDeserialize<'a, 'b, WD>
{
type Value = usize;
type Value = ArchetypeIndex;
fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
where
@ -262,6 +263,7 @@ impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
.archetypes()
.iter()
.position(|a| a.description() == &archetype_desc)
.map(ArchetypeIndex)
.unwrap_or_else(|| {
let (idx, _) = storage.alloc_archetype(archetype_desc);
idx
@ -269,7 +271,7 @@ impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
}
}
type ChunkSetMapping = HashMap<usize, usize>;
type ChunkSetMapping = HashMap<usize, SetIndex>;
struct TagsDeserializer<'a, 'b, WD: WorldDeserializer> {
user: &'b WD,
@ -306,11 +308,11 @@ impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for TagsDeserializ
world_tag_storages
};
let num_world_values = world_tag_storages.iter().map(|ts| ts.len()).nth(0);
let num_world_values = world_tag_storages.iter().map(|ts| ts.len()).next();
let num_tag_values = deserialized_tags
.iter()
.map(|ts| ts.len())
.nth(0)
.next()
.unwrap_or(0);
let mut chunksets_to_add = Vec::new();
for i in 0..num_tag_values {
@ -344,7 +346,7 @@ impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for TagsDeserializ
// All temporary TagStorages in `deserialized_tags` will be forgotten later
// because we move data into World when allocating a new chunkset
if let Some(world_idx) = matching_idx {
chunkset_map.insert(i, world_idx);
chunkset_map.insert(i, SetIndex(world_idx));
for tag_idx in 0..tag_types.len() {
unsafe {
let (_, tag_meta) = tag_types[tag_idx];
@ -438,7 +440,7 @@ impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
struct ChunkSetDeserializer<'a, 'b, WD: WorldDeserializer> {
user: &'b WD,
world: &'a mut World,
archetype_idx: usize,
archetype_idx: ArchetypeIndex,
chunkset_map: &'a ChunkSetMapping,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for ChunkSetDeserializer<'a, 'b, WD> {
@ -483,8 +485,8 @@ impl<'de, 'a, 'b, WD: WorldDeserializer> Visitor<'de> for ChunkSetDeserializer<'
struct ChunkListDeserializer<'a, 'b, WD: WorldDeserializer> {
user: &'b WD,
world: &'a mut World,
archetype_idx: usize,
chunkset_idx: Option<usize>,
archetype_idx: ArchetypeIndex,
chunkset_idx: Option<SetIndex>,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
for ChunkListDeserializer<'a, 'b, WD>
@ -535,8 +537,8 @@ enum ChunkField {
struct ChunkDeserializer<'a, 'b, WD: WorldDeserializer> {
user: &'b WD,
world: &'a mut World,
archetype_idx: usize,
chunkset_idx: usize,
archetype_idx: ArchetypeIndex,
chunkset_idx: SetIndex,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for ChunkDeserializer<'a, 'b, WD> {
type Value = ();
@ -612,11 +614,11 @@ impl<'de, 'a, 'b, WD: WorldDeserializer> Visitor<'de> for ChunkDeserializer<'a,
struct EntitiesDeserializer<'a, 'b, WD: WorldDeserializer> {
user: &'b WD,
world: &'a mut World,
archetype_idx: usize,
chunkset_idx: usize,
archetype_idx: ArchetypeIndex,
chunkset_idx: SetIndex,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for EntitiesDeserializer<'a, 'b, WD> {
type Value = Vec<(usize, usize)>;
type Value = Vec<(ChunkIndex, usize)>;
fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, <D as Deserializer<'de>>::Error>
where
@ -657,9 +659,9 @@ impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de> for EntitiesDeseri
struct ComponentsDeserializer<'a, 'b, WD: WorldDeserializer> {
user: &'b WD,
world: &'a mut World,
archetype_idx: usize,
chunkset_idx: usize,
chunk_ranges: &'a Vec<(usize, usize)>,
archetype_idx: ArchetypeIndex,
chunkset_idx: SetIndex,
chunk_ranges: &'a Vec<(ChunkIndex, usize)>,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
for ComponentsDeserializer<'a, 'b, WD>
@ -711,7 +713,7 @@ struct ComponentDataDeserializer<'a, 'b, WD: WorldDeserializer> {
comp_type: &'a ComponentTypeId,
comp_meta: &'a ComponentMeta,
chunkset: &'a mut Chunkset,
chunk_ranges: &'a Vec<(usize, usize)>,
chunk_ranges: &'a Vec<(ChunkIndex, usize)>,
}
impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
for ComponentDataDeserializer<'a, 'b, WD>
@ -728,19 +730,16 @@ impl<'de, 'a, 'b, WD: WorldDeserializer> DeserializeSeed<'de>
self.comp_type,
self.comp_meta,
&mut || -> Option<(NonNull<u8>, usize)> {
self.chunk_ranges.get(range_idx).map(|chunk_range| {
self.chunk_ranges.get(range_idx).map(|&(chunk, size)| {
range_idx += 1;
let chunk = &mut self.chunkset[chunk_range.0];
let chunk = &mut self.chunkset[chunk];
unsafe {
let comp_storage = (&mut *chunk.writer().get().1.get())
.get_mut(*self.comp_type)
.expect(
"expected ComponentResourceSet when deserializing component data",
);
(
comp_storage.writer().reserve_raw(chunk_range.1),
chunk_range.1,
)
(comp_storage.writer().reserve_raw(size), size)
}
})
},

View file

@ -0,0 +1,2 @@
pub mod de;
pub mod ser;

View file

@ -9,13 +9,16 @@ use crate::filter::ChunkFilterData;
use crate::filter::ChunksetFilterData;
use crate::filter::EntityFilter;
use crate::filter::Filter;
use crate::index::ArchetypeIndex;
use crate::index::ChunkIndex;
use crate::index::ComponentIndex;
use crate::index::SetIndex;
use crate::iterator::FissileZip;
use crate::iterator::SliceVecIter;
use crate::world::TagSet;
use crate::world::WorldId;
use derivative::Derivative;
use fxhash::FxHashMap;
use smallvec::Drain;
use smallvec::SmallVec;
use std::any::type_name;
use std::cell::UnsafeCell;
@ -26,8 +29,6 @@ use std::ops::Deref;
use std::ops::DerefMut;
use std::ops::RangeBounds;
use std::ptr::NonNull;
use std::slice::Iter;
use std::slice::IterMut;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;
use std::sync::Arc;
@ -86,7 +87,6 @@ impl TagTypeId {
pub fn of<T: Component>() -> Self { Self(type_name::<T>(), 0) }
}
/// A `Component` is per-entity data that can be attached to a single entity.
pub trait Component: Send + Sync + 'static {}
@ -207,13 +207,13 @@ impl Storage {
pub(crate) fn alloc_archetype(
&mut self,
desc: ArchetypeDescription,
) -> (usize, &mut ArchetypeData) {
let id = ArchetypeId(self.world_id, self.archetypes.len());
) -> (ArchetypeIndex, &mut ArchetypeData) {
let index = ArchetypeIndex(self.archetypes.len());
let id = ArchetypeId(self.world_id, index);
let archetype = ArchetypeData::new(id, desc);
self.push(archetype);
let index = self.archetypes.len() - 1;
let archetype = &mut self.archetypes[index];
(index, archetype)
}
@ -222,10 +222,10 @@ impl Storage {
let desc = archetype.description();
self.component_types
.0
.push(desc.components.iter().map(|(t, _)| *t));
self.tag_types.0.push(desc.tags.iter().map(|(t, _)| *t));
.push(desc.components.iter().map(|&(t, _)| t));
self.tag_types.0.push(desc.tags.iter().map(|&(t, _)| t));
let index = self.archetypes.len();
let index = ArchetypeIndex(self.archetypes.len());
let archetype_data = ArchetypeFilterData {
component_types: &self.component_types,
tag_types: &self.tag_types,
@ -235,7 +235,7 @@ impl Storage {
trace!(
world = id.world().index(),
archetype = id.index(),
archetype = *id.index(),
components = ?desc.component_names,
tags = ?desc.tag_names,
"Created Archetype"
@ -270,6 +270,46 @@ impl Storage {
) -> std::vec::Drain<ArchetypeData> {
self.archetypes.drain(range)
}
/// Returns the archetype at `index`, or `None` if the index is out of bounds.
pub(crate) fn archetype(
    &self,
    ArchetypeIndex(index): ArchetypeIndex,
) -> Option<&ArchetypeData> {
    self.archetypes().get(index)
}

/// Mutable counterpart of `archetype`; `None` if `index` is out of bounds.
pub(crate) fn archetype_mut(
    &mut self,
    ArchetypeIndex(index): ArchetypeIndex,
) -> Option<&mut ArchetypeData> {
    self.archetypes_mut().get_mut(index)
}

/// Returns the archetype at `index` without bounds checking.
///
/// # Safety
/// `index` must be less than the number of archetypes in this storage.
pub(crate) unsafe fn archetype_unchecked(
    &self,
    ArchetypeIndex(index): ArchetypeIndex,
) -> &ArchetypeData {
    self.archetypes().get_unchecked(index)
}

/// Mutable counterpart of `archetype_unchecked`.
///
/// # Safety
/// `index` must be less than the number of archetypes in this storage.
pub(crate) unsafe fn archetype_unchecked_mut(
    &mut self,
    ArchetypeIndex(index): ArchetypeIndex,
) -> &mut ArchetypeData {
    self.archetypes_mut().get_unchecked_mut(index)
}

/// Resolves an entity location (archetype -> chunkset -> chunk) to its
/// component storage chunk, returning `None` if any level is missing.
pub(crate) fn chunk(&self, loc: EntityLocation) -> Option<&ComponentStorage> {
    self.archetype(loc.archetype())
        .and_then(|atd| atd.chunkset(loc.set()))
        .and_then(|cs| cs.chunk(loc.chunk()))
}

/// Mutable counterpart of `chunk`.
pub(crate) fn chunk_mut(&mut self, loc: EntityLocation) -> Option<&mut ComponentStorage> {
    self.archetype_mut(loc.archetype())
        .and_then(|atd| atd.chunkset_mut(loc.set()))
        .and_then(|cs| cs.chunk_mut(loc.chunk()))
}
}
/// Stores metadata describing the type of a tag.
@ -405,19 +445,21 @@ impl<'a> Filter<ArchetypeFilterData<'a>> for ArchetypeDescription {
}
}
const MAX_CHUNK_SIZE: usize = 16 * 1024;
const MAX_CHUNK_SIZE: usize = 16 * 1024 * 10;
const COMPONENT_STORAGE_ALIGNMENT: usize = 64;
/// Unique ID of an archetype.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct ArchetypeId(WorldId, usize);
pub struct ArchetypeId(WorldId, ArchetypeIndex);
impl ArchetypeId {
pub(crate) fn new(world_id: WorldId, index: usize) -> Self { ArchetypeId(world_id, index) }
pub(crate) fn new(world_id: WorldId, index: ArchetypeIndex) -> Self {
ArchetypeId(world_id, index)
}
fn index(self) -> usize { self.1 }
pub fn index(self) -> ArchetypeIndex { self.1 }
fn world(self) -> WorldId { self.0 }
pub fn world(self) -> WorldId { self.0 }
}
/// Contains all of the tags attached to the entities in each chunk.
@ -425,7 +467,7 @@ pub struct Tags(pub(crate) SmallVec<[(TagTypeId, TagStorage); 3]>);
impl Tags {
fn new(mut data: SmallVec<[(TagTypeId, TagStorage); 3]>) -> Self {
data.sort_by_key(|(t, _)| *t);
data.sort_by_key(|&(t, _)| t);
Self(data)
}
@ -439,7 +481,7 @@ impl Tags {
#[inline]
pub fn get(&self, type_id: TagTypeId) -> Option<&TagStorage> {
self.0
.binary_search_by_key(&type_id, |(t, _)| *t)
.binary_search_by_key(&type_id, |&(t, _)| t)
.ok()
.map(|i| unsafe { &self.0.get_unchecked(i).1 })
}
@ -448,19 +490,23 @@ impl Tags {
#[inline]
pub fn get_mut(&mut self, type_id: TagTypeId) -> Option<&mut TagStorage> {
self.0
.binary_search_by_key(&type_id, |(t, _)| *t)
.binary_search_by_key(&type_id, |&(t, _)| t)
.ok()
.map(move |i| unsafe { &mut self.0.get_unchecked_mut(i).1 })
}
pub(crate) fn tag_set(&self, chunk: usize) -> DynamicTagSet {
pub(crate) fn tag_set(&self, SetIndex(index): SetIndex) -> DynamicTagSet {
let mut tags = DynamicTagSet { tags: Vec::new() };
unsafe {
for (type_id, storage) in self.0.iter() {
let (ptr, _, count) = storage.data_raw();
debug_assert!(chunk < count, "chunk index out of bounds");
tags.push(*type_id, *storage.element(), ptr);
for &(type_id, ref storage) in self.0.iter() {
let (ptr, element_size, count) = storage.data_raw();
debug_assert!(index < count, "set index out of bounds");
tags.push(
type_id,
*storage.element(),
NonNull::new(ptr.as_ptr().add(element_size * index)).unwrap(),
);
}
}
@ -497,7 +543,7 @@ impl DynamicTagSet {
.tags
.iter()
.enumerate()
.find(|(_, (t, _, _))| *t == type_id)
.find(|(_, &(t, _, _))| t == type_id)
{
let (_, meta, ptr) = self.tags.remove(i);
unsafe {
@ -516,8 +562,8 @@ impl DynamicTagSet {
impl TagSet for DynamicTagSet {
fn write_tags(&self, tags: &mut Tags) {
for (type_id, meta, ptr) in self.tags.iter() {
let storage = tags.get_mut(*type_id).unwrap();
for &(type_id, ref meta, ptr) in self.tags.iter() {
let storage = tags.get_mut(type_id).unwrap();
unsafe {
if meta.drop_fn.is_some() && !meta.is_zero_sized() {
// clone the value into temp storage then move it into the chunk
@ -572,7 +618,7 @@ impl ArchetypeData {
let tags = desc
.tags
.iter()
.map(|(type_id, meta)| (*type_id, TagStorage::new(*meta)))
.map(|&(type_id, meta)| (type_id, TagStorage::new(meta)))
.collect();
// create component data layout
@ -588,12 +634,12 @@ impl ArchetypeData {
);
let mut data_capacity = 0usize;
let mut component_data_offsets = Vec::new();
for (type_id, meta) in desc.components.iter() {
for &(type_id, meta) in desc.components.iter() {
data_capacity = align_up(
align_up(data_capacity, COMPONENT_STORAGE_ALIGNMENT),
meta.align,
);
component_data_offsets.push((*type_id, data_capacity, *meta));
component_data_offsets.push((type_id, data_capacity, meta));
data_capacity += meta.size * entity_capacity;
}
let data_alignment =
@ -614,10 +660,17 @@ impl ArchetypeData {
}
}
/// Removes all entity data from this archetype by clearing every chunk in
/// every chunkset; dropping the chunks drops the component data they own.
pub(crate) fn delete_all(&mut self) {
    for set in &mut self.chunk_sets {
        // Clearing the chunk will Drop all the data
        set.chunks.clear();
    }
}
pub(crate) fn subscribe(&mut self, subscriber: Subscriber) {
self.subscribers.push(subscriber.clone());
for i in 0..self.chunk_sets.len() {
for i in (0..self.chunk_sets.len()).map(SetIndex) {
let filter = ChunksetFilterData {
archetype_data: self,
};
@ -631,7 +684,7 @@ impl ArchetypeData {
pub(crate) fn set_subscribers(&mut self, subscribers: Subscribers) {
self.subscribers = subscribers;
for i in 0..self.chunk_sets.len() {
for i in (0..self.chunk_sets.len()).map(SetIndex) {
let filter = ChunksetFilterData {
archetype_data: self,
};
@ -651,10 +704,10 @@ impl ArchetypeData {
let mut set_match = None;
for self_index in 0..self.chunk_sets.len() {
let mut matches = true;
for (type_id, tags) in self.tags.0.iter() {
for &(type_id, ref tags) in self.tags.0.iter() {
unsafe {
let (self_tag_ptr, size, _) = tags.data_raw();
let (other_tag_ptr, _, _) = other_tags.get(*type_id).unwrap().data_raw();
let (other_tag_ptr, _, _) = other_tags.get(type_id).unwrap().data_raw();
if !tags.element().equals(
self_tag_ptr.as_ptr().add(self_index * size),
@ -675,16 +728,18 @@ impl ArchetypeData {
if let Some(chunk_set) = set_match {
// if we found a match, move the chunks into the set
let target = &mut self.chunk_sets[chunk_set];
for chunk in set.drain(..) {
for mut chunk in set.drain(..) {
chunk.mark_modified();
target.push(chunk);
}
} else {
// if we did not find a match, clone the tags and move the set
set.mark_modified();
self.push(set, |self_tags| {
for (type_id, other_tags) in other_tags.0.iter() {
for &(type_id, ref other_tags) in other_tags.0.iter() {
unsafe {
let (src, _, _) = other_tags.data_raw();
let dst = self_tags.get_mut(*type_id).unwrap().alloc_ptr();
let dst = self_tags.get_mut(type_id).unwrap().alloc_ptr();
other_tags.element().clone(src.as_ptr(), dst);
}
}
@ -695,9 +750,18 @@ impl ArchetypeData {
self.tags.validate(self.chunk_sets.len());
}
pub(crate) fn enumerate_entities<'a>(
/// Iterate all entities in existence by iterating across archetypes, chunk sets, and chunks
pub(crate) fn iter_entities<'a>(&'a self) -> impl Iterator<Item = Entity> + 'a {
self.chunk_sets.iter().flat_map(move |set| {
set.chunks
.iter()
.flat_map(move |chunk| chunk.entities().iter().copied())
})
}
pub(crate) fn iter_entity_locations<'a>(
&'a self,
archetype_index: usize,
archetype_index: ArchetypeIndex,
) -> impl Iterator<Item = (Entity, EntityLocation)> + 'a {
self.chunk_sets
.iter()
@ -711,14 +775,14 @@ impl ArchetypeData {
.entities()
.iter()
.enumerate()
.map(move |(entity_index, entity)| {
.map(move |(entity_index, &entity)| {
(
*entity,
entity,
EntityLocation::new(
archetype_index,
set_index,
chunk_index,
entity_index,
SetIndex(set_index),
ChunkIndex(chunk_index),
ComponentIndex(entity_index),
),
)
})
@ -730,7 +794,7 @@ impl ArchetypeData {
initialize(&mut self.tags);
self.chunk_sets.push(set);
let index = self.chunk_sets.len() - 1;
let index = SetIndex(self.chunk_sets.len() - 1);
let filter = ChunksetFilterData {
archetype_data: self,
};
@ -743,35 +807,39 @@ impl ArchetypeData {
/// Allocates a new chunk set. Returns the index of the new set.
///
/// `initialize` is expected to push the new chunkset's tag values onto the tags collection.
pub(crate) fn alloc_chunk_set<F: FnMut(&mut Tags)>(&mut self, initialize: F) -> usize {
pub(crate) fn alloc_chunk_set<F: FnMut(&mut Tags)>(&mut self, initialize: F) -> SetIndex {
self.push(Chunkset::default(), initialize);
self.chunk_sets.len() - 1
SetIndex(self.chunk_sets.len() - 1)
}
/// Finds a chunk with space free for at least `minimum_space` entities, creating a chunk if needed.
pub(crate) fn get_free_chunk(&mut self, set_index: usize, minimum_space: usize) -> usize {
pub(crate) fn get_free_chunk(
&mut self,
set_index: SetIndex,
minimum_space: usize,
) -> ChunkIndex {
let count = {
let chunks = &mut self.chunk_sets[set_index];
let len = chunks.len();
for (i, chunk) in chunks.iter_mut().enumerate() {
let space_left = chunk.capacity() - chunk.len();
if space_left >= minimum_space {
return i;
return ChunkIndex(i);
}
}
len
ChunkIndex(len)
};
let chunk = self
.component_layout
.alloc_storage(ChunkId(self.id, set_index, count));
unsafe { self.chunk_sets.get_unchecked_mut(set_index).push(chunk) };
unsafe { self.chunkset_unchecked_mut(set_index).push(chunk) };
trace!(
world = self.id.world().index(),
archetype = self.id.index(),
chunkset = set_index,
chunk = count,
archetype = *self.id.index(),
chunkset = *set_index,
chunk = *count,
components = ?self.desc.component_names,
tags = ?self.desc.tag_names,
"Created chunk"
@ -808,13 +876,16 @@ impl ArchetypeData {
) -> bool {
trace!(
world = self.id().world().index(),
archetype = self.id().index(),
archetype = *self.id().index(),
"Defragmenting archetype"
);
let arch_index = self.id.index();
for (i, chunkset) in self.chunk_sets.iter_mut().enumerate() {
let complete = chunkset.defrag(budget, |e, chunk, component| {
on_moved(e, EntityLocation::new(arch_index, i, chunk, component));
on_moved(
e,
EntityLocation::new(arch_index, SetIndex(i), chunk, component),
);
});
if !complete {
return false;
@ -823,6 +894,25 @@ impl ArchetypeData {
true
}
/// Returns the chunkset at `index`, or `None` if the index is out of bounds.
pub(crate) fn chunkset(&self, SetIndex(index): SetIndex) -> Option<&Chunkset> {
    self.chunksets().get(index)
}

/// Mutable counterpart of `chunkset`; `None` if `index` is out of bounds.
pub(crate) fn chunkset_mut(&mut self, SetIndex(index): SetIndex) -> Option<&mut Chunkset> {
    self.chunksets_mut().get_mut(index)
}

/// Returns the chunkset at `index` without bounds checking.
///
/// # Safety
/// `index` must be less than the number of chunksets in this archetype.
pub(crate) unsafe fn chunkset_unchecked(&self, SetIndex(index): SetIndex) -> &Chunkset {
    self.chunksets().get_unchecked(index)
}

/// Mutable counterpart of `chunkset_unchecked`.
///
/// # Safety
/// `index` must be less than the number of chunksets in this archetype.
pub(crate) unsafe fn chunkset_unchecked_mut(
    &mut self,
    SetIndex(index): SetIndex,
) -> &mut Chunkset {
    self.chunksets_mut().get_unchecked_mut(index)
}
}
fn align_up(addr: usize, align: usize) -> usize { (addr + (align - 1)) & align.wrapping_neg() }
@ -845,9 +935,9 @@ impl ComponentStorageLayout {
let storage_info = self
.data_layout
.iter()
.map(|(ty, _, meta)| {
.map(|&(ty, _, ref meta)| {
(
*ty,
ty,
ComponentResourceSet {
ptr: AtomicRefCell::new(meta.align as *mut u8),
capacity: self.capacity,
@ -867,7 +957,7 @@ impl ComponentStorageLayout {
component_offsets: self
.data_layout
.iter()
.map(|(ty, offset, _)| (*ty, *offset))
.map(|&(ty, offset, _)| (ty, offset))
.collect(),
component_layout: self.alloc_layout,
component_info: UnsafeCell::new(Components::new(storage_info)),
@ -907,7 +997,7 @@ impl Chunkset {
let id = chunk.id();
self.chunks.push(chunk);
let index = self.chunks.len() - 1;
let index = ChunkIndex(self.chunks.len() - 1);
let filter = ChunkFilterData {
chunks: &self.chunks,
};
@ -919,7 +1009,7 @@ impl Chunkset {
pub(crate) fn subscribe(&mut self, subscriber: Subscriber) {
self.subscribers.push(subscriber.clone());
for i in 0..self.chunks.len() {
for i in (0..self.chunks.len()).map(ChunkIndex) {
let filter = ChunkFilterData {
chunks: &self.chunks,
};
@ -933,7 +1023,7 @@ impl Chunkset {
pub(crate) fn set_subscribers(&mut self, subscribers: Subscribers) {
self.subscribers = subscribers;
for i in 0..self.chunks.len() {
for i in (0..self.chunks.len()).map(ChunkIndex) {
let filter = ChunkFilterData {
chunks: &self.chunks,
};
@ -943,6 +1033,12 @@ impl Chunkset {
}
}
fn mark_modified(&mut self) {
for chunk in self.chunks.iter_mut() {
chunk.mark_modified();
}
}
pub(crate) fn drain<R: RangeBounds<usize>>(
&mut self,
range: R,
@ -988,7 +1084,7 @@ impl Chunkset {
/// new component index.
///
/// Returns whether or not the chunkset has been fully defragmented.
fn defrag<F: FnMut(Entity, usize, usize)>(
fn defrag<F: FnMut(Entity, ChunkIndex, ComponentIndex)>(
&mut self,
budget: &mut usize,
mut on_moved: F,
@ -1034,11 +1130,16 @@ impl Chunkset {
*budget -= 1;
// move the last entity
let swapped = source.move_entity(target, source.len() - 1);
let comp_index = ComponentIndex(source.len() - 1);
let swapped = source.move_entity(target, comp_index);
assert!(swapped.is_none());
// notify move
on_moved(*target.entities.last().unwrap(), first, target.len() - 1);
on_moved(
*target.entities.last().unwrap(),
ChunkIndex(first),
comp_index,
);
// exit if we cant move any more
if target.is_full() || source.is_empty() {
@ -1047,22 +1148,47 @@ impl Chunkset {
}
}
}
pub(crate) fn chunk(&self, ChunkIndex(index): ChunkIndex) -> Option<&ComponentStorage> {
self.chunks.get(index)
}
pub(crate) fn chunk_mut(
&mut self,
ChunkIndex(index): ChunkIndex,
) -> Option<&mut ComponentStorage> {
self.chunks.get_mut(index)
}
pub(crate) unsafe fn chunk_unchecked(
&self,
ChunkIndex(index): ChunkIndex,
) -> &ComponentStorage {
self.chunks.get_unchecked(index)
}
pub(crate) unsafe fn chunk_unchecked_mut(
&mut self,
ChunkIndex(index): ChunkIndex,
) -> &mut ComponentStorage {
self.chunks.get_unchecked_mut(index)
}
}
/// Unique ID of a chunk.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct ChunkId(ArchetypeId, usize, usize);
pub struct ChunkId(ArchetypeId, SetIndex, ChunkIndex);
impl ChunkId {
pub(crate) fn new(archetype: ArchetypeId, set: usize, index: usize) -> Self {
pub(crate) fn new(archetype: ArchetypeId, set: SetIndex, index: ChunkIndex) -> Self {
ChunkId(archetype, set, index)
}
pub fn archetype_id(&self) -> ArchetypeId { self.0 }
pub(crate) fn set(&self) -> usize { self.1 }
pub(crate) fn set(&self) -> SetIndex { self.1 }
pub(crate) fn index(&self) -> usize { self.2 }
pub(crate) fn index(&self) -> ChunkIndex { self.2 }
}
/// A set of component slices located on a chunk.
@ -1070,7 +1196,7 @@ pub struct Components(SmallVec<[(ComponentTypeId, ComponentResourceSet); 5]>);
impl Components {
pub(crate) fn new(mut data: SmallVec<[(ComponentTypeId, ComponentResourceSet); 5]>) -> Self {
data.sort_by_key(|(t, _)| *t);
data.sort_by_key(|&(t, _)| t);
Self(data)
}
@ -1078,7 +1204,7 @@ impl Components {
#[inline]
pub fn get(&self, type_id: ComponentTypeId) -> Option<&ComponentResourceSet> {
self.0
.binary_search_by_key(&type_id, |(t, _)| *t)
.binary_search_by_key(&type_id, |&(t, _)| t)
.ok()
.map(|i| unsafe { &self.0.get_unchecked(i).1 })
}
@ -1087,16 +1213,24 @@ impl Components {
#[inline]
pub fn get_mut(&mut self, type_id: ComponentTypeId) -> Option<&mut ComponentResourceSet> {
self.0
.binary_search_by_key(&type_id, |(t, _)| *t)
.binary_search_by_key(&type_id, |&(t, _)| t)
.ok()
.map(move |i| unsafe { &mut self.0.get_unchecked_mut(i).1 })
}
fn iter(&mut self) -> Iter<(ComponentTypeId, ComponentResourceSet)> { self.0.iter() }
fn iter(&mut self) -> impl Iterator<Item = &(ComponentTypeId, ComponentResourceSet)> + '_ {
self.0.iter()
}
fn iter_mut(&mut self) -> IterMut<(ComponentTypeId, ComponentResourceSet)> { self.0.iter_mut() }
fn iter_mut(
&mut self,
) -> impl Iterator<Item = &mut (ComponentTypeId, ComponentResourceSet)> + '_ {
self.0.iter_mut()
}
fn drain(&mut self) -> Drain<(ComponentTypeId, ComponentResourceSet)> { self.0.drain() }
fn drain(&mut self) -> impl Iterator<Item = (ComponentTypeId, ComponentResourceSet)> + '_ {
self.0.drain(..)
}
}
/// Stores a chunk of entities and their component data of a specific data layout.
@ -1125,10 +1259,10 @@ impl<'a> StorageWriter<'a> {
impl<'a> Drop for StorageWriter<'a> {
fn drop(&mut self) {
self.storage.update_count_gauge();
for entity in self.storage.entities.iter().skip(self.initial_count) {
for &entity in self.storage.entities.iter().skip(self.initial_count) {
self.storage
.subscribers
.send(Event::EntityInserted(*entity, self.storage.id()));
.send(Event::EntityInserted(entity, self.storage.id()));
}
}
}
@ -1168,10 +1302,25 @@ impl ComponentStorage {
unsafe { &*self.component_info.get() }.get(component_type)
}
/// Increments all component versions, forcing the chunk to be seen as modified for all queries.
fn mark_modified(&mut self) {
unsafe {
let components = &mut *self.component_info.get();
for (_, component) in components.iter_mut() {
// touch each slice mutably to increment its version
let _ = component.data_raw_mut();
}
}
}
/// Removes an entity from the chunk by swapping it with the last entry.
///
/// Returns the ID of the entity which was swapped into the removed entity's position.
pub fn swap_remove(&mut self, index: usize, drop: bool) -> Option<Entity> {
pub fn swap_remove(
&mut self,
ComponentIndex(index): ComponentIndex,
drop: bool,
) -> Option<Entity> {
let removed = self.entities.swap_remove(index);
for (_, component) in unsafe { &mut *self.component_info.get() }.iter_mut() {
component.writer().swap_remove(index, drop);
@ -1196,16 +1345,20 @@ impl ComponentStorage {
/// the target chunk. Any components left over will be dropped.
///
/// Returns the ID of the entity which was swapped into the removed entity's position.
pub fn move_entity(&mut self, target: &mut ComponentStorage, index: usize) -> Option<Entity> {
debug_assert!(index < self.len());
pub fn move_entity(
&mut self,
target: &mut ComponentStorage,
index: ComponentIndex,
) -> Option<Entity> {
debug_assert!(*index < self.len());
debug_assert!(!target.is_full());
if !target.is_allocated() {
target.allocate();
}
trace!(index, source = ?self.id, destination = ?target.id, "Moving entity");
trace!(index = *index, source = ?self.id, destination = ?target.id, "Moving entity");
let entity = unsafe { *self.entities.get_unchecked(index) };
let entity = unsafe { *self.entities.get_unchecked(*index) };
target.entities.push(entity);
let self_components = unsafe { &mut *self.component_info.get() };
@ -1214,9 +1367,9 @@ impl ComponentStorage {
for (comp_type, accessor) in self_components.iter_mut() {
if let Some(target_accessor) = target_components.get_mut(*comp_type) {
// move the component into the target chunk
let (ptr, element_size, _) = accessor.data_raw();
unsafe {
let component = ptr.add(element_size * index);
let (ptr, element_size, _) = accessor.data_raw();
let component = ptr.add(element_size * *index);
target_accessor
.writer()
.push_raw(NonNull::new_unchecked(component), 1);
@ -1257,9 +1410,9 @@ impl ComponentStorage {
trace!(
world = self.id.archetype_id().world().index(),
archetype = self.id.archetype_id().index(),
chunkset = self.id.set(),
chunk = self.id.index(),
archetype = *self.id.archetype_id().index(),
chunkset = *self.id.set(),
chunk = *self.id.index(),
layout = ?self.component_layout,
"Freeing chunk memory"
);
@ -1283,9 +1436,9 @@ impl ComponentStorage {
trace!(
world = self.id.archetype_id().world().index(),
archetype = self.id.archetype_id().index(),
chunkset = self.id.set(),
chunk = self.id.index(),
archetype = *self.id.archetype_id().index(),
chunkset = *self.id.set(),
chunk = *self.id.index(),
layout = ?self.component_layout,
"Allocating chunk memory"
);
@ -1298,8 +1451,8 @@ impl ComponentStorage {
// update accessor pointers
for (type_id, component) in (&mut *self.component_info.get()).iter_mut() {
let offset = self.component_offsets.get(type_id).unwrap();
*component.ptr.get_mut() = ptr.add(*offset);
let &offset = self.component_offsets.get(type_id).unwrap();
*component.ptr.get_mut() = ptr.add(offset);
}
}
@ -1356,6 +1509,12 @@ impl Drop for ComponentStorage {
}
}
for e in &self.entities {
self.subscribers.send(Event::EntityRemoved(*e, self.id()));
}
self.update_count_gauge();
// free the chunk's memory
unsafe {
std::alloc::dealloc(ptr.as_ptr(), self.component_layout);
@ -1385,12 +1544,10 @@ impl ComponentResourceSet {
///
/// # Safety
///
/// Access to the component data within the slice is runtime borrow checked.
/// This call will panic if borrowing rules are broken.
pub fn data_raw(&self) -> (Ref<*mut u8>, usize, usize) {
(self.ptr.get(), self.element_size, unsafe {
*self.count.get()
})
/// Access to the component data within the slice is runtime borrow checked in debug builds.
/// This call will panic if borrowing rules are broken in debug, and is undefined behavior in release.
pub unsafe fn data_raw(&self) -> (Ref<*mut u8>, usize, usize) {
(self.ptr.get(), self.element_size, *self.count.get())
}
/// Gets a raw pointer to the start of the component slice.
@ -1399,21 +1556,19 @@ impl ComponentResourceSet {
///
/// # Safety
///
/// Access to the component data within the slice is runtime borrow checked.
/// This call will panic if borrowing rules are broken.
/// Access to the component data within the slice is runtime borrow checked in debug builds.
/// This call will panic if borrowing rules are broken in debug, and is undefined behavior in release.
///
/// # Panics
///
/// Will panic when an internal u64 counter overflows.
/// It will happen in 50000 years if you do 10000 mutations a millisecond.
pub fn data_raw_mut(&self) -> (RefMut<*mut u8>, usize, usize) {
pub unsafe fn data_raw_mut(&self) -> (RefMut<*mut u8>, usize, usize) {
// this version increment is not thread safe
// - but the pointer `get_mut` ensures exclusive access at runtime
let ptr = self.ptr.get_mut();
unsafe {
*self.version.get() = next_version();
};
(ptr, self.element_size, unsafe { *self.count.get() })
*self.version.get() = next_version();
(ptr, self.element_size, *self.count.get())
}
/// Gets a shared reference to the slice of components.
@ -1426,7 +1581,7 @@ impl ComponentResourceSet {
/// This call will panic if borrowing rules are broken.
pub unsafe fn data_slice<T>(&self) -> RefMap<&[T]> {
let (ptr, _size, count) = self.data_raw();
ptr.map_into(|ptr| std::slice::from_raw_parts(*ptr as *const _ as *const T, count))
ptr.map_into(|&ptr| std::slice::from_raw_parts(ptr as *const _ as *const T, count))
}
/// Gets a mutable reference to the slice of components.
@ -1444,7 +1599,7 @@ impl ComponentResourceSet {
/// It will happen in 50000 years if you do 10000 mutations a millisecond.
pub unsafe fn data_slice_mut<T>(&self) -> RefMapMut<&mut [T]> {
let (ptr, _size, count) = self.data_raw_mut();
ptr.map_into(|ptr| std::slice::from_raw_parts_mut(*ptr as *mut _ as *mut T, count))
ptr.map_into(|&mut ptr| std::slice::from_raw_parts_mut(ptr as *mut _ as *mut T, count))
}
/// Creates a writer for pushing components into or removing from the vec.
@ -1572,7 +1727,7 @@ impl<'a> ComponentWriter<'a> {
/// # Safety
///
/// Ensure that this function is only ever called once on a given index.
pub unsafe fn drop_in_place(&mut self, index: usize) {
pub unsafe fn drop_in_place(&mut self, ComponentIndex(index): ComponentIndex) {
if let Some(drop_fn) = self.accessor.drop_fn {
let size = self.accessor.element_size;
let to_remove = self.ptr.add(size * index);
@ -1805,10 +1960,9 @@ mod test {
let chunk_index = data.get_free_chunk(set, 1);
let components = data
.chunksets_mut()
.get_mut(set)
.chunkset_mut(set)
.unwrap()
.get_mut(chunk_index)
.chunk_mut(chunk_index)
.unwrap();
let mut writer = components.writer();
let (chunk_entities, chunk_components) = writer.get();
@ -1840,10 +1994,9 @@ mod test {
let chunk_index = data.get_free_chunk(set, 1);
let chunk = data
.chunksets_mut()
.get_mut(set)
.chunkset_mut(set)
.unwrap()
.get_mut(chunk_index)
.chunk_mut(chunk_index)
.unwrap();
assert!(!chunk.is_allocated());
@ -1870,10 +2023,9 @@ mod test {
let chunk_index = data.get_free_chunk(set, 1);
let chunk = data
.chunksets_mut()
.get_mut(set)
.chunkset_mut(set)
.unwrap()
.get_mut(chunk_index)
.chunk_mut(chunk_index)
.unwrap();
assert!(!chunk.is_allocated());
@ -1894,7 +2046,7 @@ mod test {
assert!(chunk.is_allocated());
chunk.swap_remove(0, true);
chunk.swap_remove(ComponentIndex(0), true);
assert!(!chunk.is_allocated());
}
@ -1914,10 +2066,9 @@ mod test {
let set = data.alloc_chunk_set(|_| {});
let chunk_index = data.get_free_chunk(set, 1);
let components = data
.chunksets_mut()
.get_mut(set)
.chunkset_mut(set)
.unwrap()
.get_mut(chunk_index)
.chunk_mut(chunk_index)
.unwrap();
let entities = [
@ -2045,10 +2196,9 @@ mod test {
let chunk_index = data.get_free_chunk(set, 1);
let components = data
.chunksets_mut()
.get_mut(set)
.chunkset_mut(set)
.unwrap()
.get_mut(chunk_index)
.chunk_mut(chunk_index)
.unwrap();
let mut writer = components.writer();
let (chunk_entities, chunk_components) = writer.get();
@ -2080,10 +2230,9 @@ mod test {
let chunk_index = data.get_free_chunk(set, 1);
let components = data
.chunksets_mut()
.get_mut(set)
.chunkset_mut(set)
.unwrap()
.get_mut(chunk_index)
.chunk_mut(chunk_index)
.unwrap();
let mut writer = components.writer();
let (chunk_entities, chunk_components) = writer.get();

View file

@ -0,0 +1,35 @@
[package]
name = "legion-systems"
version = "0.2.1"
description = "High performance entity component system (ECS) library"
authors = ["Thomas Gillen <thomas.gillen@googlemail.com>"]
repository = "https://github.com/TomGillen/legion"
keywords = ["ecs", "game"]
categories = ["game-engines", "data-structures"]
readme = "readme.md"
license = "MIT"
edition = "2018"
[badges]
travis-ci = { repository = "TomGillen/legion", branch = "master" }
[features]
par-iter = ["rayon", "legion-core/par-iter"]
par-schedule = ["rayon", "crossbeam-queue"]
[dependencies]
legion-core = { path = "../legion_core", version = "0.2.1", default-features = false }
downcast-rs = "1.0"
itertools = "0.8"
rayon = { version = "1.2", optional = true }
crossbeam-queue = { version = "0.2.0", optional = true }
crossbeam-channel = "0.4.0"
derivative = "1"
bit-set = "0.5"
paste = "0.1"
tracing = "0.1"
fxhash = "0.2"
[dev-dependencies]
tracing-subscriber = "0.2"

View file

@ -0,0 +1,16 @@
pub mod resource;
pub mod schedule;
mod system;
pub use bit_set;
pub use system::*;
pub mod prelude {
pub use crate::{
bit_set::BitSet,
resource::{ResourceSet, Resources},
schedule::{Executor, Runnable, Schedulable, Schedule},
System, SystemBuilder,
};
}

View file

@ -1,13 +1,42 @@
use crate::borrow::{AtomicRefCell, Ref, RefMut, DowncastTypename};
use crate::query::{Read, ReadOnly, Write};
use legion_core::borrow::DowncastTypename;
use downcast_rs::{impl_downcast, Downcast};
use fxhash::FxHashMap;
use legion_core::borrow::{AtomicRefCell, Ref, RefMut};
use legion_core::query::{Read, ReadOnly, Write};
use std::{
any::type_name,
any::{Any, type_name},
marker::PhantomData,
ops::{Deref, DerefMut},
};
impl DowncastTypename for dyn Resource {
#[inline(always)]
fn downcast_typename_mut<T: Any>(&mut self) -> Option<&mut T> {
if self.is_typename::<T>() {
// SAFETY: just checked whether we are pointing to the correct type
unsafe { Some(&mut *(self.as_any_mut() as *mut dyn Any as *mut T)) }
} else {
None
}
}
#[inline(always)]
fn downcast_typename_ref<T: Any>(&self) -> Option<&T> {
if self.is_typename::<T>() {
// SAFETY: just checked whether we are pointing to the correct type
unsafe { Some(&*(self.as_any() as *const dyn Any as *const T)) }
} else {
None
}
}
#[inline(always)]
fn is_typename<T: Any>(&self) -> bool {
true
// TODO: it would be nice to add type safety here, but the type names don't match
// println!("{} {}", type_name_of_val(self), type_name::<T>());
// type_name_of_val(self) == type_name::<T>()
}
}
#[cfg(not(feature = "ffi"))]
/// A type ID identifying a component type.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
@ -39,7 +68,8 @@ impl ResourceTypeId {
/// struct TypeA(usize);
/// struct TypeB(usize);
///
/// use legion::prelude::*;
/// use legion_core::prelude::*;
/// use legion_systems::prelude::*;
/// let mut resources = Resources::default();
/// resources.insert(TypeA(55));
/// resources.insert(TypeB(12));

View file

@ -1,9 +1,15 @@
use crate::system::SystemId;
use crate::{
borrow::RefMut, command::CommandBuffer, resource::ResourceTypeId, storage::ComponentTypeId,
world::World,
resource::{ResourceTypeId, Resources},
system::SystemId,
};
use bit_set::BitSet;
use legion_core::{
borrow::RefMut,
command::CommandBuffer,
storage::ComponentTypeId,
world::{World, WorldId},
};
use std::cell::UnsafeCell;
#[cfg(feature = "par-schedule")]
use tracing::{span, trace, Level};
@ -52,13 +58,38 @@ impl ArchetypeAccess {
/// Trait describing a schedulable type. This is implemented by `System`
pub trait Runnable {
/// Gets the name of the system.
fn name(&self) -> &SystemId;
/// Gets the resources and component types read by the system.
fn reads(&self) -> (&[ResourceTypeId], &[ComponentTypeId]);
/// Gets the resources and component types written by the system.
fn writes(&self) -> (&[ResourceTypeId], &[ComponentTypeId]);
/// Prepares the system for execution against a world.
fn prepare(&mut self, world: &World);
/// Gets the set of archetypes the system will access when run,
/// as determined when the system was last prepared.
fn accesses_archetypes(&self) -> &ArchetypeAccess;
fn run(&self, world: &World);
fn command_buffer_mut(&self) -> RefMut<CommandBuffer>;
/// Runs the system.
///
/// # Safety
///
/// The shared references to world and resources may result in
/// unsound mutable aliasing if other code is accessing the same components or
/// resources as this system. Prefer to use `run` when possible.
unsafe fn run_unsafe(&mut self, world: &World, resources: &Resources);
/// Gets the system's command buffer.
fn command_buffer_mut(&self, world: WorldId) -> Option<RefMut<CommandBuffer>>;
/// Runs the system.
fn run(&mut self, world: &mut World, resources: &mut Resources) {
unsafe { self.run_unsafe(world, resources) };
}
}
/// Executes a sequence of systems, potentially in parallel, and then commits their command buffers.
@ -67,7 +98,7 @@ pub trait Runnable {
/// may run some systems in parallel. The order in which side-effects (e.g. writes to resources
/// or entities) are observed is maintained.
pub struct Executor {
systems: Vec<Box<dyn Schedulable>>,
systems: Vec<SystemBox>,
#[cfg(feature = "par-schedule")]
static_dependants: Vec<Vec<usize>>,
#[cfg(feature = "par-schedule")]
@ -78,13 +109,38 @@ pub struct Executor {
awaiting: Vec<AtomicUsize>,
}
struct SystemBox(UnsafeCell<Box<dyn Schedulable>>);
// NOT SAFE:
// This type is only safe to use as Send and Sync within
// the constraints of how it is used inside Executor
unsafe impl Send for SystemBox {}
unsafe impl Sync for SystemBox {}
impl SystemBox {
#[cfg(feature = "par-schedule")]
unsafe fn get(&self) -> &dyn Schedulable { std::ops::Deref::deref(&*self.0.get()) }
#[allow(clippy::mut_from_ref)]
unsafe fn get_mut(&self) -> &mut dyn Schedulable {
std::ops::DerefMut::deref_mut(&mut *self.0.get())
}
}
impl Executor {
/// Constructs a new executor for all systems to be run in a single stage.
///
/// Systems are provided in the order in which side-effects (e.g. writes to resources or entities)
/// are to be observed.
#[cfg(not(feature = "par-schedule"))]
pub fn new(systems: Vec<Box<dyn Schedulable>>) -> Self { Self { systems } }
pub fn new(systems: Vec<Box<dyn Schedulable>>) -> Self {
Self {
systems: systems
.into_iter()
.map(|s| SystemBox(UnsafeCell::new(s)))
.collect(),
}
}
/// Constructs a new executor for all systems to be run in a single stage.
///
@ -208,7 +264,10 @@ impl Executor {
static_dependants,
dynamic_dependants,
static_dependency_counts,
systems,
systems: systems
.into_iter()
.map(|s| SystemBox(UnsafeCell::new(s)))
.collect(),
}
} else {
Executor {
@ -216,17 +275,22 @@ impl Executor {
static_dependants: Vec::with_capacity(0),
dynamic_dependants: Vec::with_capacity(0),
static_dependency_counts: Vec::with_capacity(0),
systems,
systems: systems
.into_iter()
.map(|s| SystemBox(UnsafeCell::new(s)))
.collect(),
}
}
}
/// Converts this executor into a vector of its component systems.
pub fn into_vec(self) -> Vec<Box<dyn Schedulable>> { self.systems }
pub fn into_vec(self) -> Vec<Box<dyn Schedulable>> {
self.systems.into_iter().map(|s| s.0.into_inner()).collect()
}
/// Executes all systems and then flushes their command buffers.
pub fn execute(&mut self, world: &mut World) {
self.run_systems(world);
pub fn execute(&mut self, world: &mut World, resources: &mut Resources) {
self.run_systems(world, resources);
self.flush_command_buffers(world);
}
@ -234,15 +298,10 @@ impl Executor {
///
/// Only enabled with par-schedule is disabled
#[cfg(not(feature = "par-schedule"))]
pub fn run_systems(&mut self, world: &mut World) {
// preflush command buffers
// This also handles the first case of allocating them.
self.systems
.iter()
.for_each(|system| system.command_buffer_mut().write(world));
pub fn run_systems(&mut self, world: &mut World, resources: &mut Resources) {
self.systems.iter_mut().for_each(|system| {
system.run(world);
let system = unsafe { system.get_mut() };
system.run(world, resources);
});
}
@ -253,19 +312,14 @@ impl Executor {
///
/// Call from within `rayon::ThreadPool::install()` to execute within a specific thread pool.
#[cfg(feature = "par-schedule")]
pub fn run_systems(&mut self, world: &mut World) {
// preflush command buffers
// This also handles the first case of allocating them.
self.systems
.iter()
.for_each(|system| system.command_buffer_mut().write(world));
pub fn run_systems(&mut self, world: &mut World, resources: &mut Resources) {
rayon::join(
|| {},
|| {
match self.systems.len() {
1 => {
self.systems[0].run(world);
// safety: we have exlusive access to all systems, world and resources here
unsafe { self.systems[0].get_mut().run(world, resources) };
}
_ => {
let systems = &mut self.systems;
@ -273,7 +327,9 @@ impl Executor {
let awaiting = &mut self.awaiting;
// prepare all systems - archetype filters are pre-executed here
systems.par_iter_mut().for_each(|sys| sys.prepare(world));
systems
.par_iter_mut()
.for_each(|sys| unsafe { sys.get_mut() }.prepare(world));
// determine dynamic dependencies
izip!(
@ -283,10 +339,11 @@ impl Executor {
)
.par_bridge()
.for_each(|(sys, static_dep, dyn_dep)| {
let archetypes = sys.accesses_archetypes();
// safety: systems is held exclusively, and we are only reading each system
let archetypes = unsafe { sys.get() }.accesses_archetypes();
for i in (0..dyn_dep.len()).rev() {
let dep = dyn_dep[i];
let other = &systems[dep];
let other = unsafe { systems[dep].get() };
// if the archetype sets intersect,
// then we can move the dynamic dependant into the static dependants set
@ -309,7 +366,9 @@ impl Executor {
(0..systems.len())
.filter(|i| awaiting[*i].load(Ordering::SeqCst) == 0)
.for_each(|i| {
self.run_recursive(i, world);
// safety: we are at the root of the execution tree, so we know each
// index is exclusive here
unsafe { self.run_recursive(i, world, resources) };
});
}
}
@ -320,14 +379,23 @@ impl Executor {
/// Flushes the recorded command buffers for all systems.
pub fn flush_command_buffers(&mut self, world: &mut World) {
self.systems.iter().for_each(|system| {
system.command_buffer_mut().write(world);
// safety: systems are exlcusive due to &mut self
let system = unsafe { system.get_mut() };
if let Some(mut cmd) = system.command_buffer_mut(world.id()) {
cmd.write(world);
}
});
}
/// Recursively execute through the generated depedency cascade and exhaust it.
///
/// # Safety
///
/// Ensure the system indexed by `i` is only accessed once.
#[cfg(feature = "par-schedule")]
fn run_recursive(&self, i: usize, world: &World) {
self.systems[i].run(world);
unsafe fn run_recursive(&self, i: usize, world: &World, resources: &Resources) {
// safety: the caller ensures nothing else is accessing systems[i]
self.systems[i].get_mut().run_unsafe(world, resources);
self.static_dependants[i].par_iter().for_each(|dep| {
match self.awaiting[*dep].compare_exchange(
@ -337,7 +405,8 @@ impl Executor {
Ordering::Relaxed,
) {
Ok(_) => {
self.run_recursive(*dep, world);
// safety: each dependency is unique, so run_recursive is safe to call
self.run_recursive(*dep, world, resources);
}
Err(_) => {
self.awaiting[*dep].fetch_sub(1, Ordering::Relaxed);
@ -378,18 +447,21 @@ impl Builder {
}
/// Adds a thread local function to the schedule. This function will be executed on the main thread.
pub fn add_thread_local_fn<F: FnMut(&mut World) + 'static>(mut self, f: F) -> Self {
pub fn add_thread_local_fn<F: FnMut(&mut World, &mut Resources) + 'static>(
mut self,
f: F,
) -> Self {
self.finalize_executor();
self.steps.push(Step::ThreadLocalFn(
Box::new(f) as Box<dyn FnMut(&mut World)>
Box::new(f) as Box<dyn FnMut(&mut World, &mut Resources)>
));
self
}
/// Adds a thread local system to the schedule. This system will be executed on the main thread.
pub fn add_thread_local<S: Into<Box<dyn Runnable>>>(self, system: S) -> Self {
let system = system.into();
self.add_thread_local_fn(move |world| system.run(world))
let mut system = system.into();
self.add_thread_local_fn(move |world, resources| system.run(world, resources))
}
/// Finalizes the builder into a `Schedule`.
@ -412,7 +484,7 @@ pub enum Step {
/// Flush system command buffers.
FlushCmdBuffers,
/// A thread local function.
ThreadLocalFn(Box<dyn FnMut(&mut World)>),
ThreadLocalFn(Box<dyn FnMut(&mut World, &mut Resources)>),
}
/// A schedule of systems for execution.
@ -420,11 +492,13 @@ pub enum Step {
/// # Examples
///
/// ```rust
/// # use legion::prelude::*;
/// # use legion_core::prelude::*;
/// # use legion_systems::prelude::*;
/// # let find_collisions = SystemBuilder::new("find_collisions").build(|_,_,_,_| {});
/// # let calculate_acceleration = SystemBuilder::new("calculate_acceleration").build(|_,_,_,_| {});
/// # let update_positions = SystemBuilder::new("update_positions").build(|_,_,_,_| {});
/// # let mut world = World::new();
/// let mut world = World::new();
/// let mut resources = Resources::default();
/// let mut schedule = Schedule::builder()
/// .add_system(find_collisions)
/// .flush()
@ -432,7 +506,7 @@ pub enum Step {
/// .add_system(update_positions)
/// .build();
///
/// schedule.execute(&mut world);
/// schedule.execute(&mut world, &mut resources);
/// ```
pub struct Schedule {
steps: Vec<Step>,
@ -443,18 +517,18 @@ impl Schedule {
pub fn builder() -> Builder { Builder::default() }
/// Executes all of the steps in the schedule.
pub fn execute(&mut self, world: &mut World) {
pub fn execute(&mut self, world: &mut World, resources: &mut Resources) {
let mut waiting_flush: Vec<&mut Executor> = Vec::new();
for step in &mut self.steps {
match step {
Step::Systems(executor) => {
executor.run_systems(world);
executor.run_systems(world, resources);
waiting_flush.push(executor);
}
Step::FlushCmdBuffers => waiting_flush
.drain(..)
.for_each(|e| e.flush_command_buffers(world)),
Step::ThreadLocalFn(function) => function(world),
Step::ThreadLocalFn(function) => function(world, resources),
}
}
}
@ -480,6 +554,7 @@ mod tests {
use super::*;
use crate::prelude::*;
use itertools::sorted;
use legion_core::prelude::*;
use std::sync::{Arc, Mutex};
#[test]
@ -490,7 +565,8 @@ mod tests {
#[derive(Default)]
struct Resource;
world.resources.insert(Resource);
let mut resources = Resources::default();
resources.insert(Resource);
let order = Arc::new(Mutex::new(Vec::new()));
@ -513,7 +589,7 @@ mod tests {
.add_system(system_three)
.build();
schedule.execute(&mut world);
schedule.execute(&mut world, &mut resources);
let order = order.lock().unwrap();
let sorted: Vec<usize> = sorted(order.clone()).collect();
@ -524,12 +600,13 @@ mod tests {
fn flush() {
let universe = Universe::new();
let mut world = universe.create_world();
let mut resources = Resources::default();
#[derive(Clone, Copy, Debug, PartialEq)]
struct TestComp(f32, f32, f32);
let system_one = SystemBuilder::new("one").build(move |cmd, _, _, _| {
cmd.insert((), vec![(TestComp(0., 0., 0.),)]).unwrap();
cmd.insert((), vec![(TestComp(0., 0., 0.),)]);
});
let system_two = SystemBuilder::new("two")
.with_query(Write::<TestComp>::query())
@ -545,6 +622,6 @@ mod tests {
.add_system(system_three)
.build();
schedule.execute(&mut world);
schedule.execute(&mut world, &mut resources);
}
}

View file

@ -1,31 +1,32 @@
use crate::borrow::{AtomicRefCell, Ref, RefMut};
use crate::command::CommandBuffer;
use crate::cons::{ConsAppend, ConsFlatten};
use crate::entity::Entity;
use crate::filter::EntityFilter;
use crate::query::ReadOnly;
use crate::query::{ChunkDataIter, ChunkEntityIter, ChunkViewIter, Query, Read, View, Write};
use crate::resource::{Resource, ResourceSet, ResourceTypeId};
use crate::resource::{Resource, ResourceSet, ResourceTypeId, Resources};
use crate::schedule::ArchetypeAccess;
use crate::schedule::{Runnable, Schedulable};
use crate::storage::Tag;
use crate::storage::{Component, ComponentTypeId, TagTypeId};
use crate::world::World;
use bit_set::BitSet;
use derivative::Derivative;
use fxhash::FxHashMap;
use legion_core::borrow::{AtomicRefCell, Ref, RefMut};
use legion_core::command::CommandBuffer;
use legion_core::cons::{ConsAppend, ConsFlatten};
use legion_core::entity::Entity;
use legion_core::filter::EntityFilter;
use legion_core::index::ArchetypeIndex;
use legion_core::query::ReadOnly;
use legion_core::query::{ChunkDataIter, ChunkEntityIter, ChunkViewIter, Query, Read, View, Write};
use legion_core::storage::Tag;
use legion_core::storage::{Component, ComponentTypeId, TagTypeId};
use legion_core::world::World;
use legion_core::world::WorldId;
use std::any::TypeId;
use std::borrow::Cow;
use std::marker::PhantomData;
use tracing::{debug, info, span, Level};
#[cfg(feature = "par-iter")]
use crate::filter::{ArchetypeFilterData, ChunkFilterData, ChunksetFilterData, Filter};
#[cfg(feature = "par-iter")]
use crate::iterator::FissileIterator;
#[cfg(feature = "par-iter")]
use crate::query::Chunk;
use legion_core::{
filter::{ArchetypeFilterData, ChunkFilterData, ChunksetFilterData, Filter},
iterator::FissileIterator,
query::Chunk,
};
/// Structure used by `SystemAccess` for describing access to the provided `T`
#[derive(Derivative, Debug, Clone)]
@ -559,7 +560,7 @@ macro_rules! impl_queryset_tuple {
$(
let storage = world.storage();
$ty.filter.iter_archetype_indexes(storage).for_each(|id| { bitset.insert(id); });
$ty.filter.iter_archetype_indexes(storage).for_each(|ArchetypeIndex(id)| { bitset.insert(id); });
)*
}
unsafe fn prepare(&mut self) -> Self::Queries {
@ -585,9 +586,11 @@ where
type Queries = SystemQuery<AV, AF>;
fn filter_archetypes(&mut self, world: &World, bitset: &mut BitSet) {
let storage = world.storage();
self.filter.iter_archetype_indexes(storage).for_each(|id| {
bitset.insert(id);
});
self.filter
.iter_archetype_indexes(storage)
.for_each(|ArchetypeIndex(id)| {
bitset.insert(id);
});
}
unsafe fn prepare(&mut self) -> Self::Queries { SystemQuery::<AV, AF>::new(self) }
}
@ -652,9 +655,8 @@ impl SubWorld {
fn validate_archetype_access(&self, entity: Entity) -> bool {
unsafe {
if let Some(archetypes) = self.archetypes {
if let Some(location) = (*self.world).entity_allocator.get_location(entity.index())
{
return (*archetypes).contains(location.archetype());
if let Some(location) = (*self.world).get_entity_location(entity) {
return (*archetypes).contains(*location.archetype());
}
}
}
@ -806,7 +808,7 @@ where
>,
{
name: SystemId,
resources: R,
_resources: PhantomData<R>,
queries: AtomicRefCell<Q>,
run_fn: AtomicRefCell<F>,
archetypes: ArchetypeAccess,
@ -816,7 +818,7 @@ where
access: SystemAccess,
// We pre-allocate a command buffer for ourself. Writes are self-draining so we never have to rellocate.
command_buffer: AtomicRefCell<CommandBuffer>,
command_buffer: FxHashMap<WorldId, AtomicRefCell<CommandBuffer>>,
}
impl<R, Q, F> Runnable for System<R, Q, F>
@ -848,28 +850,29 @@ where
fn accesses_archetypes(&self) -> &ArchetypeAccess { &self.archetypes }
fn command_buffer_mut(&self) -> RefMut<CommandBuffer> { self.command_buffer.get_mut() }
fn command_buffer_mut(&self, world: WorldId) -> Option<RefMut<CommandBuffer>> {
self.command_buffer.get(&world).map(|cmd| cmd.get_mut())
}
fn run(&self, world: &World) {
unsafe fn run_unsafe(&mut self, world: &World, resources: &Resources) {
let span = span!(Level::INFO, "System", system = %self.name);
let _guard = span.enter();
debug!("Initializing");
let mut resources = unsafe { R::fetch_unchecked(&world.resources) };
let mut resources = R::fetch_unchecked(resources);
let mut queries = self.queries.get_mut();
let mut prepared_queries = unsafe { queries.prepare() };
let mut world_shim =
unsafe { SubWorld::new(world, &self.access.components, &self.archetypes) };
// Give the command buffer a new entity block.
// This should usually just pull a free block, or allocate a new one...
// TODO: The BlockAllocator should *ensure* keeping at least 1 free block so this prevents an allocation
let mut prepared_queries = queries.prepare();
let mut world_shim = SubWorld::new(world, &self.access.components, &self.archetypes);
let cmd = self
.command_buffer
.entry(world.id())
.or_insert_with(|| AtomicRefCell::new(CommandBuffer::new(world)));
info!("Running");
use std::ops::DerefMut;
let mut borrow = self.run_fn.get_mut();
borrow.deref_mut().run(
&mut self.command_buffer.get_mut(),
&mut cmd.get_mut(),
&mut world_shim,
&mut resources,
&mut prepared_queries,
@ -924,7 +927,8 @@ where
/// as singular closures for a given system - providing queries which should be cached for that
/// system, as well as resource access and other metadata.
/// ```rust
/// # use legion::prelude::*;
/// # use legion_core::prelude::*;
/// # use legion_systems::prelude::*;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
/// # struct Position;
/// # #[derive(Copy, Clone, Debug, PartialEq)]
@ -1119,7 +1123,7 @@ where
Box::new(System {
name: self.name,
run_fn: AtomicRefCell::new(run_fn),
resources: self.resources.flatten(),
_resources: PhantomData::<<R as ConsFlatten>::Output>,
queries: AtomicRefCell::new(self.queries.flatten()),
archetypes: if self.access_all_archetypes {
ArchetypeAccess::All
@ -1131,7 +1135,7 @@ where
components: self.component_access,
tags: Access::default(),
},
command_buffer: AtomicRefCell::new(CommandBuffer::default()),
command_buffer: FxHashMap::default(),
})
}
@ -1154,7 +1158,7 @@ where
Box::new(System {
name: self.name,
run_fn: AtomicRefCell::new(run_fn),
resources: self.resources.flatten(),
_resources: PhantomData::<<R as ConsFlatten>::Output>,
queries: AtomicRefCell::new(self.queries.flatten()),
archetypes: if self.access_all_archetypes {
ArchetypeAccess::All
@ -1166,7 +1170,7 @@ where
components: self.component_access,
tags: Access::default(),
},
command_buffer: AtomicRefCell::new(CommandBuffer::default()),
command_buffer: FxHashMap::default(),
})
}
}
@ -1174,8 +1178,8 @@ where
#[cfg(test)]
mod tests {
use super::*;
use crate::prelude::*;
use crate::schedule::*;
use legion_core::prelude::*;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
@ -1206,8 +1210,10 @@ mod tests {
let universe = Universe::new();
let mut world = universe.create_world();
world.resources.insert(TestResource(123));
world.resources.insert(TestResourceTwo(123));
let mut resources = Resources::default();
resources.insert(TestResource(123));
resources.insert(TestResourceTwo(123));
let components = vec![
(Pos(1., 2., 3.), Vel(0.1, 0.2, 0.3)),
@ -1290,7 +1296,7 @@ mod tests {
let systems = vec![system_one, system_two, system_three, system_four];
let mut executor = Executor::new(systems);
executor.execute(&mut world);
executor.execute(&mut world, &mut resources);
assert_eq!(*(runs.lock().unwrap()), order);
}
@ -1301,7 +1307,9 @@ mod tests {
let universe = Universe::new();
let mut world = universe.create_world();
world.resources.insert(TestResource(123));
let mut resources = Resources::default();
resources.insert(TestResource(123));
let components = vec![
(Pos(1., 2., 3.), Vel(0.1, 0.2, 0.3)),
@ -1333,7 +1341,7 @@ mod tests {
assert_eq!(components.len(), count);
});
system.prepare(&world);
system.run(&world);
system.run(&mut world, &mut resources);
}
#[test]
@ -1342,7 +1350,9 @@ mod tests {
let universe = Universe::new();
let mut world = universe.create_world();
world.resources.insert(TestResource(123));
let mut resources = Resources::default();
resources.insert(TestResource(123));
let components = vec![
(Pos(1., 2., 3.), Vel(0.1, 0.2, 0.3)),
@ -1367,7 +1377,7 @@ mod tests {
});
system.prepare(&world);
system.run(&world);
system.run(&mut world, &mut resources);
}
#[test]
@ -1376,6 +1386,7 @@ mod tests {
let universe = Universe::new();
let mut world = universe.create_world();
let mut resources = Resources::default();
#[derive(Default, Clone, Copy)]
pub struct Balls(u32);
@ -1410,14 +1421,14 @@ mod tests {
});
system.prepare(&world);
system.run(&world);
system.run(&mut world, &mut resources);
world
.add_component(*(expected.keys().nth(0).unwrap()), Balls::default())
.unwrap();
system.prepare(&world);
system.run(&world);
system.run(&mut world, &mut resources);
}
#[test]
@ -1426,6 +1437,7 @@ mod tests {
let universe = Universe::new();
let mut world = universe.create_world();
let mut resources = Resources::default();
#[derive(Default, Clone, Copy)]
pub struct Balls(u32);
@ -1461,12 +1473,15 @@ mod tests {
});
system.prepare(&world);
system.run(&world);
system.run(&mut world, &mut resources);
system.command_buffer_mut().write(&mut world);
system
.command_buffer_mut(world.id())
.unwrap()
.write(&mut world);
system.prepare(&world);
system.run(&world);
system.run(&mut world, &mut resources);
}
#[test]
@ -1480,7 +1495,9 @@ mod tests {
let universe = Universe::new();
let mut world = universe.create_world();
world.resources.insert(AtomicRes::default());
let mut resources = Resources::default();
resources.insert(AtomicRes::default());
let system1 = SystemBuilder::<()>::new("TestSystem1")
.write_resource::<AtomicRes>()
@ -1521,13 +1538,12 @@ mod tests {
let mut executor = Executor::new(systems);
pool.install(|| {
for _ in 0..1000 {
executor.execute(&mut world);
executor.execute(&mut world, &mut resources);
}
});
assert_eq!(
world
.resources
resources
.get::<AtomicRes>()
.unwrap()
.0
@ -1548,7 +1564,9 @@ mod tests {
let universe = Universe::new();
let mut world = universe.create_world();
world.resources.insert(AtomicRes::default());
let mut resources = Resources::default();
resources.insert(AtomicRes::default());
let system1 = SystemBuilder::<()>::new("TestSystem1")
.read_resource::<AtomicRes>()
@ -1589,7 +1607,7 @@ mod tests {
let mut executor = Executor::new(systems);
pool.install(|| {
for _ in 0..1000 {
executor.execute(&mut world);
executor.execute(&mut world, &mut resources);
}
});
}
@ -1722,7 +1740,7 @@ mod tests {
let mut executor = Executor::new(systems);
pool.install(|| {
for _ in 0..1000 {
executor.execute(&mut world);
executor.execute(&mut world, &mut Resources::default());
}
});
}

View file

@ -2,8 +2,8 @@
[![Build Status][build_img]][build_lnk] [![Crates.io][crates_img]][crates_lnk] [![Docs.rs][doc_img]][doc_lnk]
[build_img]: https://img.shields.io/travis/TomGillen/legion/master.svg
[build_lnk]: https://travis-ci.org/TomGillen/legion
[build_img]: https://github.com/TomGillen/legion/workflows/CI/badge.svg
[build_lnk]: https://github.com/TomGillen/legion/actions
[crates_img]: https://img.shields.io/crates/v/legion.svg
[crates_lnk]: https://crates.io/crates/legion
[doc_img]: https://docs.rs/legion/badge.svg
@ -11,6 +11,14 @@
Legion aims to be a feature rich high performance ECS library for Rust game projects with minimal boilerplate.
## Bevy Fork Info
This is a fork of Legion that enables dynamic plugin loading in Bevy.
The following changes were made:
* `ResourceTypeId`, `ComponentTypeId`, and `TagTypeId` are keyed by a static type-name string (`std::any::type_name`) instead of `std::any::TypeId`, because `TypeId` values are not stable across separately compiled Rust binaries.
* A `DowncastTypeName` trait was added to allow downcasting based on the type name.
## Benchmarks
Based on the [ecs_bench](https://github.com/lschmierer/ecs_bench) project.

View file

@ -225,41 +225,12 @@
//! * `par-schedule`: Configures system schedulers to try and run systems in parallel where possible (enabled by default).
//! * `log`: Configures `tracing` to redirect events to the `log` crate. This is a convenience feature for applications
//! that use `log` and do not wish to interact with `tracing`.
//! * `events`: Enables eventing APIs on worlds (enabled by default).
#![allow(dead_code)]
pub mod borrow;
pub mod command;
#[cfg(feature = "serde-1")]
pub mod de;
pub mod entity;
pub mod event;
pub mod filter;
pub mod iterator;
pub mod query;
pub mod resource;
pub mod schedule;
#[cfg(feature = "serde-1")]
pub mod ser;
pub mod storage;
pub mod system;
pub mod world;
mod cons;
mod tuple;
mod zip;
pub use bit_set;
pub use legion_core::*;
pub use legion_systems as systems;
pub mod prelude {
pub use crate::command::CommandBuffer;
pub use crate::entity::Entity;
pub use crate::event::Event;
pub use crate::filter::filter_fns::*;
pub use crate::query::{IntoQuery, Query, Read, Tagged, TryRead, TryWrite, Write};
pub use crate::resource::{ResourceSet, Resources};
pub use crate::schedule::{Executor, Runnable, Schedulable, Schedule};
pub use crate::system::{System, SystemBuilder};
pub use crate::world::{Universe, World};
pub use bit_set::BitSet;
pub use legion_core::prelude::*;
pub use legion_systems::prelude::*;
}

View file

@ -64,7 +64,7 @@ fn query_try_read_entity_data() {
let query = TryRead::<Rot>::query();
let rots = query
.iter(&mut world)
.iter(&world)
.map(|x| x.map(|x| *x))
.collect::<Vec<_>>();
assert_eq!(rots.iter().filter(|x| x.is_none()).count(), 1);
@ -381,7 +381,7 @@ fn query_read_shared_data() {
let query = Tagged::<Static>::query();
let mut count = 0;
for marker in query.iter(&mut world) {
for marker in query.iter(&world) {
assert_eq!(Static, *marker);
count += 1;
}
@ -587,3 +587,22 @@ fn query_iter_chunks_tag() {
}
}
}
#[test]
fn query_iter_tag() {
    let _ = tracing_subscriber::fmt::try_init();

    let universe = Universe::new();
    let mut world = universe.create_world();

    // Three chunks, each with a distinct Model tag whose value mirrors the
    // single u32 component stored alongside it.
    world.insert((Static, Model(0)), vec![(0u32,)]);
    world.insert((Static, Model(1)), vec![(1u32,)]);
    world.insert((Static, Model(2)), vec![(2u32,)]);

    let query = <(Tagged<Static>, Tagged<Model>, Read<u32>)>::query();

    // Every matched entity must carry the Static tag and a Model tag equal
    // to its u32 component value.
    let mut matched = 0;
    for (s, m, c) in query.iter(&world) {
        assert_eq!(&Static, s);
        assert_eq!(&Model(*c), m);
        matched += 1;
    }

    // Guard against the loop above passing vacuously: the query must have
    // actually matched all three inserted entities.
    assert_eq!(3, matched);
}

View file

@ -1,4 +1,5 @@
use legion::prelude::*;
use std::collections::HashSet;
#[derive(Clone, Copy, Debug, PartialEq)]
struct Pos(f32, f32, f32);
@ -85,7 +86,7 @@ fn get_shared() {
];
let mut entities: Vec<Entity> = Vec::new();
for e in world.insert(shared, components.clone()) {
for e in world.insert(shared, components) {
entities.push(*e);
}
@ -121,7 +122,7 @@ fn delete() {
];
let mut entities: Vec<Entity> = Vec::new();
for e in world.insert(shared, components.clone()) {
for e in world.insert(shared, components) {
entities.push(*e);
}
@ -135,6 +136,45 @@ fn delete() {
}
}
#[test]
fn delete_all() {
    let _ = tracing_subscriber::fmt::try_init();

    let universe = Universe::new();
    let mut world = universe.create_world();

    let shared = (Static, Model(5));
    let components = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];

    // Insert two entities and remember their handles.
    let entities: Vec<Entity> = world.insert(shared, components).to_vec();

    // The allocator must report every inserted entity as alive...
    for e in &entities {
        assert_eq!(true, world.is_alive(*e));
    }

    // ...and storage must contain both of them.
    let query = <(Read<Pos>, Read<Rot>)>::query();
    assert_eq!(2, query.iter(&world).count());

    world.delete_all();

    // After delete_all, the allocator forgets every entity...
    for e in &entities {
        assert_eq!(false, world.is_alive(*e));
    }

    // ...and storage no longer yields any of them.
    let query = <(Read<Pos>, Read<Rot>)>::query();
    assert_eq!(0, query.iter(&world).count());
}
#[test]
fn delete_last() {
let _ = tracing_subscriber::fmt::try_init();
@ -260,15 +300,15 @@ fn mutate_add_component() {
let query_without_scale = <(Read<Pos>, Read<Rot>)>::query();
let query_with_scale = <(Read<Pos>, Read<Rot>, Read<Scale>)>::query();
assert_eq!(3, query_without_scale.iter(&mut world).count());
assert_eq!(0, query_with_scale.iter(&mut world).count());
assert_eq!(3, query_without_scale.iter(&world).count());
assert_eq!(0, query_with_scale.iter(&world).count());
world
.add_component(*entities.get(1).unwrap(), Scale(0.5, 0.5, 0.5))
.unwrap();
assert_eq!(3, query_without_scale.iter(&mut world).count());
assert_eq!(1, query_with_scale.iter(&mut world).count());
assert_eq!(3, query_without_scale.iter(&world).count());
assert_eq!(1, query_with_scale.iter(&world).count());
}
#[test]
@ -290,13 +330,15 @@ fn mutate_remove_component() {
let query_without_rot = Read::<Pos>::query().filter(!component::<Rot>());
let query_with_rot = <(Read<Pos>, Read<Rot>)>::query();
assert_eq!(0, query_without_rot.iter(&mut world).count());
assert_eq!(3, query_with_rot.iter(&mut world).count());
assert_eq!(0, query_without_rot.iter(&world).count());
assert_eq!(3, query_with_rot.iter(&world).count());
world.remove_component::<Rot>(*entities.get(1).unwrap());
world
.remove_component::<Rot>(*entities.get(1).unwrap())
.unwrap();
assert_eq!(1, query_without_rot.iter(&mut world).count());
assert_eq!(2, query_with_rot.iter(&mut world).count());
assert_eq!(1, query_without_rot.iter(&world).count());
assert_eq!(2, query_with_rot.iter(&world).count());
}
#[test]
@ -318,13 +360,13 @@ fn mutate_add_tag() {
let query_without_static = <(Read<Pos>, Read<Rot>)>::query();
let query_with_static = <(Read<Pos>, Read<Rot>, Tagged<Static>)>::query();
assert_eq!(3, query_without_static.iter(&mut world).count());
assert_eq!(0, query_with_static.iter(&mut world).count());
assert_eq!(3, query_without_static.iter(&world).count());
assert_eq!(0, query_with_static.iter(&world).count());
world.add_tag(*entities.get(1).unwrap(), Static);
world.add_tag(*entities.get(1).unwrap(), Static).unwrap();
assert_eq!(3, query_without_static.iter(&mut world).count());
assert_eq!(1, query_with_static.iter(&mut world).count());
assert_eq!(3, query_without_static.iter(&world).count());
assert_eq!(1, query_with_static.iter(&world).count());
}
#[test]
@ -346,13 +388,15 @@ fn mutate_remove_tag() {
let query_without_static = <(Read<Pos>, Read<Rot>)>::query().filter(!tag::<Static>());
let query_with_static = <(Read<Pos>, Read<Rot>, Tagged<Static>)>::query();
assert_eq!(0, query_without_static.iter(&mut world).count());
assert_eq!(3, query_with_static.iter(&mut world).count());
assert_eq!(0, query_without_static.iter(&world).count());
assert_eq!(3, query_with_static.iter(&world).count());
world.remove_tag::<Static>(*entities.get(1).unwrap());
world
.remove_tag::<Static>(*entities.get(1).unwrap())
.unwrap();
assert_eq!(1, query_without_static.iter(&mut world).count());
assert_eq!(2, query_with_static.iter(&mut world).count());
assert_eq!(1, query_without_static.iter(&world).count());
assert_eq!(2, query_with_static.iter(&world).count());
}
#[test]
@ -368,12 +412,48 @@ fn mutate_change_tag_minimum_test() {
let entities = world.insert(shared, components).to_vec();
tracing::trace!("STARTING CHANGE");
world.add_tag(entities[0], Model(3));
world.add_tag(entities[0], Model(3)).unwrap();
tracing::trace!("CHANGED\n");
assert_eq!(*world.get_tag::<Model>(entities[0]).unwrap(), Model(3));
}
#[test]
fn delete_entities_on_drop() {
    let _ = tracing_subscriber::fmt::try_init();

    let universe = Universe::new();
    let mut world = universe.create_world();

    let (tx, rx) = crossbeam_channel::unbounded::<legion::event::Event>();

    let shared = (Model(5),);
    let components = vec![(Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3))];

    // Insert the data and store resulting entities in a HashSet
    let mut entities = HashSet::new();
    for entity in world.insert(shared, components) {
        entities.insert(*entity);
    }

    world.subscribe(tx, legion::filter::filter_fns::any());

    // Dropping the world should emit an EntityRemoved event for every live entity.
    std::mem::drop(world);

    // Drain *all* pending events. The previous `for e in rx.try_recv()`
    // iterated a `Result`, which yields at most one item, so only the first
    // queued event was ever inspected; `try_iter` consumes every event that
    // is already in the channel without blocking.
    for e in rx.try_iter() {
        if let legion::event::Event::EntityRemoved(entity, _chunk_id) = e {
            assert!(entities.remove(&entity));
        }
    }

    // Verify that every inserted entity produced a removal event and that
    // no extra entities were reported.
    assert!(entities.is_empty());
}
#[test]
#[allow(clippy::suspicious_map)]
fn mutate_change_tag() {
@ -394,11 +474,11 @@ fn mutate_change_tag() {
let query_model_3 = <(Read<Pos>, Read<Rot>)>::query().filter(tag_value(&Model(3)));
let query_model_5 = <(Read<Pos>, Read<Rot>)>::query().filter(tag_value(&Model(5)));
assert_eq!(3, query_model_5.iter(&mut world).count());
assert_eq!(0, query_model_3.iter(&mut world).count());
assert_eq!(3, query_model_5.iter(&world).count());
assert_eq!(0, query_model_3.iter(&world).count());
tracing::trace!("STARTING CHANGE");
world.add_tag(*entities.get(1).unwrap(), Model(3));
world.add_tag(*entities.get(1).unwrap(), Model(3)).unwrap();
tracing::trace!("CHANGED\n");
assert_eq!(
@ -416,5 +496,55 @@ fn mutate_change_tag() {
Model(3)
);
assert_eq!(2, query_model_5.iter(&mut world).count());
assert_eq!(2, query_model_5.iter(&world).count());
}
// This test repeatedly creates a world with new entities and drops it, reproducing
// https://github.com/TomGillen/legion/issues/92
#[test]
fn lots_of_deletes() {
    let _ = tracing_subscriber::fmt::try_init();

    let universe = Universe::new();

    // Create and immediately drop many short-lived worlds. Each drop must
    // hand its entity blocks back to the universe cleanly; this loop used to
    // exhaust/corrupt the allocator (see issue link above the test).
    for _ in 0..10000 {
        let tags = (Model(5),);
        let data = vec![
            (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
            (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
            (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
        ];

        let mut world = universe.create_world();
        world.insert(tags, data).to_vec();
    }
}
#[test]
fn iter_entities() {
    let _ = tracing_subscriber::fmt::try_init();

    let universe = Universe::new();
    let mut world = universe.create_world();

    let tags = (Model(5),);
    let data = vec![
        (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
        (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),
    ];

    // Insert three entities, remembering each resulting handle.
    let mut expected: HashSet<Entity> = world.insert(tags, data).iter().copied().collect();

    // iter_entities must yield exactly the inserted handles: each yielded
    // entity removes its entry, and nothing may remain afterwards.
    for entity in world.iter_entities() {
        assert!(expected.remove(&entity));
    }
    assert!(expected.is_empty());
}

View file

@ -7,8 +7,7 @@ edition = "2018"
license = "MIT"
[dependencies]
# legion = { path = "../bevy_legion" }
legion = { git = "https://github.com/TomGillen/legion", rev = "c5b9628630d4f9fc54b6843b5ce02d0669434a61", features = ["serialize"] }
legion = { path = "../bevy_legion", features = ["serialize"]}
glam = "0.8.3"
log = "0.4"
rayon = "1.2"

View file

@ -13,7 +13,7 @@ use serde::{
Deserialize, Deserializer, Serialize, Serializer,
};
use std::{
any::TypeId, cell::RefCell, collections::HashMap, iter::FromIterator, marker::PhantomData,
any::type_name, cell::RefCell, collections::HashMap, iter::FromIterator, marker::PhantomData,
ptr::NonNull,
};
use type_uuid::TypeUuid;
@ -101,7 +101,7 @@ impl<'de, 'a, T: for<'b> Deserialize<'b> + 'static> Visitor<'de>
#[derive(Clone)]
pub struct TagRegistration {
uuid: type_uuid::Bytes,
ty: TypeId,
ty: String,
tag_serialize_fn: fn(&TagStorage, &mut dyn FnMut(&dyn erased_serde::Serialize)),
tag_deserialize_fn: fn(
deserializer: &mut dyn erased_serde::Deserializer,
@ -123,7 +123,7 @@ impl TagRegistration {
>() -> Self {
Self {
uuid: T::UUID,
ty: TypeId::of::<T>(),
ty: type_name::<T>().to_string(),
tag_serialize_fn: |tag_storage, serialize_fn| {
// it's safe because we know this is the correct type due to lookup
let slice = unsafe { tag_storage.data_slice::<T>() };
@ -150,7 +150,7 @@ impl TagRegistration {
#[derive(Clone)]
pub struct ComponentRegistration {
uuid: type_uuid::Bytes,
ty: TypeId,
ty: String,
comp_serialize_fn: fn(&ComponentResourceSet, &mut dyn FnMut(&dyn erased_serde::Serialize)),
comp_deserialize_fn: fn(
deserializer: &mut dyn erased_serde::Deserializer,
@ -164,7 +164,7 @@ impl ComponentRegistration {
{
Self {
uuid: T::UUID,
ty: TypeId::of::<T>(),
ty: type_name::<T>().to_string(),
comp_serialize_fn: |comp_storage, serialize_fn| {
// it's safe because we know this is the correct type due to lookup
let slice = unsafe { comp_storage.data_slice::<T>() };
@ -192,8 +192,8 @@ struct SerializedArchetypeDescription {
}
pub struct SerializeImpl {
pub tag_types: HashMap<TypeId, TagRegistration>,
pub comp_types: HashMap<TypeId, ComponentRegistration>,
pub tag_types: HashMap<String, TagRegistration>,
pub comp_types: HashMap<String, ComponentRegistration>,
pub entity_map: RefCell<HashMap<Entity, uuid::Bytes>>,
}
@ -242,10 +242,10 @@ impl SerializeImpl {
impl legion::serialize::ser::WorldSerializer for SerializeImpl {
fn can_serialize_tag(&self, ty: &TagTypeId, _meta: &TagMeta) -> bool {
self.tag_types.get(&ty.0).is_some()
self.tag_types.get(ty.0).is_some()
}
fn can_serialize_component(&self, ty: &ComponentTypeId, _meta: &ComponentMeta) -> bool {
self.comp_types.get(&ty.0).is_some()
self.comp_types.get(ty.0).is_some()
}
fn serialize_archetype_description<S: Serializer>(
&self,
@ -255,13 +255,13 @@ impl legion::serialize::ser::WorldSerializer for SerializeImpl {
let tags_to_serialize = archetype_desc
.tags()
.iter()
.filter_map(|(ty, _)| self.tag_types.get(&ty.0))
.filter_map(|(ty, _)| self.tag_types.get(ty.0))
.map(|reg| reg.uuid)
.collect::<Vec<_>>();
let components_to_serialize = archetype_desc
.components()
.iter()
.filter_map(|(ty, _)| self.comp_types.get(&ty.0))
.filter_map(|(ty, _)| self.comp_types.get(ty.0))
.map(|reg| reg.uuid)
.collect::<Vec<_>>();
SerializedArchetypeDescription {
@ -277,7 +277,7 @@ impl legion::serialize::ser::WorldSerializer for SerializeImpl {
_component_meta: &ComponentMeta,
components: &ComponentResourceSet,
) -> Result<S::Ok, S::Error> {
if let Some(reg) = self.comp_types.get(&component_type.0) {
if let Some(reg) = self.comp_types.get(component_type.0) {
let result = RefCell::new(None);
let serializer = RefCell::new(Some(serializer));
{
@ -303,7 +303,7 @@ impl legion::serialize::ser::WorldSerializer for SerializeImpl {
_tag_meta: &TagMeta,
tags: &TagStorage,
) -> Result<S::Ok, S::Error> {
if let Some(reg) = self.tag_types.get(&tag_type.0) {
if let Some(reg) = self.tag_types.get(tag_type.0) {
let result = RefCell::new(None);
let serializer = RefCell::new(Some(serializer));
{
@ -337,8 +337,8 @@ impl legion::serialize::ser::WorldSerializer for SerializeImpl {
}
pub struct DeserializeImpl {
pub tag_types: HashMap<TypeId, TagRegistration>,
pub comp_types: HashMap<TypeId, ComponentRegistration>,
pub tag_types: HashMap<String, TagRegistration>,
pub comp_types: HashMap<String, ComponentRegistration>,
pub tag_types_by_uuid: HashMap<type_uuid::Bytes, TagRegistration>,
pub comp_types_by_uuid: HashMap<type_uuid::Bytes, ComponentRegistration>,
pub entity_map: RefCell<HashMap<uuid::Bytes, Entity>>,
@ -346,8 +346,8 @@ pub struct DeserializeImpl {
impl DeserializeImpl {
pub fn new(
component_types: HashMap<TypeId, ComponentRegistration>,
tag_types: HashMap<TypeId, TagRegistration>,
component_types: HashMap<String, ComponentRegistration>,
tag_types: HashMap<String, TagRegistration>,
entity_map: RefCell<HashMap<Entity, uuid::Bytes>>,
) -> Self {
DeserializeImpl {
@ -399,7 +399,7 @@ impl legion::serialize::de::WorldDeserializer for DeserializeImpl {
_component_meta: &ComponentMeta,
get_next_storage_fn: &mut dyn FnMut() -> Option<(NonNull<u8>, usize)>,
) -> Result<(), <D as Deserializer<'de>>::Error> {
if let Some(reg) = self.comp_types.get(&component_type.0) {
if let Some(reg) = self.comp_types.get(component_type.0) {
let mut erased = erased_serde::Deserializer::erase(deserializer);
(reg.comp_deserialize_fn)(&mut erased, get_next_storage_fn)
.map_err(<<D as serde::Deserializer<'de>>::Error as serde::de::Error>::custom)?;
@ -415,7 +415,7 @@ impl legion::serialize::de::WorldDeserializer for DeserializeImpl {
_tag_meta: &TagMeta,
tags: &mut TagStorage,
) -> Result<(), <D as Deserializer<'de>>::Error> {
if let Some(reg) = self.tag_types.get(&tag_type.0) {
if let Some(reg) = self.tag_types.get(tag_type.0) {
let mut erased = erased_serde::Deserializer::erase(deserializer);
(reg.tag_deserialize_fn)(&mut erased, tags)
.map_err(<<D as serde::Deserializer<'de>>::Error as serde::de::Error>::custom)?;