upgrade legion

This commit is contained in:
Carter Anderson 2020-05-21 17:59:33 -07:00
parent d920100d35
commit 38669107c9
14 changed files with 827 additions and 125 deletions

View file

@ -1,7 +1,6 @@
//! Atomic runtime borrow checking module.
//! These types implement something akin to `RefCell`, but are atomically handled allowing them to
//! cross thread boundaries.
use std::any::{type_name, Any};
use std::cell::UnsafeCell;
use std::hash::{Hash, Hasher};
use std::ops::Deref;
@ -11,13 +10,6 @@ use std::sync::atomic::AtomicIsize;
#[cfg(not(debug_assertions))]
use std::marker::PhantomData;
/// Downcasting for type-erased values, analogous to `Any` downcasting.
/// NOTE(review): the method names suggest matching is done on `type_name`
/// rather than `TypeId` (see `type_name_of_val`) — confirm against the impls.
pub trait DowncastTypename {
    /// Attempts to downcast to a mutable reference of `T`; `None` on type mismatch.
    fn downcast_typename_mut<T: Any>(&mut self) -> Option<&mut T>;
    /// Attempts to downcast to a shared reference of `T`; `None` on type mismatch.
    fn downcast_typename_ref<T: Any>(&self) -> Option<&T>;
    /// Returns `true` if the erased value's type matches `T`.
    fn is_typename<T: Any>(&self) -> bool;
}
pub fn type_name_of_val<T: ?Sized>(_val: &T) -> &'static str { type_name::<T>() }
/// A `RefCell` implementation which is thread safe. This type performs all the standard runtime
/// borrow checking which would be familiar from using `RefCell`.
///
@ -405,6 +397,11 @@ impl<'a, T: 'a> AsRef<T> for RefMut<'a, T> {
fn as_ref(&self) -> &T { self.value }
}
/// Allows a `RefMut` guard to be used anywhere an `AsMut<T>` is expected.
impl<'a, T: 'a> AsMut<T> for RefMut<'a, T> {
    /// Exposes the exclusively borrowed value as a plain `&mut T`.
    #[inline(always)]
    fn as_mut(&mut self) -> &mut T { self.value }
}
impl<'a, T: 'a> std::borrow::Borrow<T> for RefMut<'a, T> {
#[inline(always)]
fn borrow(&self) -> &T { self.value }
@ -529,6 +526,11 @@ impl<'a, T: 'a> AsRef<T> for RefMapMut<'a, T> {
fn as_ref(&self) -> &T { &self.value }
}
/// Allows a `RefMapMut` guard to be used anywhere an `AsMut<T>` is expected.
impl<'a, T: 'a> AsMut<T> for RefMapMut<'a, T> {
    /// Exposes the mapped, exclusively borrowed value as a plain `&mut T`.
    #[inline(always)]
    fn as_mut(&mut self) -> &mut T { &mut self.value }
}
impl<'a, T: 'a> std::borrow::Borrow<T> for RefMapMut<'a, T> {
#[inline(always)]
fn borrow(&self) -> &T { &self.value }

View file

@ -1,13 +1,13 @@
use crate::{
borrow::AtomicRefCell,
cons::{ConsAppend, ConsFlatten},
entity::{Entity, GuidEntityAllocator},
entity::Entity,
filter::{ChunksetFilterData, Filter},
storage::{Component, ComponentTypeId, Tag, TagTypeId},
world::{
ComponentSource, ComponentTupleSet, IntoComponentSource, PreallocComponentSource,
TagLayout, TagSet, World, WorldId,
},
}, guid_entity_allocator::GuidEntityAllocator,
};
use derivative::Derivative;
use smallvec::SmallVec;

View file

@ -0,0 +1,9 @@
use std::any::{type_name, Any};
/// Downcasting for type-erased values, analogous to `Any` downcasting.
/// NOTE(review): the method names suggest matching is done on `type_name`
/// rather than `TypeId` (see `type_name_of_val`) — confirm against the impls.
pub trait DowncastTypename {
    /// Attempts to downcast to a mutable reference of `T`; `None` on type mismatch.
    fn downcast_typename_mut<T: Any>(&mut self) -> Option<&mut T>;
    /// Attempts to downcast to a shared reference of `T`; `None` on type mismatch.
    fn downcast_typename_ref<T: Any>(&self) -> Option<&T>;
    /// Returns `true` if the erased value's type matches `T`.
    fn is_typename<T: Any>(&self) -> bool;
}
pub fn type_name_of_val<T: ?Sized>(_val: &T) -> &'static str { type_name::<T>() }

View file

@ -7,7 +7,7 @@ use std::fmt::Display;
use std::num::Wrapping;
use std::ops::Deref;
use std::ops::DerefMut;
use std::{collections::HashSet, sync::Arc};
use std::sync::Arc;
pub type EntityIndex = u32;
pub(crate) type EntityVersion = Wrapping<u32>;
@ -247,65 +247,6 @@ impl DerefMut for Blocks {
fn deref_mut(&mut self) -> &mut Self::Target { self.blocks.deref_mut() }
}
#[derive(Default, Debug, Clone)]
pub struct GuidEntityAllocator {
entities: Arc<RwLock<HashSet<Entity>>>,
next_ids: Arc<RwLock<Vec<Entity>>>,
}
impl GuidEntityAllocator {
pub fn is_alive(&self, entity: Entity) -> bool {
self.entities.read().contains(&entity)
}
pub fn push_next_ids(&self, ids: impl Iterator<Item = Entity>) {
self.next_ids.write().extend(ids);
}
/// Allocates a new unused `Entity` ID.
pub fn create_entity(&self) -> Entity {
let entity = if !self.next_ids.read().is_empty() {
self.next_ids.write().pop().unwrap()
} else {
Entity::new(rand::random::<u32>(), Wrapping(1))
};
self.entities.write().insert(entity);
entity
}
/// Creates an iterator which allocates new `Entity` IDs.
pub fn create_entities(&self) -> GuidCreateEntityIter {
GuidCreateEntityIter {
allocator: self,
}
}
pub(crate) fn delete_entity(&self, entity: Entity) -> bool {
self.entities.write().remove(&entity)
}
pub(crate) fn delete_all_entities(&self) {
self.entities.write().clear();
}
pub(crate) fn merge(&self, other: GuidEntityAllocator) {
self.entities.write().extend(other.entities.write().drain())
}
}
pub struct GuidCreateEntityIter<'a> {
allocator: &'a GuidEntityAllocator
}
impl<'a> Iterator for GuidCreateEntityIter<'a> {
type Item = Entity;
fn next(&mut self) -> Option<Self::Item> {
Some(self.allocator.create_entity())
}
}
/// Manages the allocation and deletion of `Entity` IDs within a world.
#[derive(Debug)]
pub struct EntityAllocator {

View file

@ -0,0 +1,54 @@
use crate::entity::Entity;
use parking_lot::RwLock;
use std::{collections::HashSet, num::Wrapping, sync::Arc};
/// An entity allocator that hands out random (GUID-style) entity IDs rather
/// than sequential block-allocated ones. Cheap to clone: all clones share
/// state through `Arc`, so they act as handles to the same allocator.
#[derive(Default, Debug, Clone)]
pub struct GuidEntityAllocator {
    // Set of currently-live entities, guarded for cross-thread access.
    entities: Arc<RwLock<HashSet<Entity>>>,
    // Explicitly queued IDs, handed out before any random ID is generated.
    next_ids: Arc<RwLock<Vec<Entity>>>,
}
impl GuidEntityAllocator {
    /// Returns `true` if `entity` has been allocated and not yet deleted.
    pub fn is_alive(&self, entity: Entity) -> bool { self.entities.read().contains(&entity) }

    /// Queues specific IDs that subsequent `create_entity` calls will hand out
    /// (LIFO) before generating any new random IDs.
    pub fn push_next_ids(&self, ids: impl Iterator<Item = Entity>) {
        self.next_ids.write().extend(ids);
    }

    /// Allocates a new unused `Entity` ID.
    ///
    /// Pops a queued ID when one is available, otherwise generates a random one.
    pub fn create_entity(&self) -> Entity {
        // Pop under a single write lock. The previous read-check-then-write-pop
        // sequence was racy: another thread could drain the queue between the
        // read lock and the write lock, making the later `pop().unwrap()` panic.
        let queued = self.next_ids.write().pop();
        let entity = queued.unwrap_or_else(|| Entity::new(rand::random::<u32>(), Wrapping(1)));
        self.entities.write().insert(entity);
        entity
    }

    /// Creates an (infinite) iterator which allocates new `Entity` IDs.
    pub fn create_entities(&self) -> GuidCreateEntityIter {
        GuidCreateEntityIter { allocator: self }
    }

    /// Removes `entity` from the live set; returns `true` if it was present.
    pub(crate) fn delete_entity(&self, entity: Entity) -> bool {
        self.entities.write().remove(&entity)
    }

    /// Clears the live-entity set entirely.
    pub(crate) fn delete_all_entities(&self) { self.entities.write().clear(); }

    /// Moves all live entities tracked by `other` into this allocator.
    pub(crate) fn merge(&self, other: GuidEntityAllocator) {
        self.entities.write().extend(other.entities.write().drain())
    }
}
/// Iterator adapter over `GuidEntityAllocator::create_entity`;
/// produced by `GuidEntityAllocator::create_entities`.
pub struct GuidCreateEntityIter<'a> {
    // Allocator that actually produces each new entity.
    allocator: &'a GuidEntityAllocator,
}
/// An infinite iterator: every `next` call allocates a fresh entity from the
/// backing allocator, so it never yields `None`.
impl<'a> Iterator for GuidCreateEntityIter<'a> {
    type Item = Entity;

    fn next(&mut self) -> Option<Self::Item> {
        let allocated = self.allocator.create_entity();
        Some(allocated)
    }
}

View file

@ -11,6 +11,8 @@ pub mod iterator;
pub mod query;
pub mod storage;
pub mod world;
pub mod downcast_typename;
pub mod guid_entity_allocator;
#[cfg(feature = "serialize")]
pub mod serialize;

View file

@ -940,7 +940,9 @@ where
///
/// # Safety
///
/// Incorrectly accessing components that are already borrowed elsewhere is undefined behavior.
/// The normal borrowing restrictions apply for the duration of the iteration:
/// * Components borrowed with `Read` access must not be borrowed mutably elsewhere.
/// * Components borrowed with `Write` access must not be borrowed elsewhere at all.
///
/// # Panics
///
@ -996,7 +998,9 @@ where
///
/// # Safety
///
/// Incorrectly accessing components that are already borrowed elsewhere is undefined behavior.
/// The normal borrowing restrictions apply for the duration of the iteration:
/// * Components borrowed with `Read` access must not be borrowed mutably elsewhere.
/// * Components borrowed with `Write` access must not be borrowed elsewhere at all.
///
/// # Panics
///
@ -1050,7 +1054,9 @@ where
///
/// # Safety
///
/// Incorrectly accessing components that are already borrowed elsewhere is undefined behavior.
/// The normal borrowing restrictions apply for the duration of the iteration:
/// * Components borrowed with `Read` access must not be borrowed mutably elsewhere.
/// * Components borrowed with `Write` access must not be borrowed elsewhere at all.
///
/// # Panics
///
@ -1104,7 +1110,9 @@ where
///
/// # Safety
///
/// Incorrectly accessing components that are already borrowed elsewhere is undefined behavior.
/// The normal borrowing restrictions apply for the duration of the iteration:
/// * Components borrowed with `Read` access must not be borrowed mutably elsewhere.
/// * Components borrowed with `Write` access must not be borrowed elsewhere at all.
///
/// # Panics
///
@ -1140,7 +1148,9 @@ where
///
/// # Safety
///
/// Incorrectly accessing components that are already borrowed elsewhere is undefined behavior.
/// The normal borrowing restrictions apply for the duration of the iteration:
/// * Components borrowed with `Read` access must not be borrowed mutably elsewhere.
/// * Components borrowed with `Write` access must not be borrowed elsewhere at all.
///
/// # Panics
///
@ -1177,7 +1187,9 @@ where
///
/// # Safety
///
/// Incorrectly accessing components that are already borrowed elsewhere is undefined behavior.
/// The normal borrowing restrictions apply for the duration of the iteration:
/// * Components borrowed with `Read` access must not be borrowed mutably elsewhere.
/// * Components borrowed with `Write` access must not be borrowed elsewhere at all.
///
/// # Panics
///
@ -1246,7 +1258,9 @@ where
///
/// # Safety
///
/// Incorrectly accessing components that are already borrowed elsewhere is undefined behavior.
/// The normal borrowing restrictions apply for the duration of the iteration:
/// * Components borrowed with `Read` access must not be borrowed mutably elsewhere.
/// * Components borrowed with `Write` access must not be borrowed elsewhere at all.
///
/// # Panics
///
@ -1298,7 +1312,9 @@ where
///
/// # Safety
///
/// Incorrectly accessing components that are already borrowed elsewhere is undefined behavior.
/// The normal borrowing restrictions apply for the duration of the iteration:
/// * Components borrowed with `Read` access must not be borrowed mutably elsewhere.
/// * Components borrowed with `Write` access must not be borrowed elsewhere at all.
///
/// # Panics
///
@ -1350,7 +1366,9 @@ where
///
/// # Safety
///
/// Incorrectly accessing components that are already borrowed elsewhere is undefined behavior.
/// The normal borrowing restrictions apply for the duration of the iteration:
/// * Components borrowed with `Read` access must not be borrowed mutably elsewhere.
/// * Components borrowed with `Write` access must not be borrowed elsewhere at all.
///
/// # Panics
///

View file

@ -1,11 +1,11 @@
use crate::{
entity::{Entity, GuidEntityAllocator},
entity::Entity,
index::{ArchetypeIndex, ChunkIndex, SetIndex},
storage::{
ArchetypeData, ArchetypeDescription, Chunkset, ComponentMeta, ComponentTypeId, TagMeta,
TagStorage, TagTypeId,
},
world::World,
world::World, guid_entity_allocator::GuidEntityAllocator,
};
use serde::{
self,

View file

@ -697,34 +697,65 @@ impl ArchetypeData {
/// Gets the unique ID of this archetype.
pub fn id(&self) -> ArchetypeId { self.id }
pub(crate) fn merge(&mut self, mut other: ArchetypeData) {
let other_tags = &other.tags;
for (other_index, mut set) in other.chunk_sets.drain(..).enumerate() {
// search for a matching chunk set
let mut set_match = None;
for self_index in 0..self.chunk_sets.len() {
let mut matches = true;
for &(type_id, ref tags) in self.tags.0.iter() {
unsafe {
let (self_tag_ptr, size, _) = tags.data_raw();
let (other_tag_ptr, _, _) = other_tags.get(type_id).unwrap().data_raw();
fn find_chunk_set_by_tags(
&self,
other_tags: &Tags,
other_set_index: SetIndex,
) -> Option<SetIndex> {
// search for a matching chunk set
let mut set_match = None;
for self_set_index in 0..self.chunk_sets.len() {
let self_set_index = SetIndex(self_set_index);
let mut matches = true;
for &(type_id, ref tags) in self.tags.0.iter() {
unsafe {
let (self_tag_ptr, size, _) = tags.data_raw();
let (other_tag_ptr, _, _) = other_tags.get(type_id).unwrap().data_raw();
if !tags.element().equals(
self_tag_ptr.as_ptr().add(self_index * size),
other_tag_ptr.as_ptr().add(other_index * size),
) {
matches = false;
break;
}
if !tags.element().equals(
self_tag_ptr.as_ptr().add(self_set_index.0 * size),
other_tag_ptr.as_ptr().add(other_set_index.0 * size),
) {
matches = false;
break;
}
}
if matches {
set_match = Some(self_index);
break;
}
}
if matches {
set_match = Some(self_set_index);
break;
}
}
set_match
}
/// Returns the index of the chunk set in this archetype whose tag values match
/// those of `src_tags` at `src_chunk_set_index`, allocating a new chunk set
/// (with the source tag values cloned into it) when no match exists.
pub(crate) fn find_or_create_chunk_set_by_tags(
    &mut self,
    src_tags: &Tags,
    src_chunk_set_index: SetIndex,
) -> SetIndex {
    let dst_chunk_set_index = self.find_chunk_set_by_tags(src_tags, src_chunk_set_index);
    dst_chunk_set_index.unwrap_or_else(|| {
        // No matching set: allocate one and copy each tag value over.
        self.alloc_chunk_set(|self_tags| {
            for (type_id, other_tags) in src_tags.0.iter() {
                // Raw element-wise clone via the tag's own element metadata.
                // NOTE(review): `data_raw` yields the storage base pointer and the
                // clone reads from `src.as_ptr()` without offsetting by
                // `src_chunk_set_index` — verify this copies the intended set's
                // tag value rather than the first set's.
                unsafe {
                    let (src, _, _) = other_tags.data_raw();
                    let dst = self_tags.get_mut(*type_id).unwrap().alloc_ptr();
                    other_tags.element().clone(src.as_ptr(), dst);
                }
            }
        })
    })
}
pub(crate) fn move_from(&mut self, mut other: ArchetypeData) {
let other_tags = &other.tags;
for (other_index, mut set) in other.chunk_sets.drain(..).enumerate() {
let other_index = SetIndex(other_index);
let set_match = self.find_chunk_set_by_tags(&other_tags, other_index);
if let Some(chunk_set) = set_match {
// if we found a match, move the chunks into the set
let target = &mut self.chunk_sets[chunk_set];
@ -750,6 +781,276 @@ impl ArchetypeData {
self.tags.validate(self.chunk_sets.len());
}
/// Given a source world and archetype, step through all of its chunks and copy the data in it
/// into this archetype. The archetype index is provided so that we can produce EntityLocations.
/// During this process, we can replace pre-existing entities. This function assumes that any
/// entity referenced in replace_mappings actually exists in the world. The public API in world
/// checks this assumption and panics if it is violated.
///
/// See also `clone_from_single`, which copies a specific entity.
#[allow(clippy::too_many_arguments)]
pub(crate) fn clone_from<
    's,
    CloneImplT: crate::world::CloneImpl,
    CloneImplResultT: crate::world::CloneImplResult,
    EntityReplacePolicyT: crate::world::EntityReplacePolicy<'s>,
>(
    &mut self,
    src_world: &crate::world::World,
    src_archetype: &ArchetypeData,
    dst_archetype_index: ArchetypeIndex,
    dst_entity_allocator: &crate::guid_entity_allocator::GuidEntityAllocator,
    dst_entity_locations: &mut crate::entity::Locations,
    clone_impl: &CloneImplT,
    clone_impl_result: &mut CloneImplResultT,
    entity_replace_policy: &EntityReplacePolicyT,
) {
    // Iterate all the chunk sets within the source archetype
    let src_tags = &src_archetype.tags;
    for (src_chunk_set_index, src_chunk_set) in src_archetype.chunk_sets.iter().enumerate() {
        let src_chunk_set_index = SetIndex(src_chunk_set_index);
        // Match (or create) the destination chunk set with identical tag values.
        let dst_chunk_set_index =
            self.find_or_create_chunk_set_by_tags(src_tags, src_chunk_set_index);

        // Iterate all the chunks within the source chunk set
        for (_src_chunk_idx, src_chunk) in src_chunk_set.chunks.iter().enumerate() {
            // Copy the data from source to destination. Continuously find or create chunks as
            // needed until we've copied all the data
            let mut entities_remaining = src_chunk.len();
            while entities_remaining > 0 {
                // Get or allocate a chunk.. since we could be transforming to a larger component size, it's possible
                // that even a brand-new, empty chunk won't be large enough to hold everything in the chunk we are copying from
                let dst_free_chunk_index =
                    self.get_free_chunk(dst_chunk_set_index, entities_remaining);
                let dst_chunk_set = &mut self.chunk_sets[dst_chunk_set_index];
                let dst_chunk = &mut dst_chunk_set.chunks[dst_free_chunk_index];

                // Determine how many entities we will write
                let entities_to_write =
                    std::cmp::min(entities_remaining, dst_chunk.capacity() - dst_chunk.len());

                // Prepare to write to the chunk storage
                let mut writer = dst_chunk.writer();
                let (dst_entities, dst_components) = writer.get();

                // Find the region of memory we will be reading from in the source chunk
                let src_begin_idx = ComponentIndex(src_chunk.len() - entities_remaining);
                let src_end_idx = ComponentIndex(src_begin_idx.0 + entities_to_write);
                let dst_begin_idx = ComponentIndex(dst_entities.len());
                let dst_end_idx = ComponentIndex(dst_entities.len() + entities_to_write);

                // Copy all the entities to the destination chunk. The normal case is that we simply allocate
                // new entities.
                //
                // We also allow end-user to specify a HashMap<Entity, Entity>. The key is an Entity from
                // the source chunk and the value is an Entity from the destination chunk. Rather than appending
                // data to the destination chunk, we will *replace* the data, according to the mapping. This
                // is specifically intended for use with hot-reloading data. When some source data is changed,
                // we can use the mapping to respawn entities as needed using the new data.

                // We know how many entities will be appended to this list
                dst_entities.reserve(dst_entities.len() + entities_to_write);

                for src_entity in &src_chunk.entities[src_begin_idx.0..src_end_idx.0] {
                    // Determine if there is an entity we will be replacing
                    let dst_entity = entity_replace_policy.get_dst_entity(*src_entity);

                    // The location of the next entity
                    let location = EntityLocation::new(
                        dst_archetype_index,
                        dst_chunk_set_index,
                        dst_free_chunk_index,
                        ComponentIndex(dst_entities.len()),
                    );

                    // Determine the Entity to use for this element
                    let dst_entity = if let Some(dst_entity) = dst_entity {
                        // We are replacing data
                        // Verify that the entity is alive.. this checks the index and version of the entity
                        // The entity should be alive because World::clone_from verifies this
                        debug_assert!(dst_entity_allocator.is_alive(dst_entity));
                        dst_entity
                    } else {
                        // We are appending data, allocate a new entity
                        dst_entity_allocator.create_entity()
                    };

                    dst_entity_locations.set(dst_entity, location);
                    dst_entities.push(dst_entity);
                    clone_impl_result.add_result(*src_entity, dst_entity);
                }

                // Copy the per-component data for this span of entities.
                ArchetypeData::clone_components(
                    clone_impl,
                    src_world,
                    src_archetype,
                    src_chunk,
                    src_begin_idx..src_end_idx,
                    &dst_entities[dst_begin_idx.0..dst_end_idx.0],
                    dst_components,
                    entities_to_write,
                );

                entities_remaining -= entities_to_write;
            }
        }
    }
}
/// Given a source world, archetype, and entity, copy it into this archetype. The archetype
/// index is provided so that we can produce EntityLocations.
/// During this process, we can replace a pre-existing entity. This function assumes that if
/// replace_mapping is not none, that the entity exists. The public API in world checks this
/// assumption and panics if it is violated.
///
/// See also `clone_from`, which copies all data.
#[allow(clippy::too_many_arguments)]
pub(crate) fn clone_from_single<C: crate::world::CloneImpl>(
    &mut self,
    src_world: &crate::world::World,
    src_archetype: &ArchetypeData,
    src_location: &EntityLocation,
    dst_archetype_index: ArchetypeIndex,
    dst_entity_allocator: &crate::guid_entity_allocator::GuidEntityAllocator,
    dst_entity_locations: &mut crate::entity::Locations,
    clone_impl: &C,
    replace_mapping: Option<Entity>,
) -> Entity {
    // We are reading from a specific chunk set within the source archetype
    let src_tags = &src_archetype.tags;
    let src_chunk_set_index = src_location.set();
    let src_chunk_set = &src_archetype.chunk_sets[src_chunk_set_index];

    // Find or create the chunk set that matches the source chunk set
    let dst_chunk_set_index =
        self.find_or_create_chunk_set_by_tags(src_tags, src_chunk_set_index);

    // Get the source chunk
    let src_chunk_idx = src_location.chunk();
    let src_chunk = &src_chunk_set.chunks[src_chunk_idx];

    // Get or allocate a chunk.. since we could be transforming to a larger component size, it's possible
    // that even a brand-new, empty chunk won't be large enough to hold everything in the chunk we are copying from
    let dst_free_chunk_index = self.get_free_chunk(dst_chunk_set_index, 1);
    let dst_chunk_set = &mut self.chunk_sets[dst_chunk_set_index];
    let dst_chunk = &mut dst_chunk_set.chunks[dst_free_chunk_index];

    // Determine how many entities we will write
    let entities_to_write = 1;

    // Prepare to write to the chunk storage
    let mut writer = dst_chunk.writer();
    let (dst_entities, dst_components) = writer.get();

    // Find the region of memory we will be reading from in the source chunk
    let src_begin_idx = src_location.component();
    let src_end_idx = ComponentIndex(src_begin_idx.0 + 1);

    // We know how many entities will be appended to this list
    let dst_begin_idx = ComponentIndex(dst_entities.len());
    let dst_end_idx = ComponentIndex(dst_entities.len() + entities_to_write);

    // Copy the entity to the destination chunk. The normal case is that we simply allocate
    // a new entity.
    //
    // We also allow end-user to specify a Option<Entity>. The src Entity from will *replace* the
    // data of the given Entity

    // The location of the next entity
    let location = EntityLocation::new(
        dst_archetype_index,
        dst_chunk_set_index,
        dst_free_chunk_index,
        ComponentIndex(dst_entities.len()),
    );

    // Determine the Entity to use for this element: reuse the replacement
    // target if one was supplied, otherwise allocate a fresh one.
    let dst_entity = if let Some(dst_entity) = replace_mapping {
        // We are replacing data
        // Verify that the entity is alive.. this checks the index and version of the entity
        // The entity should be alive because World::clone_from verifies this
        debug_assert!(dst_entity_allocator.is_alive(dst_entity));
        dst_entity
    } else {
        // We are appending data, allocate a new entity
        dst_entity_allocator.create_entity()
    };

    dst_entity_locations.set(dst_entity, location);
    dst_entities.push(dst_entity);

    // Copy the component data for this single entity.
    ArchetypeData::clone_components(
        clone_impl,
        src_world,
        src_archetype,
        src_chunk,
        src_begin_idx..src_end_idx,
        &dst_entities[dst_begin_idx.0..dst_end_idx.0],
        dst_components,
        entities_to_write,
    );

    dst_entity
}
/// Implements shared logic between `clone_from` and `clone_from_single`. For every component
/// type in the given archetype, copies `entities_to_write` elements (the `src_range` span of
/// the source chunk) into the destination chunk's component storage, delegating the actual
/// per-element copy/transform to the provided `CloneImpl`.
#[allow(clippy::too_many_arguments)]
fn clone_components<C: crate::world::CloneImpl>(
    clone_impl: &C,
    src_world: &crate::world::World,
    src_archetype: &ArchetypeData,
    src_chunk: &ComponentStorage,
    src_range: core::ops::Range<ComponentIndex>,
    dst_entities: &[Entity],
    dst_components: &UnsafeCell<Components>,
    entities_to_write: usize,
) {
    for (src_type, _) in src_archetype.description().components() {
        // NOTE(review): re-borrows the UnsafeCell each iteration; soundness relies on
        // the caller guaranteeing exclusive access to the destination components.
        let dst_components = unsafe { &mut *dst_components.get() };

        // Look up what type we should transform the data into (can be the same type, meaning it should be cloned)
        let (dst_type, _) = clone_impl.map_component_type(*src_type);

        // Create a writer that will insert the data into the destination chunk
        let mut dst_component_writer = dst_components
            .get_mut(dst_type)
            .expect("ComponentResourceSet missing in clone_from")
            .writer();

        // Find the data in the source chunk
        let src_component_storage = src_chunk
            .components(*src_type)
            .expect("ComponentResourceSet missing in clone_from");

        // Now copy the data
        unsafe {
            let (src_component_chunk_data, src_element_size, _) =
                src_component_storage.data_raw();

            // offset to the first entity we want to copy from the source chunk
            let src_data = src_component_chunk_data.add(src_element_size * src_range.start.0);

            // allocate the space we need in the destination chunk
            let dst_data = dst_component_writer.reserve_raw(entities_to_write).as_ptr();

            // Delegate the clone operation to the provided CloneImpl
            clone_impl.clone_components(
                src_world,
                src_chunk,
                src_range.clone(),
                *src_type,
                &src_chunk.entities[src_range.start.0..src_range.end.0],
                dst_entities,
                src_data,
                dst_data,
                entities_to_write,
            );
        }
    }
}
/// Iterate all entities in existence by iterating across archetypes, chunk sets, and chunks
pub(crate) fn iter_entities<'a>(&'a self) -> impl Iterator<Item = Entity> + 'a {
self.chunk_sets.iter().flat_map(move |set| {

View file

@ -2,8 +2,9 @@ use crate::borrow::Ref;
use crate::borrow::RefMut;
use crate::entity::BlockAllocator;
use crate::entity::Entity;
use crate::guid_entity_allocator::GuidEntityAllocator;
use crate::entity::EntityLocation;
use crate::entity::{GuidEntityAllocator, Locations};
use crate::entity::Locations;
use crate::event::Event;
use crate::filter::ArchetypeFilterData;
use crate::filter::ChunksetFilterData;
@ -27,6 +28,7 @@ use crate::storage::Tags;
use crate::tuple::TupleEq;
use parking_lot::Mutex;
use std::cell::UnsafeCell;
use std::collections::HashMap;
use std::iter::Enumerate;
use std::iter::Fuse;
use std::iter::FusedIterator;
@ -292,18 +294,9 @@ impl World {
}
if self.entity_allocator.delete_entity(entity) {
// find entity's chunk
let location = self.entity_locations.get(entity).unwrap();
let chunk = self.storage_mut().chunk_mut(location).unwrap();
// swap remove with last entity in chunk
if let Some(swapped) = chunk.swap_remove(location.component(), true) {
// record swapped entity's new location
self.entity_locations.set(swapped, location);
}
self.delete_location(location);
trace!(world = self.id().0, ?entity, "Deleted entity");
true
} else {
false
@ -319,6 +312,17 @@ impl World {
self.entity_allocator.delete_all_entities();
}
/// Removes the component data stored at `location` via swap-remove, patching up
/// the recorded location of whichever entity was swapped into the vacated slot.
/// Does not touch the entity allocator; callers manage entity liveness separately.
fn delete_location(&mut self, location: EntityLocation) {
    // find entity's chunk
    let chunk = self.storage_mut().chunk_mut(location).unwrap();

    // swap remove with last entity in chunk
    if let Some(swapped) = chunk.swap_remove(location.component(), true) {
        // record swapped entity's new location
        self.entity_locations.set(swapped, location);
    }
}
fn find_chunk_with_delta(
&mut self,
source_location: EntityLocation,
@ -728,6 +732,33 @@ impl World {
/// Determines if the given `Entity` is alive within this `World`.
pub fn is_alive(&self, entity: Entity) -> bool { self.entity_allocator.is_alive(entity) }
/// Returns the entity's component types, if the entity exists.
pub fn entity_component_types(
    &self,
    entity: Entity,
) -> Option<&[(ComponentTypeId, ComponentMeta)]> {
    // Dead entities have no component layout to report.
    if !self.is_alive(entity) {
        return None;
    }
    // `and_then` flattens the nested Option from the location -> archetype lookup
    // (replaces the clippy-flagged `map(..).unwrap_or(None)` pattern).
    self.entity_locations
        .get(entity)
        .and_then(|location| self.storage().archetype(location.archetype()))
        .map(|archetype| archetype.description().components())
}
/// Returns the entity's tag types, if the entity exists.
pub fn entity_tag_types(&self, entity: Entity) -> Option<&[(TagTypeId, TagMeta)]> {
    // Dead entities have no tag layout to report.
    if !self.is_alive(entity) {
        return None;
    }
    // `and_then` flattens the nested Option from the location -> archetype lookup
    // (replaces the clippy-flagged `map(..).unwrap_or(None)` pattern).
    self.entity_locations
        .get(entity)
        .and_then(|location| self.storage().archetype(location.archetype()))
        .map(|archetype| archetype.description().tags())
}
/// Iteratively defragments the world's internal memory.
///
/// This compacts entities into fewer more continuous chunks.
@ -765,9 +796,9 @@ impl World {
}
}
/// Merge this world with another, copying all appropriate archetypes, tags entities and components
/// into this world.
pub fn merge(&mut self, world: World) {
/// Move entities from a world to this world, copying all appropriate archetypes,
/// tags entities and components into this world.
pub fn move_from(&mut self, world: World) {
let span =
span!(Level::INFO, "Merging worlds", source = world.id().0, destination = ?self.id());
let _guard = span.enter();
@ -793,7 +824,7 @@ impl World {
self.storage_mut()
.archetype_mut(arch_index)
.unwrap()
.merge(archetype);
.move_from(archetype);
arch_index
} else {
// archetype does not already exist, append
@ -810,6 +841,199 @@ impl World {
}
}
/// This will *copy* the data from `src_world` into this world. The logic to do the copy is
/// delegated to the `clone_impl` provided by the user. In addition to simple copying, it's also
/// possible to transform from one type to another. This is useful for cases where you want to
/// read from serializable data (like a physics shape definition) and construct something that
/// isn't serializable (like a handle to a physics body)
///
/// By default, all entities in the new world will be assigned a new Entity. `result_mappings`
/// (if not None) will be populated with the old/new Entities, which allows for mapping data
/// between the old and new world.
///
/// If you want to replace existing entities (for example to hot-reload data from a file,)
/// populate `replace_mappings`. For every entry in this map, the key must exist in the source
/// world and the value must exist in the destination world. All entities in the destination
/// world referenced by this map will be deleted, and the entities copied over will be assigned
/// the same entity. If these constraints are not met, this function will panic.
pub fn clone_from<
    's,
    CloneImplT: CloneImpl,
    CloneImplResultT: CloneImplResult,
    EntityReplacePolicyT: EntityReplacePolicy<'s>,
>(
    &mut self,
    src_world: &World,
    clone_impl: &CloneImplT,
    clone_impl_result: &mut CloneImplResultT,
    entity_replace_policy: &'s EntityReplacePolicyT,
) {
    let span = span!(Level::INFO, "CloneMerging worlds", source = src_world.id().0, destination = ?self.id());
    let _guard = span.enter();

    // NOTE(review): raw UnsafeCell access to both worlds' storage; sound only
    // while no other borrows of either storage exist for the duration of the call.
    let src_storage = unsafe { &(*src_world.storage.get()) };
    let dst_storage = unsafe { &mut (*self.storage.get()) };

    // First check that all the src entities exist in the source world. We're assuming the
    // source data will be available later to replace the data we're about to delete
    for k in entity_replace_policy.src_entities() {
        if !src_world.entity_allocator.is_alive(k) {
            panic!("clone_from assumes all replace_mapping keys exist in the source world");
        }
    }

    // Delete all the data associated with dst_entities. This leaves the
    // associated entities in a dangling state, but we'll fix this later when we copy the
    // data over
    for entity_to_replace in entity_replace_policy.dst_entities() {
        if self.entity_allocator.is_alive(entity_to_replace) {
            let location = self
                .entity_locations
                .get(entity_to_replace)
                .expect("Failed to get location of live entity");
            self.delete_location(location);
        } else {
            panic!(
                "clone_from assumes all replace_mapping values exist in the destination world"
            );
        }
    }

    // Iterate all archetypes in the src world
    for src_archetype in src_storage.archetypes() {
        let archetype_data = ArchetypeFilterData {
            component_types: self.storage().component_types(),
            tag_types: self.storage().tag_types(),
        };

        // Map the source archetype through clone_impl and find/create the
        // matching archetype in the destination.
        let dst_archetype_index = World::find_or_create_archetype_for_clone_move(
            clone_impl,
            src_archetype.description(),
            archetype_data,
            dst_storage,
        );

        // Do the clone_from for this archetype
        dst_storage
            .archetype_mut(dst_archetype_index)
            .unwrap()
            .clone_from(
                &src_world,
                src_archetype,
                dst_archetype_index,
                &self.entity_allocator,
                &mut self.entity_locations,
                clone_impl,
                clone_impl_result,
                entity_replace_policy,
            );
    }
}
/// This will *copy* the `src_entity` from `src_world` into this world. The logic to do the copy
/// is delegated to the `clone_impl` provided by the user. In addition to simple copying, it's
/// also possible to transform from one type to another. This is useful for cases where you want
/// to read from serializable data (like a physics shape definition) and construct something
/// that isn't serializable (like a handle to a physics body)
///
/// By default, the entity in the new world will be assigned a new Entity. The return value
/// indicates the Entity in the new world, which allows for mapping data the old and new world.
///
/// If you want to replace an existing entity (for example to hot-reload data from a file,)
/// populate `replace_mapping`. This entity must exist in the destination world. The entity in
/// the destination world will be deleted, and the entity copied over will be assigned
/// the same entity. If these constraints are not met, this function will panic.
pub fn clone_from_single<C: CloneImpl>(
    &mut self,
    src_world: &World,
    src_entity: Entity,
    clone_impl: &C,
    replace_mapping: Option<Entity>,
) -> Entity {
    let span = span!(Level::INFO, "CloneMergingSingle worlds", source = src_world.id().0, destination = ?self.id());
    let _guard = span.enter();

    // NOTE(review): raw UnsafeCell access to both worlds' storage; sound only
    // while no other borrows of either storage exist for the duration of the call.
    let src_storage = unsafe { &(*src_world.storage.get()) };
    let dst_storage = unsafe { &mut (*self.storage.get()) };

    if !src_world.entity_allocator.is_alive(src_entity) {
        panic!("src_entity not alive");
    }

    // Erase all entities that are referred to by value. The code following will update the location
    // of all these entities to point to new, valid locations
    if let Some(replace_mapping) = replace_mapping {
        if self.entity_allocator.is_alive(replace_mapping) {
            let location = self
                .entity_locations
                .get(replace_mapping)
                .expect("Failed to get location of live entity");
            self.delete_location(location);
        } else {
            panic!("clone_from_single assumes entity_mapping exists in the destination world");
        }
    }

    let src_location = src_world.entity_locations.get(src_entity).unwrap();
    let src_archetype = &src_storage.archetypes()[src_location.archetype()];

    // Iterate all archetypes in the src world
    let archetype_data = ArchetypeFilterData {
        component_types: self.storage().component_types(),
        tag_types: self.storage().tag_types(),
    };
    let dst_archetype_index = World::find_or_create_archetype_for_clone_move(
        clone_impl,
        src_archetype.description(),
        archetype_data,
        dst_storage,
    );

    // Do the clone_from for this archetype
    dst_storage
        .archetype_mut(dst_archetype_index)
        .unwrap()
        .clone_from_single(
            &src_world,
            src_archetype,
            &src_location,
            dst_archetype_index,
            &self.entity_allocator,
            &mut self.entity_locations,
            clone_impl,
            replace_mapping,
        )
}
/// Resolves the destination archetype for a clone/move: every source component type is mapped
/// through `clone_impl`, then a matching archetype in `dst_storage` is reused if one exists,
/// otherwise a new one is allocated.
fn find_or_create_archetype_for_clone_move<C: CloneImpl>(
    clone_impl: &C,
    src_archetype_description: &ArchetypeDescription,
    archetype_data: ArchetypeFilterData,
    dst_storage: &mut Storage,
) -> ArchetypeIndex {
    // Translate each source component type via the clone impl (which may substitute a
    // different component type) to build the description we will write into.
    let mut dst_archetype = ArchetypeDescription::default();
    for (src_type, _src_meta) in src_archetype_description.components() {
        let (dst_type, dst_meta) = clone_impl.map_component_type(*src_type);
        dst_archetype.register_component_raw(dst_type, dst_meta);
    }

    // Reuse the first matching archetype in the destination storage, or allocate a new one.
    match dst_archetype.matches(archetype_data).matching_indices().next() {
        Some(existing) => ArchetypeIndex(existing),
        None => dst_storage.alloc_archetype(dst_archetype).0,
    }
}
fn find_archetype<T, C>(&self, tags: &mut T, components: &mut C) -> Option<ArchetypeIndex>
where
T: for<'a> Filter<ArchetypeFilterData<'a>>,
@ -900,6 +1124,155 @@ impl Default for World {
fn default() -> Self { Self::new() }
}
/// Describes how to handle a `clone_from`. Allows the user to transform components from one type
/// to another and provide their own implementation for cloning/transforming
pub trait CloneImpl {
    /// When a component of the provided `component_type` is encountered, we will transfer data
    /// from it into the returned component type. For a basic clone implementation, this function
    /// should return the same type as was passed into it
    fn map_component_type(
        &self,
        component_type_id: ComponentTypeId,
    ) -> (ComponentTypeId, ComponentMeta);

    /// When called, the implementation should copy the data from `src_data` to `dst_data`. The
    /// `src_world` and `src_entities` are provided so that other components on the same Entity
    /// can be looked up. `src_component_storage` and `src_component_storage_indexes` identify
    /// which slots in the source storage are being copied.
    ///
    /// NOTE(review): `src_data`/`dst_data` are raw, type-erased buffers; presumably they hold
    /// `num_components` contiguous values of the source/destination component types — confirm
    /// against the caller before relying on this layout.
    #[allow(clippy::too_many_arguments)]
    fn clone_components(
        &self,
        src_world: &World,
        src_component_storage: &ComponentStorage,
        src_component_storage_indexes: core::ops::Range<ComponentIndex>,
        src_type: ComponentTypeId,
        src_entities: &[Entity],
        dst_entities: &[Entity],
        src_data: *const u8,
        dst_data: *mut u8,
        num_components: usize,
    );
}
/// Used along with `CloneImpl`, allows receiving results from a `clone_from` or `clone_from_single`
/// call.
pub trait CloneImplResult {
    /// For every entity that is copied, this function will be called, passing the entity in the
    /// source and destination worlds. Implementations may record, count, or ignore the pair.
    fn add_result(&mut self, src_entity: Entity, dst_entity: Entity);
}
/// Used along with `CloneImpl`, allows specifying that certain entities in the receiving world should
/// be replaced with entities from the source world.
///
/// A typical implementation of this trait would be to wrap a HashMap. `src_entities` would be
/// implemented by returning keys(), `dst_entities` would be implemented by returning values(), and
/// `get_dst_entity` would be implemented by returning the result of get(src_entity).
///
/// Default implementations provided in legion include:
/// * `NoneEntityReplacePolicy` - No entity replacement will occur
/// * `HashMapEntityReplacePolicy` - Wraps the standard library's HashMap.
pub trait EntityReplacePolicy<'s> {
    /// Returns all entities in the source world that will replace data in the destination world
    ///
    /// # Safety
    ///
    /// * All entities returned via the iterator must exist in the source world
    /// * All entities that will be copied from the source world must be included in the
    ///   returned iterator.
    fn src_entities<'a>(&'s self) -> Box<dyn Iterator<Item = Entity> + 'a>
    where
        's: 'a;

    /// Returns all entities in the destination world that will be replaced
    ///
    /// # Safety
    ///
    /// * All entities returned via the iterator must exist in the destination world
    /// * All entities that will be replaced in the destination world must be included in the
    ///   returned iterator
    fn dst_entities<'a>(&'s self) -> Box<dyn Iterator<Item = Entity> + 'a>
    where
        's: 'a;

    /// Returns the entity in the destination world that will be replaced by the given entity in the
    /// source world, otherwise None if the entity in the source world should not replace anything.
    ///
    /// # Safety
    ///
    /// * All entities passed into this function that result in a non-None return value must be
    ///   included in the iterator returned by `src_entities`
    /// * All entities returned by this function must be included in the iterator returned by
    ///   `dst_entities`
    fn get_dst_entity(&self, src_entity: Entity) -> Option<Entity>;
}
/// Opt-out implementation of `CloneImplResult`: every reported pair is discarded.
/// (See comments on `CloneImplResult`)
pub struct NoneCloneImplResult;

impl CloneImplResult for NoneCloneImplResult {
    /// Intentionally a no-op; this policy keeps no results.
    fn add_result(&mut self, _src_entity: Entity, _dst_entity: Entity) {}
}
/// Used to opt-out of replacing entities during a `clone_from` or `clone_from_single` call.
/// (See comments on `EntityReplacePolicy`)
pub struct NoneEntityReplacePolicy;

impl<'s> EntityReplacePolicy<'s> for NoneEntityReplacePolicy {
    /// No source-world entities replace anything.
    fn src_entities<'a>(&'s self) -> Box<dyn Iterator<Item = Entity> + 'a>
    where
        's: 'a,
    {
        // `iter::empty()` is the idiomatic zero-element iterator
        // (clearer than `Empty::default()`).
        Box::new(std::iter::empty())
    }

    /// No destination-world entities are replaced.
    fn dst_entities<'a>(&'s self) -> Box<dyn Iterator<Item = Entity> + 'a>
    where
        's: 'a,
    {
        Box::new(std::iter::empty())
    }

    /// Never maps a source entity onto a destination entity.
    fn get_dst_entity(&self, _src_entity: Entity) -> Option<Entity> { None }
}
/// Default implementation of `CloneImplResult` that uses a hash map. Keys are entities in the
/// source world and values are entities in the destination world. (See comments on
/// `CloneImplResult`)
pub struct HashMapCloneImplResult<'m>(pub &'m mut HashMap<Entity, Entity>);

impl<'m> CloneImplResult for HashMapCloneImplResult<'m> {
    /// Records `src_entity -> dst_entity`, overwriting any prior entry for that key.
    fn add_result(&mut self, src_entity: Entity, dst_entity: Entity) {
        let HashMapCloneImplResult(map) = self;
        map.insert(src_entity, dst_entity);
    }
}
/// Default implementation of `EntityReplacePolicy` that uses a hash map. Keys are entities in the
/// source world and values are entities in the destination world. (See comments on
/// `EntityReplacePolicy`)
pub struct HashMapEntityReplacePolicy<'m>(pub &'m HashMap<Entity, Entity>);

impl<'m, 's> EntityReplacePolicy<'s> for HashMapEntityReplacePolicy<'m> {
    /// Source-world entities are exactly the map's keys.
    fn src_entities<'a>(&'s self) -> Box<dyn Iterator<Item = Entity> + 'a>
    where
        's: 'a,
    {
        Box::new(self.0.keys().copied())
    }

    /// Destination-world entities are exactly the map's values.
    fn dst_entities<'a>(&'s self) -> Box<dyn Iterator<Item = Entity> + 'a>
    where
        's: 'a,
    {
        Box::new(self.0.values().copied())
    }

    /// A source entity replaces whichever destination entity it maps to, if any.
    fn get_dst_entity(&self, src_entity: Entity) -> Option<Entity> {
        self.0.get(&src_entity).cloned()
    }
}
#[derive(Error, Debug)]
pub enum EntityMutationError {
#[error("entity does not exist")]
@ -1874,7 +2247,7 @@ mod tests {
}
#[test]
fn merge() {
fn move_from() {
let universe = Universe::new();
let mut a = universe.create_world();
let mut b = universe.create_world();
@ -1895,7 +2268,7 @@ mod tests {
],
)[0];
b.merge(a);
b.move_from(a);
assert_eq!(*b.get_component::<Pos>(entity_b).unwrap(), Pos(7., 8., 9.));
assert_eq!(*b.get_component::<Pos>(entity_a).unwrap(), Pos(1., 2., 3.));

View file

@ -1,6 +1,6 @@
use downcast_rs::{impl_downcast, Downcast};
use fxhash::FxHashMap;
use legion_core::borrow::DowncastTypename;
use legion_core::downcast_typename::DowncastTypename;
use legion_core::borrow::{AtomicRefCell, Ref, RefMut};
use legion_core::query::{Read, ReadOnly, Write};
use std::{

View file

@ -179,10 +179,10 @@
//! let mut world_a = universe.create_world();
//! let mut world_b = universe.create_world();
//!
//! // Merge all entities from `world_b` into `world_a`
//! // Move all entities from `world_b` into `world_a`
//! // Entity IDs are guaranteed to be unique across worlds and will
//! // remain unchanged across the merge.
//! world_a.merge(world_b);
//! world_a.move_from(world_b);
//! ```
//!
//! ### Chunk Iteration

View file

@ -2,7 +2,8 @@
use crate::ComponentRegistry;
use legion::{
entity::{EntityIndex, GuidEntityAllocator},
entity::EntityIndex,
guid_entity_allocator::GuidEntityAllocator,
prelude::*,
storage::{
ArchetypeDescription, ComponentMeta, ComponentResourceSet, ComponentTypeId, TagMeta,

View file

@ -7,6 +7,7 @@ fn main() {
// Registering components informs Bevy that they exist. This allows them to be used when loading/saving scenes
.register_component::<Test>()
.register_component::<Foo>()
// .add_startup_system(setup)
.add_startup_system(load_scene)
.run();
}