Update hashbrown to 0.15

parent d9282486e3
commit 4ca0902ab4

73 changed files with 330 additions and 264 deletions
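Every hunk below follows from the same root cause: hashbrown 0.15 changes its default-hasher setup, and Bevy's re-exported `HashMap`/`HashSet` now default to the foldhash-backed `FixedHasher` rather than ahash. Constructors and conveniences tied to a library's own default hasher (`new()`, `with_capacity()`, `HashSet::from([...])`) are replaced with hasher-agnostic equivalents. A minimal sketch of the recurring replacements, assuming the `bevy_utils` re-exports as of this commit:

    use bevy_utils::{FixedHasher, HashMap, HashSet};
    use core::hash::BuildHasher;

    fn migration_patterns() {
        // `HashMap::new()` appears to exist only for hashbrown's own default
        // hasher type, which the Bevy aliases no longer use, so call sites
        // switch to `default()`:
        let mut map: HashMap<&str, u32> = HashMap::default();
        map.insert("a", 1);

        // Pre-sized collections pass the hasher explicitly:
        let _sized: HashMap<&str, u32> =
            HashMap::with_capacity_and_hasher(16, Default::default());

        // One-shot hashing goes through `BuildHasher::hash_one` instead of
        // manually driving an `AHasher`:
        let _hash: u64 = FixedHasher.hash_one("a");

        // `HashSet::from([...])` (a std convenience) becomes a `collect`:
        let _set: HashSet<u32> = [1, 2, 3].into_iter().collect();
    }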
@@ -419,7 +419,7 @@ impl AnimationGraph {
         Self {
             graph,
             root,
-            mask_groups: HashMap::new(),
+            mask_groups: HashMap::default(),
         }
     }
 }

@@ -47,9 +47,8 @@ use bevy_reflect::{
 use bevy_time::Time;
 use bevy_transform::{prelude::Transform, TransformSystem};
 use bevy_utils::{
-    hashbrown::HashMap,
     tracing::{trace, warn},
-    NoOpHash, TypeIdMap,
+    HashMap, NoOpHash, TypeIdMap,
 };
 use petgraph::graph::NodeIndex;
 use serde::{Deserialize, Serialize};

@@ -124,7 +124,7 @@ impl App {
         Self {
             sub_apps: SubApps {
                 main: SubApp::new(),
-                sub_apps: HashMap::new(),
+                sub_apps: HashMap::default(),
             },
             runner: Box::new(run_once),
         }

@@ -515,6 +515,8 @@ pub enum UntypedAssetConversionError {
 #[cfg(test)]
 mod tests {
     use bevy_reflect::PartialReflect;
+    use bevy_utils::FixedHasher;
+    use core::hash::BuildHasher;

     use super::*;

@@ -525,9 +527,7 @@ mod tests {

     /// Simple utility to directly hash a value using a fixed hasher
     fn hash<T: Hash>(data: &T) -> u64 {
-        let mut hasher = bevy_utils::AHasher::default();
-        data.hash(&mut hasher);
-        hasher.finish()
+        FixedHasher.hash_one(data)
     }

     /// Typed and Untyped `Handles` should be equivalent to each other and themselves

@@ -417,11 +417,9 @@ mod tests {

     /// Simple utility to directly hash a value using a fixed hasher
     fn hash<T: Hash>(data: &T) -> u64 {
-        use core::hash::Hasher;
+        use core::hash::BuildHasher;

-        let mut hasher = bevy_utils::AHasher::default();
-        data.hash(&mut hasher);
-        hasher.finish()
+        bevy_utils::FixedHasher.hash_one(data)
     }

     /// Typed and Untyped `AssetIds` should be equivalent to each other and themselves

@@ -44,7 +44,7 @@ impl<R: AssetReader> GatedReader<R> {
     /// Creates a new [`GatedReader`], which wraps the given `reader`. Also returns a [`GateOpener`] which
     /// can be used to open "path gates" for this [`GatedReader`].
     pub fn new(reader: R) -> (Self, GateOpener) {
-        let gates = Arc::new(RwLock::new(HashMap::new()));
+        let gates = Arc::new(RwLock::new(HashMap::default()));
         (
             Self {
                 reader,

@@ -343,7 +343,7 @@ impl AssetSourceBuilders {
     /// Builds a new [`AssetSources`] collection. If `watch` is true, the unprocessed sources will watch for changes.
     /// If `watch_processed` is true, the processed sources will watch for changes.
     pub fn build_sources(&mut self, watch: bool, watch_processed: bool) -> AssetSources {
-        let mut sources = HashMap::new();
+        let mut sources: HashMap<_, _> = HashMap::default();
         for (id, source) in &mut self.sources {
             if let Some(data) = source.build(
                 AssetSourceId::Name(id.clone_owned()),

@@ -153,7 +153,7 @@ pub struct LoadedAsset<A: Asset> {
 impl<A: Asset> LoadedAsset<A> {
     /// Create a new loaded asset. This will use [`VisitAssetDependencies`](crate::VisitAssetDependencies) to populate `dependencies`.
     pub fn new_with_dependencies(value: A, meta: Option<Box<dyn AssetMetaDyn>>) -> Self {
-        let mut dependencies = HashSet::new();
+        let mut dependencies: HashSet<_> = HashSet::default();
         value.visit_dependencies(&mut |id| {
             dependencies.insert(id);
         });

@@ -395,10 +395,10 @@ impl AssetInfos {

         loaded_asset.value.insert(loaded_asset_id, world);
         let mut loading_deps = loaded_asset.dependencies;
-        let mut failed_deps = HashSet::new();
+        let mut failed_deps: HashSet<_> = HashSet::default();
         let mut dep_error = None;
         let mut loading_rec_deps = loading_deps.clone();
-        let mut failed_rec_deps = HashSet::new();
+        let mut failed_rec_deps: HashSet<_> = HashSet::default();
         let mut rec_dep_error = None;
         loading_deps.retain(|dep_id| {
             if let Some(dep_info) = self.get_mut(*dep_id) {

@@ -1544,7 +1544,7 @@ pub fn handle_internal_asset_events(world: &mut World) {
            }
        };

-        let mut paths_to_reload = HashSet::new();
+        let mut paths_to_reload: HashSet<_> = HashSet::default();
        let mut handle_event = |source: AssetSourceId<'static>, event: AssetSourceEvent| {
            match event {
                // TODO: if the asset was processed and the processed file was changed, the first modified event

@@ -7,9 +7,9 @@ use alloc::borrow::Cow;
 use bevy_reflect::std_traits::ReflectDefault;
 #[cfg(feature = "bevy_reflect")]
 use bevy_reflect::Reflect;
-use bevy_utils::AHasher;
+use bevy_utils::FixedHasher;
 use core::{
-    hash::{Hash, Hasher},
+    hash::{BuildHasher, Hash, Hasher},
     ops::Deref,
 };

@@ -80,9 +80,7 @@ impl Name {
     }

     fn update_hash(&mut self) {
-        let mut hasher = AHasher::default();
-        self.name.hash(&mut hasher);
-        self.hash = hasher.finish();
+        self.hash = FixedHasher.hash_one(&self.name);
     }
 }

@@ -405,7 +405,7 @@ pub fn prepare_core_2d_depth_textures(
     opaque_2d_phases: Res<ViewBinnedRenderPhases<Opaque2d>>,
     views_2d: Query<(Entity, &ExtractedCamera, &Msaa), (With<Camera2d>,)>,
 ) {
-    let mut textures = HashMap::default();
+    let mut textures: HashMap<_, _> = HashMap::default();
     for (view, camera, msaa) in &views_2d {
         if !opaque_2d_phases.contains_key(&view) || !transparent_2d_phases.contains_key(&view) {
             continue;

@@ -633,7 +633,7 @@ pub fn prepare_core_3d_depth_textures(
         &Msaa,
     )>,
 ) {
-    let mut render_target_usage = HashMap::default();
+    let mut render_target_usage: HashMap<_, _> = HashMap::default();
     for (view, camera, depth_prepass, camera_3d, _msaa) in &views_3d {
         if !opaque_3d_phases.contains_key(&view)
             || !alpha_mask_3d_phases.contains_key(&view)

@@ -655,7 +655,7 @@ pub fn prepare_core_3d_depth_textures(
             .or_insert_with(|| usage);
     }

-    let mut textures = HashMap::default();
+    let mut textures: HashMap<_, _> = HashMap::default();
     for (entity, camera, _, camera_3d, msaa) in &views_3d {
         let Some(physical_target_size) = camera.physical_target_size else {
             continue;

@@ -718,7 +718,7 @@ pub fn prepare_core_3d_transmission_textures(
     transparent_3d_phases: Res<ViewSortedRenderPhases<Transparent3d>>,
     views_3d: Query<(Entity, &ExtractedCamera, &Camera3d, &ExtractedView)>,
 ) {
-    let mut textures = HashMap::default();
+    let mut textures: HashMap<_, _> = HashMap::default();
     for (entity, camera, camera_3d, view) in &views_3d {
         if !opaque_3d_phases.contains_key(&entity)
             || !alpha_mask_3d_phases.contains_key(&entity)

@@ -826,11 +826,11 @@ pub fn prepare_prepass_textures(
         Has<DeferredPrepass>,
     )>,
 ) {
-    let mut depth_textures = HashMap::default();
-    let mut normal_textures = HashMap::default();
-    let mut deferred_textures = HashMap::default();
-    let mut deferred_lighting_id_textures = HashMap::default();
-    let mut motion_vectors_textures = HashMap::default();
+    let mut depth_textures: HashMap<_, _> = HashMap::default();
+    let mut normal_textures: HashMap<_, _> = HashMap::default();
+    let mut deferred_textures: HashMap<_, _> = HashMap::default();
+    let mut deferred_lighting_id_textures: HashMap<_, _> = HashMap::default();
+    let mut motion_vectors_textures: HashMap<_, _> = HashMap::default();
     for (
         entity,
         camera,

@@ -159,7 +159,7 @@ fn configure_depth_texture_usages(

     // Find all the render target that potentially uses OIT
     let primary_window = p.get_single().ok();
-    let mut render_target_has_oit = HashSet::new();
+    let mut render_target_has_oit: HashSet<_> = HashSet::default();
     for (camera, has_oit) in &cameras {
         if has_oit {
             render_target_has_oit.insert(camera.target.normalize(primary_window));

@@ -43,7 +43,7 @@ fn prepare_view_upscaling_pipelines(
     blit_pipeline: Res<BlitPipeline>,
     view_targets: Query<(Entity, &ViewTarget, Option<&ExtractedCamera>)>,
 ) {
-    let mut output_textures = HashSet::new();
+    let mut output_textures: HashSet<_> = HashSet::default();
     for (entity, view_target, camera) in view_targets.iter() {
         let out_texture_id = view_target.out_texture().id();
         let blend_state = if let Some(extracted_camera) = camera {

@@ -52,7 +52,7 @@ struct DrawnLines {
 impl DrawnLines {
     fn new(width: f32) -> Self {
         DrawnLines {
-            lines: HashMap::new(),
+            lines: HashMap::default(),
             width,
         }
     }

@@ -3,7 +3,7 @@ use core::hash::{Hash, Hasher};

 use bevy_app::{App, SubApp};
 use bevy_ecs::system::{Deferred, Res, Resource, SystemBuffer, SystemParam};
-use bevy_utils::{hashbrown::HashMap, Duration, Instant, PassHash};
+use bevy_utils::{Duration, HashMap, Instant, PassHash};
 use const_fnv1a_hash::fnv1a_hash_str_64;

 use crate::DEFAULT_MAX_HISTORY_LENGTH;

@@ -402,7 +402,7 @@ impl Archetype {
             // component in the `table_components` vector
             component_index
                 .entry(component_id)
-                .or_insert_with(HashMap::new)
+                .or_default()
                 .insert(id, ArchetypeRecord { column: Some(idx) });
         }

@@ -420,7 +420,7 @@ impl Archetype {
             );
             component_index
                 .entry(component_id)
-                .or_insert_with(HashMap::new)
+                .or_default()
                 .insert(id, ArchetypeRecord { column: None });
         }
         Self {

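Note: the two `Archetype` hunks above switch the entry API from `or_insert_with(HashMap::new)` to `or_default()`; `or_default()` only requires the value type to implement `Default`, so it keeps compiling no matter which hasher the inner map defaults to. A sketch of the pattern, with hypothetical index types chosen purely for illustration:

    use bevy_utils::HashMap;

    fn record(
        index: &mut HashMap<u32, HashMap<u32, usize>>,
        component_id: u32,
        archetype_id: u32,
        column: usize,
    ) {
        index
            .entry(component_id)
            .or_default() // works for any `Default` value, custom hasher included
            .insert(archetype_id, column);
    }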
@@ -380,7 +380,7 @@ impl BundleInfo {

         if deduped.len() != component_ids.len() {
             // TODO: Replace with `Vec::partition_dedup` once https://github.com/rust-lang/rust/issues/54279 is stabilized
-            let mut seen = HashSet::new();
+            let mut seen: HashSet<_> = HashSet::default();
             let mut dups = Vec::new();
             for id in component_ids {
                 if !seen.insert(id) {

@@ -1421,8 +1421,11 @@ impl Bundles {
             .or_insert_with(|| {
                 let (id, storages) =
                     initialize_dynamic_bundle(bundle_infos, components, Vec::from(component_ids));
+                // SAFETY: We know the ID is unique
+                unsafe {
                 self.dynamic_bundle_storages
                     .insert_unique_unchecked(id, storages);
+                }
                 (component_ids.into(), id)
             });
         *bundle_id

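Note: the new `unsafe` block above appears to reflect hashbrown 0.15 marking `insert_unique_unchecked` as an `unsafe fn` (the caller must guarantee the key is absent). Elsewhere in this commit (see `extract_meshes_for_cpu_building` further down) the call is simply downgraded to a plain `insert`. A hedged sketch of the safe-wrapper shape:

    use bevy_utils::HashMap;

    fn add_unique(map: &mut HashMap<u32, &'static str>, key: u32) {
        debug_assert!(!map.contains_key(&key));
        // SAFETY: `key` was just checked to be absent from `map`.
        unsafe {
            map.insert_unique_unchecked(key, "value");
        }
    }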
@@ -28,7 +28,8 @@ impl BuildHasher for EntityHash {
 ///
 /// If you have an unusual case -- say all your indices are multiples of 256
 /// or most of the entities are dead generations -- then you might want also to
-/// try [`AHasher`](bevy_utils::AHasher) for a slower hash computation but fewer lookup conflicts.
+/// try [`DefaultHasher`](bevy_utils::DefaultHasher) for a slower hash
+/// computation but fewer lookup conflicts.
 #[derive(Debug, Default)]
 pub struct EntityHasher {
     hash: u64,

@@ -113,7 +113,7 @@ mod tests {
         let mut entity_map = EntityHashMap::<Entity>::default();
         let mut remapped = Foo {
             ordered: vec![],
-            unordered: HashSet::new(),
+            unordered: HashSet::default(),
             single: Entity::PLACEHOLDER,
             not_an_entity: foo.not_an_entity.clone(),
         };

@@ -164,8 +164,8 @@ impl<T: ?Sized> Default for Interner<T> {

 #[cfg(test)]
 mod tests {
-    use core::hash::{Hash, Hasher};
-    use std::collections::hash_map::DefaultHasher;
+    use bevy_utils::FixedHasher;
+    use core::hash::{BuildHasher, Hash, Hasher};

     use crate::intern::{Internable, Interned, Interner};

@@ -250,13 +250,8 @@ mod tests {

         assert_eq!(a, b);

-        let mut hasher = DefaultHasher::default();
-        a.hash(&mut hasher);
-        let hash_a = hasher.finish();
-
-        let mut hasher = DefaultHasher::default();
-        b.hash(&mut hasher);
-        let hash_b = hasher.finish();
+        let hash_a = FixedHasher.hash_one(a);
+        let hash_b = FixedHasher.hash_one(b);

         assert_eq!(hash_a, hash_b);
     }

@@ -417,7 +417,7 @@ mod tests {
         let mut world = World::new();
         let e = world.spawn((TableStored("abc"), A(123))).id();
         let f = world.spawn((TableStored("def"), A(456), B(1))).id();
-        let mut results = HashSet::new();
+        let mut results: HashSet<_> = HashSet::default();
         world
             .query::<(Entity, &A)>()
             .iter(&world)

@@ -594,7 +594,9 @@ mod tests {
             .collect::<HashSet<_>>();
         assert_eq!(
             ents,
-            HashSet::from([(e, None, A(123)), (f, Some(SparseStored(1)), A(456))])
+            [(e, None, A(123)), (f, Some(SparseStored(1)), A(456))]
+                .into_iter()
+                .collect::<HashSet<_>>()
         );
     }

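Note: `HashSet::from([...])` and `.into()` come from std's `From<[T; N]>` impl, which is only provided for sets using the std `RandomState`; the test hunks here and below rebuild the expected sets from iterators instead, which works with any hasher. The shape of the rewrite, assuming the `bevy_utils` re-export:

    use bevy_utils::HashSet;

    fn expected() -> HashSet<(u32, &'static str)> {
        // Hasher-agnostic replacement for `HashSet::from([...])`.
        [(1, "1"), (2, "2"), (3, "3")].into_iter().collect()
    }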
@@ -626,7 +628,9 @@ mod tests {
                 .iter(&world)
                 .map(|(e, &i, &b)| (e, i, b))
                 .collect::<HashSet<_>>(),
-            HashSet::from([(e1, A(1), B(3)), (e2, A(2), B(4))])
+            [(e1, A(1), B(3)), (e2, A(2), B(4))]
+                .into_iter()
+                .collect::<HashSet<_>>()
         );
         assert_eq!(world.entity_mut(e1).take::<A>(), Some(A(1)));
         assert_eq!(

@@ -643,7 +647,9 @@ mod tests {
                 .iter(&world)
                 .map(|(e, &B(b), &TableStored(s))| (e, b, s))
                 .collect::<HashSet<_>>(),
-            HashSet::from([(e2, 4, "xyz"), (e1, 3, "abc")])
+            [(e2, 4, "xyz"), (e1, 3, "abc")]
+                .into_iter()
+                .collect::<HashSet<_>>()
         );
         world.entity_mut(e1).insert(A(43));
         assert_eq!(

@@ -652,7 +658,9 @@ mod tests {
                 .iter(&world)
                 .map(|(e, &i, &b)| (e, i, b))
                 .collect::<HashSet<_>>(),
-            HashSet::from([(e2, A(2), B(4)), (e1, A(43), B(3))])
+            [(e2, A(2), B(4)), (e1, A(43), B(3))]
+                .into_iter()
+                .collect::<HashSet<_>>()
         );
         world.entity_mut(e1).insert(C);
         assert_eq!(

@@ -950,7 +958,7 @@ mod tests {

         assert_eq!(
             get_filtered::<Changed<A>>(&mut world),
-            HashSet::from([e1, e3])
+            [e1, e3].into_iter().collect::<HashSet<_>>()
         );

         // ensure changing an entity's archetypes also moves its changed state

@@ -958,7 +966,7 @@ mod tests {

         assert_eq!(
             get_filtered::<Changed<A>>(&mut world),
-            HashSet::from([e3, e1]),
+            [e3, e1].into_iter().collect::<HashSet<_>>(),
             "changed entities list should not change"
         );

@@ -967,7 +975,7 @@ mod tests {

         assert_eq!(
             get_filtered::<Changed<A>>(&mut world),
-            HashSet::from([e3, e1]),
+            [e3, e1].into_iter().collect::<HashSet<_>>(),
             "changed entities list should not change"
         );

@@ -975,7 +983,7 @@ mod tests {
         assert!(world.despawn(e2));
         assert_eq!(
             get_filtered::<Changed<A>>(&mut world),
-            HashSet::from([e3, e1]),
+            [e3, e1].into_iter().collect::<HashSet<_>>(),
             "changed entities list should not change"
         );

@@ -983,7 +991,7 @@ mod tests {
         assert!(world.despawn(e1));
         assert_eq!(
             get_filtered::<Changed<A>>(&mut world),
-            HashSet::from([e3]),
+            [e3].into_iter().collect::<HashSet<_>>(),
             "e1 should no longer be returned"
         );

@@ -994,11 +1002,20 @@ mod tests {
         let e4 = world.spawn_empty().id();

         world.entity_mut(e4).insert(A(0));
-        assert_eq!(get_filtered::<Changed<A>>(&mut world), HashSet::from([e4]));
-        assert_eq!(get_filtered::<Added<A>>(&mut world), HashSet::from([e4]));
+        assert_eq!(
+            get_filtered::<Changed<A>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );
+        assert_eq!(
+            get_filtered::<Added<A>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );

         world.entity_mut(e4).insert(A(1));
-        assert_eq!(get_filtered::<Changed<A>>(&mut world), HashSet::from([e4]));
+        assert_eq!(
+            get_filtered::<Changed<A>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );

         world.clear_trackers();

@@ -1007,9 +1024,18 @@ mod tests {
         world.entity_mut(e4).insert((A(0), B(0)));

         assert!(get_filtered::<Added<A>>(&mut world).is_empty());
-        assert_eq!(get_filtered::<Changed<A>>(&mut world), HashSet::from([e4]));
-        assert_eq!(get_filtered::<Added<B>>(&mut world), HashSet::from([e4]));
-        assert_eq!(get_filtered::<Changed<B>>(&mut world), HashSet::from([e4]));
+        assert_eq!(
+            get_filtered::<Changed<A>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );
+        assert_eq!(
+            get_filtered::<Added<B>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );
+        assert_eq!(
+            get_filtered::<Changed<B>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );
     }

     #[test]

@@ -1041,19 +1067,19 @@ mod tests {

         assert_eq!(
             get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e1, e3])
+            [e1, e3].into_iter().collect::<HashSet<_>>()
         );

         // ensure changing an entity's archetypes also moves its changed state
         world.entity_mut(e1).insert(C);

-        assert_eq!(get_filtered::<Changed<SparseStored>>(&mut world), HashSet::from([e3, e1]), "changed entities list should not change (although the order will due to archetype moves)");
+        assert_eq!(get_filtered::<Changed<SparseStored>>(&mut world), [e3, e1].into_iter().collect::<HashSet<_>>(), "changed entities list should not change (although the order will due to archetype moves)");

         // spawning a new SparseStored entity should not change existing changed state
         world.entity_mut(e1).insert(SparseStored(0));
         assert_eq!(
             get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e3, e1]),
+            [e3, e1].into_iter().collect::<HashSet<_>>(),
             "changed entities list should not change"
         );

@@ -1061,7 +1087,7 @@ mod tests {
         assert!(world.despawn(e2));
         assert_eq!(
             get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e3, e1]),
+            [e3, e1].into_iter().collect::<HashSet<_>>(),
             "changed entities list should not change"
         );

@@ -1069,7 +1095,7 @@ mod tests {
         assert!(world.despawn(e1));
         assert_eq!(
             get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e3]),
+            [e3].into_iter().collect::<HashSet<_>>(),
             "e1 should no longer be returned"
         );

@@ -1082,17 +1108,17 @@ mod tests {
         world.entity_mut(e4).insert(SparseStored(0));
         assert_eq!(
             get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e4])
+            [e4].into_iter().collect::<HashSet<_>>()
         );
         assert_eq!(
             get_filtered::<Added<SparseStored>>(&mut world),
-            HashSet::from([e4])
+            [e4].into_iter().collect::<HashSet<_>>()
         );

         world.entity_mut(e4).insert(A(1));
         assert_eq!(
             get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e4])
+            [e4].into_iter().collect::<HashSet<_>>()
         );

         world.clear_trackers();

@@ -1104,7 +1130,7 @@ mod tests {
         assert!(get_filtered::<Added<SparseStored>>(&mut world).is_empty());
         assert_eq!(
             get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e4])
+            [e4].into_iter().collect::<HashSet<_>>()
         );
     }

@@ -1288,7 +1314,12 @@ mod tests {
             .iter(&world)
             .map(|(a, b)| (a.0, b.0))
             .collect::<HashSet<_>>();
-        assert_eq!(results, HashSet::from([(1, "1"), (2, "2"), (3, "3"),]));
+        assert_eq!(
+            results,
+            [(1, "1"), (2, "2"), (3, "3"),]
+                .into_iter()
+                .collect::<HashSet<_>>()
+        );

         let removed_bundle = world.entity_mut(e2).take::<(B, TableStored)>().unwrap();
         assert_eq!(removed_bundle, (B(2), TableStored("2")));

@@ -1297,11 +1328,14 @@ mod tests {
             .iter(&world)
             .map(|(a, b)| (a.0, b.0))
             .collect::<HashSet<_>>();
-        assert_eq!(results, HashSet::from([(1, "1"), (3, "3"),]));
+        assert_eq!(
+            results,
+            [(1, "1"), (3, "3"),].into_iter().collect::<HashSet<_>>()
+        );

         let mut a_query = world.query::<&A>();
         let results = a_query.iter(&world).map(|a| a.0).collect::<HashSet<_>>();
-        assert_eq!(results, HashSet::from([1, 3, 2]));
+        assert_eq!(results, [1, 3, 2].into_iter().collect::<HashSet<_>>());

         let entity_ref = world.entity(e2);
         assert_eq!(

@@ -116,7 +116,7 @@ impl<V: NodeTrait + Debug> Default for CheckGraphResults<V> {
     fn default() -> Self {
         Self {
             reachable: FixedBitSet::new(),
-            connected: HashSet::new(),
+            connected: HashSet::default(),
             disconnected: Vec::new(),
             transitive_edges: Vec::new(),
             transitive_reduction: DiGraphMap::new(),

@@ -150,7 +150,7 @@ where
     let n = graph.node_count();

     // build a copy of the graph where the nodes and edges appear in topsorted order
-    let mut map = HashMap::with_capacity(n);
+    let mut map: HashMap<_, _> = HashMap::with_capacity_and_hasher(n, Default::default());
     let mut topsorted = DiGraphMap::<V, ()>::new();
     // iterate nodes in topological order
     for (i, &node) in topological_order.iter().enumerate() {

@@ -163,7 +163,7 @@ where
     }

     let mut reachable = FixedBitSet::with_capacity(n * n);
-    let mut connected = HashSet::new();
+    let mut connected: HashSet<_> = HashSet::default();
     let mut disconnected = Vec::new();

     let mut transitive_edges = Vec::new();

@@ -272,15 +272,17 @@ where
     // path of nodes that may form a cycle
     let mut path = Vec::with_capacity(subgraph.node_count());
     // we mark nodes as "blocked" to avoid finding permutations of the same cycles
-    let mut blocked = HashSet::with_capacity(subgraph.node_count());
+    let mut blocked: HashSet<_> =
+        HashSet::with_capacity_and_hasher(subgraph.node_count(), Default::default());
     // connects nodes along path segments that can't be part of a cycle (given current root)
     // those nodes can be unblocked at the same time
     let mut unblock_together: HashMap<N, HashSet<N>> =
-        HashMap::with_capacity(subgraph.node_count());
+        HashMap::with_capacity_and_hasher(subgraph.node_count(), Default::default());
     // stack for unblocking nodes
     let mut unblock_stack = Vec::with_capacity(subgraph.node_count());
     // nodes can be involved in multiple cycles
-    let mut maybe_in_more_cycles: HashSet<N> = HashSet::with_capacity(subgraph.node_count());
+    let mut maybe_in_more_cycles: HashSet<N> =
+        HashSet::with_capacity_and_hasher(subgraph.node_count(), Default::default());
     // stack for DFS
     let mut stack = Vec::with_capacity(subgraph.node_count());

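Note: `with_capacity` is another constructor tied to the default-hasher type, so the graph code above pre-sizes its maps and sets with `with_capacity_and_hasher`, passing `Default::default()` for the hasher. A sketch, assuming the `bevy_utils` aliases:

    use bevy_utils::{HashMap, HashSet};

    fn presized(n: usize) -> (HashMap<u32, u32>, HashSet<u32>) {
        (
            HashMap::with_capacity_and_hasher(n, Default::default()),
            HashSet::with_capacity_and_hasher(n, Default::default()),
        )
    }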
@@ -37,7 +37,7 @@ impl Schedules {
     /// Constructs an empty `Schedules` with zero initial capacity.
     pub fn new() -> Self {
         Self {
-            inner: HashMap::new(),
+            inner: HashMap::default(),
             ignored_scheduling_ambiguities: BTreeSet::new(),
         }
     }

@@ -625,18 +625,18 @@ impl ScheduleGraph {
             system_conditions: Vec::new(),
             system_sets: Vec::new(),
             system_set_conditions: Vec::new(),
-            system_set_ids: HashMap::new(),
+            system_set_ids: HashMap::default(),
             uninit: Vec::new(),
             hierarchy: Dag::new(),
             dependency: Dag::new(),
             ambiguous_with: UnGraphMap::new(),
-            ambiguous_with_all: HashSet::new(),
+            ambiguous_with_all: HashSet::default(),
             conflicting_systems: Vec::new(),
             anonymous_sets: 0,
             changed: false,
             settings: default(),
             no_sync_edges: BTreeSet::new(),
-            auto_sync_node_ids: HashMap::new(),
+            auto_sync_node_ids: HashMap::default(),
         }
     }

@@ -1153,7 +1153,8 @@ impl ScheduleGraph {

         // calculate the number of sync points each sync point is from the beginning of the graph
         // use the same sync point if the distance is the same
-        let mut distances: HashMap<usize, Option<u32>> = HashMap::with_capacity(topo.len());
+        let mut distances: HashMap<usize, Option<u32>> =
+            HashMap::with_capacity_and_hasher(topo.len(), Default::default());
         for node in &topo {
             let add_sync_after = self.systems[node.index()].get().unwrap().has_deferred();

@@ -1230,8 +1231,9 @@ impl ScheduleGraph {
         hierarchy_graph: &GraphMap<NodeId, (), Directed>,
     ) -> (HashMap<NodeId, Vec<NodeId>>, HashMap<NodeId, FixedBitSet>) {
         let mut set_systems: HashMap<NodeId, Vec<NodeId>> =
-            HashMap::with_capacity(self.system_sets.len());
-        let mut set_system_bitsets = HashMap::with_capacity(self.system_sets.len());
+            HashMap::with_capacity_and_hasher(self.system_sets.len(), Default::default());
+        let mut set_system_bitsets =
+            HashMap::with_capacity_and_hasher(self.system_sets.len(), Default::default());
         for &id in hierarchy_topsort.iter().rev() {
             if id.is_system() {
                 continue;

@@ -1925,7 +1927,7 @@ impl ScheduleGraph {
     }

     fn names_of_sets_containing_node(&self, id: &NodeId) -> Vec<String> {
-        let mut sets = HashSet::new();
+        let mut sets: HashSet<_> = HashSet::default();
         self.traverse_sets_containing_node(*id, &mut |set_id| {
             !self.system_sets[set_id.index()].is_system_type() && sets.insert(set_id)
         });

@@ -716,7 +716,7 @@ impl<'w> EntityMut<'w> {
     /// let mut entity_mut = world.entity_mut(entity);
     /// let mut ptrs = entity_mut.get_mut_by_id(&HashSet::from_iter([x_id, y_id]))
     /// # .unwrap();
-    /// # let [mut x_ptr, mut y_ptr] = ptrs.get_many_mut([&x_id, &y_id]).unwrap();
+    /// # let [Some(mut x_ptr), Some(mut y_ptr)] = ptrs.get_many_mut([&x_id, &y_id]) else { unreachable!() };
     /// # assert_eq!((unsafe { x_ptr.as_mut().deref_mut::<X>() }, unsafe { y_ptr.as_mut().deref_mut::<Y>() }), (&mut X(42), &mut Y(10)));
     /// ```
     #[inline]

@@ -3445,7 +3445,7 @@ unsafe impl DynamicComponentFetch for &'_ HashSet<ComponentId> {
         self,
         cell: UnsafeEntityCell<'_>,
     ) -> Result<Self::Ref<'_>, EntityComponentError> {
-        let mut ptrs = HashMap::with_capacity(self.len());
+        let mut ptrs = HashMap::with_capacity_and_hasher(self.len(), Default::default());
         for &id in self {
             ptrs.insert(
                 id,

@@ -3460,7 +3460,7 @@ unsafe impl DynamicComponentFetch for &'_ HashSet<ComponentId> {
         self,
         cell: UnsafeEntityCell<'_>,
     ) -> Result<Self::Mut<'_>, EntityComponentError> {
-        let mut ptrs = HashMap::with_capacity(self.len());
+        let mut ptrs = HashMap::with_capacity_and_hasher(self.len(), Default::default());
         for &id in self {
             ptrs.insert(
                 id,

@@ -1234,8 +1234,7 @@ impl World {
     /// # use bevy_ecs::prelude::*;
     /// # use bevy_ecs::entity::EntityHash;
    /// # use bevy_ecs::entity::EntityHashSet;
-    /// # use bevy_utils::hashbrown::HashSet;
-    /// # use bevy_utils::hashbrown::hash_map::DefaultHashBuilder;
+    /// # use bevy_utils::HashSet;
     /// # let mut world = World::new();
     /// # let id1 = world.spawn_empty().id();
     /// # let id2 = world.spawn_empty().id();

@@ -3347,7 +3346,7 @@ impl World {
     /// // probably use something like `ReflectFromPtr` in a real-world scenario.
     ///
     /// // Create the hash map that will store the closures for each resource type
-    /// let mut closures: HashMap<TypeId, Box<dyn Fn(&Ptr<'_>)>> = HashMap::new();
+    /// let mut closures: HashMap<TypeId, Box<dyn Fn(&Ptr<'_>)>> = HashMap::default();
     ///
     /// // Add closure for `A`
     /// closures.insert(TypeId::of::<A>(), Box::new(|ptr| {

@@ -3424,7 +3423,7 @@ impl World {
     /// // probably use something like `ReflectFromPtr` in a real-world scenario.
     ///
     /// // Create the hash map that will store the mutator closures for each resource type
-    /// let mut mutators: HashMap<TypeId, Box<dyn Fn(&mut MutUntyped<'_>)>> = HashMap::new();
+    /// let mut mutators: HashMap<TypeId, Box<dyn Fn(&mut MutUntyped<'_>)>> = HashMap::default();
     ///
     /// // Add mutator closure for `A`
     /// mutators.insert(TypeId::of::<A>(), Box::new(|mut_untyped| {

@@ -4182,38 +4181,46 @@ mod tests {
         let baz_id = TypeId::of::<Baz>();
         assert_eq!(
             to_type_ids(world.inspect_entity(ent0).collect()),
-            [Some(foo_id), Some(bar_id), Some(baz_id)].into()
+            [Some(foo_id), Some(bar_id), Some(baz_id)]
+                .into_iter()
+                .collect::<HashSet<_>>()
         );
         assert_eq!(
             to_type_ids(world.inspect_entity(ent1).collect()),
-            [Some(foo_id), Some(bar_id)].into()
+            [Some(foo_id), Some(bar_id)]
+                .into_iter()
+                .collect::<HashSet<_>>()
         );
         assert_eq!(
             to_type_ids(world.inspect_entity(ent2).collect()),
-            [Some(bar_id), Some(baz_id)].into()
+            [Some(bar_id), Some(baz_id)]
+                .into_iter()
+                .collect::<HashSet<_>>()
         );
         assert_eq!(
             to_type_ids(world.inspect_entity(ent3).collect()),
-            [Some(foo_id), Some(baz_id)].into()
+            [Some(foo_id), Some(baz_id)]
+                .into_iter()
+                .collect::<HashSet<_>>()
         );
         assert_eq!(
             to_type_ids(world.inspect_entity(ent4).collect()),
-            [Some(foo_id)].into()
+            [Some(foo_id)].into_iter().collect::<HashSet<_>>()
         );
         assert_eq!(
             to_type_ids(world.inspect_entity(ent5).collect()),
-            [Some(bar_id)].into()
+            [Some(bar_id)].into_iter().collect::<HashSet<_>>()
         );
         assert_eq!(
             to_type_ids(world.inspect_entity(ent6).collect()),
-            [Some(baz_id)].into()
+            [Some(baz_id)].into_iter().collect::<HashSet<_>>()
         );
     }

     #[test]
     fn iterate_entities() {
         let mut world = World::new();
-        let mut entity_counters = HashMap::new();
+        let mut entity_counters: HashMap<_, _> = HashMap::default();

         let iterate_and_count_entities = |world: &World, entity_counters: &mut HashMap<_, _>| {
             entity_counters.clear();

@@ -220,7 +220,7 @@ async fn load_gltf<'a, 'b, 'c>(
         .to_string();
     let buffer_data = load_buffers(&gltf, load_context).await?;

-    let mut linear_textures = HashSet::default();
+    let mut linear_textures: HashSet<_> = HashSet::default();

     for material in gltf.materials() {
         if let Some(texture) = material.normal_texture() {

@@ -262,11 +262,11 @@ async fn load_gltf<'a, 'b, 'c>(

     #[cfg(feature = "bevy_animation")]
     let paths = {
-        let mut paths = HashMap::<usize, (usize, Vec<Name>)>::new();
+        let mut paths = HashMap::<usize, (usize, Vec<Name>)>::default();
         for scene in gltf.scenes() {
             for node in scene.nodes() {
                 let root_index = node.index();
-                paths_recur(node, &[], &mut paths, root_index, &mut HashSet::new());
+                paths_recur(node, &[], &mut paths, root_index, &mut HashSet::default());
             }
         }
         paths

@@ -279,8 +279,8 @@ async fn load_gltf<'a, 'b, 'c>(
         use bevy_math::{Quat, Vec4};
         use gltf::animation::util::ReadOutputs;
         let mut animations = vec![];
-        let mut named_animations = HashMap::default();
-        let mut animation_roots = HashSet::default();
+        let mut named_animations: HashMap<_, _> = HashMap::default();
+        let mut animation_roots: HashSet<_> = HashSet::default();
         for animation in gltf.animations() {
             let mut animation_clip = AnimationClip::default();
             for channel in animation.channels() {

@@ -567,7 +567,7 @@ async fn load_gltf<'a, 'b, 'c>(
     }

     let mut materials = vec![];
-    let mut named_materials = HashMap::default();
+    let mut named_materials: HashMap<_, _> = HashMap::default();
     // Only include materials in the output if they're set to be retained in the MAIN_WORLD and/or RENDER_WORLD by the load_materials flag
     if !settings.load_materials.is_empty() {
         // NOTE: materials must be loaded after textures because image load() calls will happen before load_with_settings, preventing is_srgb from being set properly

@@ -580,9 +580,9 @@ async fn load_gltf<'a, 'b, 'c>(
         }
     }
     let mut meshes = vec![];
-    let mut named_meshes = HashMap::default();
-    let mut meshes_on_skinned_nodes = HashSet::default();
-    let mut meshes_on_non_skinned_nodes = HashSet::default();
+    let mut named_meshes: HashMap<_, _> = HashMap::default();
+    let mut meshes_on_skinned_nodes: HashSet<_> = HashSet::default();
+    let mut meshes_on_non_skinned_nodes: HashSet<_> = HashSet::default();
     for gltf_node in gltf.nodes() {
         if gltf_node.skin().is_some() {
             if let Some(mesh) = gltf_node.mesh() {

@@ -747,10 +747,10 @@ async fn load_gltf<'a, 'b, 'c>(
         })
         .collect();

-    let mut nodes = HashMap::<usize, Handle<GltfNode>>::new();
-    let mut named_nodes = HashMap::new();
+    let mut nodes = HashMap::<usize, Handle<GltfNode>>::default();
+    let mut named_nodes: HashMap<_, _> = HashMap::default();
     let mut skins = vec![];
-    let mut named_skins = HashMap::default();
+    let mut named_skins: HashMap<_, _> = HashMap::default();
     for node in GltfTreeIterator::try_new(&gltf)? {
         let skin = node.skin().map(|skin| {
             let joints = skin

@@ -812,12 +812,12 @@ async fn load_gltf<'a, 'b, 'c>(
         .collect();

     let mut scenes = vec![];
-    let mut named_scenes = HashMap::default();
+    let mut named_scenes: HashMap<_, _> = HashMap::default();
     let mut active_camera_found = false;
     for scene in gltf.scenes() {
         let mut err = None;
         let mut world = World::default();
-        let mut node_index_to_entity_map = HashMap::new();
+        let mut node_index_to_entity_map: HashMap<_, _> = HashMap::default();
         let mut entity_to_skin_index_map = EntityHashMap::default();
         let mut scene_load_context = load_context.begin_labeled_asset();

@@ -1868,7 +1868,7 @@ impl<'a> GltfTreeIterator<'a> {
             .collect::<HashMap<_, _>>();

         let mut nodes = Vec::new();
-        let mut warned_about_max_joints = HashSet::new();
+        let mut warned_about_max_joints: HashSet<_> = HashSet::default();
         while let Some(index) = empty_children.pop_front() {
             if let Some(skin) = unprocessed_nodes.get(&index).unwrap().0.skin() {
                 if skin.joints().len() > MAX_JOINTS && warned_about_max_joints.insert(skin.index())

@@ -1059,7 +1059,7 @@ mod tests {
     use crate::{Mesh, MeshBuilder, Meshable, VertexAttributeValues};

     fn count_distinct_positions(points: &[[f32; 3]]) -> usize {
-        let mut map = HashSet::new();
+        let mut map: HashSet<_> = HashSet::default();
         for point in points {
             map.insert(point.map(FloatOrd));
         }

@@ -23,7 +23,7 @@ use bevy_render::{
     sync_world::RenderEntity,
     Extract,
 };
-use bevy_utils::{hashbrown::HashSet, tracing::warn};
+use bevy_utils::{tracing::warn, HashSet};

 pub(crate) use crate::cluster::assign::assign_objects_to_clusters;
 use crate::MeshPipeline;

@@ -631,7 +631,7 @@ where
     fn new() -> RenderViewLightProbes<C> {
         RenderViewLightProbes {
             binding_index_to_textures: vec![],
-            cubemap_to_binding_index: HashMap::new(),
+            cubemap_to_binding_index: HashMap::default(),
             render_light_probes: vec![],
             view_light_probe_info: C::ViewLightProbeInfo::default(),
         }

@@ -273,7 +273,7 @@ fn find_connected_meshlets(
     }

     // For each meshlet pair, count how many vertices they share
-    let mut meshlet_pair_to_shared_vertex_count = HashMap::new();
+    let mut meshlet_pair_to_shared_vertex_count = <HashMap<_, _>>::default();
     for vertex_meshlet_ids in vertices_to_meshlets {
         for (meshlet_queue_id1, meshlet_queue_id2) in
             vertex_meshlet_ids.into_iter().tuple_combinations()

@@ -76,8 +76,8 @@ impl InstanceManager {
             view_instance_visibility: EntityHashMap::default(),

             next_material_id: 0,
-            material_id_lookup: HashMap::new(),
-            material_ids_present_in_scene: HashSet::new(),
+            material_id_lookup: HashMap::default(),
+            material_ids_present_in_scene: HashSet::default(),
         }
     }

@@ -47,7 +47,7 @@ impl FromWorld for MeshletMeshManager {
                 "meshlet_simplification_errors",
                 render_device,
             ),
-            meshlet_mesh_slices: HashMap::new(),
+            meshlet_mesh_slices: HashMap::default(),
         }
     }
 }

@@ -954,7 +954,7 @@ pub fn extract_meshes_for_cpu_building(
     render_mesh_instances.clear();
     for queue in render_mesh_instance_queues.iter_mut() {
         for (entity, render_mesh_instance) in queue.drain(..) {
-            render_mesh_instances.insert_unique_unchecked(entity.into(), render_mesh_instance);
+            render_mesh_instances.insert(entity.into(), render_mesh_instance);
         }
     }
 }

@@ -226,7 +226,7 @@ pub fn update_interactions(
     // need to be able to insert the interaction component on entities if they do not exist. To do
     // so we need to know the final aggregated interaction state to avoid the scenario where we set
     // an entity to `Pressed`, then overwrite that with a lower precedent like `Hovered`.
-    let mut new_interaction_state = HashMap::<Entity, PickingInteraction>::new();
+    let mut new_interaction_state = HashMap::<Entity, PickingInteraction>::default();
     for (pointer, pointer_press, mut pointer_interaction) in &mut pointers {
         if let Some(pointers_hovered_entities) = hover_map.get(pointer) {
             // Insert a sorted list of hit entities into the pointer's interaction component.

@@ -34,6 +34,9 @@ bevy_reflect_derive = { path = "derive", version = "0.15.0-dev" }
 bevy_utils = { path = "../bevy_utils", version = "0.15.0-dev" }
 bevy_ptr = { path = "../bevy_ptr", version = "0.15.0-dev" }

+# used by bevy-utils, but it also needs reflect impls
+foldhash = { version = "0.1.3", default-features = false }
+
 # other
 erased-serde = "0.4"
 disqualified = "1.0"
@@ -21,7 +21,7 @@ impl SerializationDataDef {
     /// Returns `Ok(Some(data))` if there are any fields needing to be skipped during serialization.
     /// Otherwise, returns `Ok(None)`.
     pub fn new(fields: &[StructField<'_>]) -> Result<Option<Self>, syn::Error> {
-        let mut skipped = HashMap::default();
+        let mut skipped: HashMap<_, _> = HashMap::default();

         for field in fields {
             match field.attrs.ignore {
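The recurring `let x: HashMap<_, _> = HashMap::default();` annotations are not noise: in expression position a type alias's parameter defaults are not applied, so `HashMap::default()` leaves the hasher type an unconstrained inference variable, while naming the alias in type position re-applies the `S = FixedHasher` default. A sketch of the quirk, using std types as stand-ins:

    use core::hash::BuildHasherDefault;
    use std::collections::hash_map::DefaultHasher;

    type Map<K, V, S = BuildHasherDefault<DefaultHasher>> = std::collections::HashMap<K, V, S>;

    fn demo() {
        // Expression position: the `S` default is ignored, so this would be
        // ambiguous -- nothing ever pins the hasher type:
        // let mut m = Map::default(); // error: type annotations needed
        //
        // Type position: defaults apply, so `S` is resolved by the alias:
        let mut m: Map<_, _> = Map::default();
        m.insert("k", 1);
    }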
crates/bevy_reflect/src/impls/foldhash.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
+use crate::{self as bevy_reflect, impl_type_path};
+
+impl_type_path!(::foldhash::fast::FoldHasher);
+impl_type_path!(::foldhash::fast::FixedState);
+impl_type_path!(::foldhash::fast::RandomState);
+impl_type_path!(::foldhash::quality::FoldHasher);
+impl_type_path!(::foldhash::quality::FixedState);
+impl_type_path!(::foldhash::quality::RandomState);
@@ -821,6 +821,7 @@ macro_rules! impl_reflect_for_hashmap {
 }

 impl_reflect_for_hashmap!(::std::collections::HashMap<K, V, S>);
+impl_type_path!(::core::hash::BuildHasherDefault<H>);
 impl_type_path!(::std::collections::hash_map::RandomState);
 impl_type_path!(::std::collections::HashMap<K, V, S>);
 #[cfg(feature = "functions")]
@@ -833,7 +834,6 @@ crate::func::macros::impl_function_traits!(::std::collections::HashMap<K, V, S>;
 );

 impl_reflect_for_hashmap!(bevy_utils::hashbrown::HashMap<K, V, S>);
-impl_type_path!(::bevy_utils::hashbrown::hash_map::DefaultHashBuilder);
 impl_type_path!(::bevy_utils::hashbrown::HashMap<K, V, S>);
 #[cfg(feature = "functions")]
 crate::func::macros::impl_function_traits!(::bevy_utils::hashbrown::HashMap<K, V, S>;
@@ -1047,7 +1047,7 @@ macro_rules! impl_reflect_for_hashset {
 }

 impl_type_path!(::bevy_utils::NoOpHash);
-impl_type_path!(::bevy_utils::FixedState);
+impl_type_path!(::bevy_utils::FixedHasher);

 impl_reflect_for_hashset!(::std::collections::HashSet<V,S>);
 impl_type_path!(::std::collections::HashSet<V, S>);
@@ -2316,10 +2316,10 @@ mod tests {

     #[test]
     fn should_partial_eq_hash_map() {
-        let mut a = HashMap::new();
+        let mut a: HashMap<_, _> = HashMap::default();
         a.insert(0usize, 1.23_f64);
         let b = a.clone();
-        let mut c = HashMap::new();
+        let mut c: HashMap<_, _> = HashMap::default();
         c.insert(0usize, 3.21_f64);

         let a: &dyn PartialReflect = &a;
@@ -296,7 +296,7 @@ mod tests {

     #[test]
     fn should_cast_mut() {
-        let mut value: HashSet<i32> = HashSet::new();
+        let mut value: HashSet<i32> = HashSet::default();

         let result = value.reflect_mut().as_set();
         assert!(result.is_ok());
@@ -576,6 +576,7 @@ mod type_path;
 mod type_registry;

 mod impls {
+    mod foldhash;
     mod std;

     #[cfg(feature = "glam")]
@@ -1144,11 +1145,11 @@ mod tests {
         #[derive(Reflect, Eq, PartialEq, Debug)]
         struct Baz(String);

-        let mut hash_map = HashMap::default();
+        let mut hash_map: HashMap<_, _> = HashMap::default();
         hash_map.insert(1, 1);
         hash_map.insert(2, 2);

-        let mut hash_map_baz = HashMap::default();
+        let mut hash_map_baz: HashMap<_, _> = HashMap::default();
         hash_map_baz.insert(1, Bar { x: 0 });

         let mut foo = Foo {
@@ -1213,12 +1214,12 @@ mod tests {

         foo.apply(&foo_patch);

-        let mut hash_map = HashMap::default();
+        let mut hash_map: HashMap<_, _> = HashMap::default();
         hash_map.insert(1, 1);
         hash_map.insert(2, 3);
         hash_map.insert(3, 4);

-        let mut hash_map_baz = HashMap::default();
+        let mut hash_map_baz: HashMap<_, _> = HashMap::default();
         hash_map_baz.insert(1, Bar { x: 7 });

         let expected_foo = Foo {
@@ -1237,7 +1238,7 @@ mod tests {
         let new_foo = Foo::from_reflect(&foo_patch)
             .expect("error while creating a concrete type from a dynamic type");

-        let mut hash_map = HashMap::default();
+        let mut hash_map: HashMap<_, _> = HashMap::default();
         hash_map.insert(2, 3);
         hash_map.insert(3, 4);
@@ -1394,7 +1395,7 @@ mod tests {
             x: u32,
         }

-        let mut hash_map = HashMap::default();
+        let mut hash_map: HashMap<_, _> = HashMap::default();
         hash_map.insert(1, 1);
         hash_map.insert(2, 2);
         let foo = Foo {
@@ -1483,7 +1484,8 @@ mod tests {
         assert!(fields[0].reflect_partial_eq(&123_i32).unwrap_or_default());
         assert!(fields[1].reflect_partial_eq(&321_i32).unwrap_or_default());

-        let mut map_value: Box<dyn Map> = Box::new(HashMap::from([(123_i32, 321_i32)]));
+        let mut map_value: Box<dyn Map> =
+            Box::new([(123_i32, 321_i32)].into_iter().collect::<HashMap<_, _>>());
         let fields = map_value.drain();
         assert!(fields[0].0.reflect_partial_eq(&123_i32).unwrap_or_default());
         assert!(fields[0].1.reflect_partial_eq(&321_i32).unwrap_or_default());
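`HashMap::from([...])` (and `HashSet::from`, changed the same way in bevy_scene below) is only implemented by std for `RandomState` maps and by hashbrown for its own default hash builder, so with `FixedHasher` the literal is rebuilt through `FromIterator`, which is generic over any `S: BuildHasher + Default`. The pattern, sketched with std stand-ins:

    use core::hash::BuildHasherDefault;
    use std::collections::hash_map::DefaultHasher;

    type Map<K, V> = std::collections::HashMap<K, V, BuildHasherDefault<DefaultHasher>>;

    fn demo() {
        // `Map::from([("a", 1)])` does not exist for this hasher;
        // collecting an iterator works for any default-constructible hasher:
        let m: Map<&str, i32> = [("a", 1), ("b", 2)].into_iter().collect();
        assert_eq!(m["a"], 1);
    }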
@@ -1847,7 +1849,7 @@ mod tests {
         assert_eq!(usize::type_path(), info.key_ty().path());
         assert_eq!(f32::type_path(), info.value_ty().path());

-        let value: &dyn Reflect = &MyMap::new();
+        let value: &dyn Reflect = &MyMap::default();
         let info = value.reflect_type_info();
         assert!(info.is::<MyMap>());
@@ -2146,7 +2148,7 @@ mod tests {
             }
         }

-        let mut map = HashMap::new();
+        let mut map: HashMap<_, _> = HashMap::default();
         map.insert(123, 1.23);

         let test = Test {
@@ -2460,7 +2462,7 @@ bevy_reflect::tests::Test {
             // test reflected value
             value: u32,
         }
-        let mut map = HashMap::new();
+        let mut map: HashMap<_, _> = HashMap::default();
         map.insert(9, 10);
         let mut test_struct: DynamicStruct = TestStruct {
             tuple: (0, 1),
@@ -30,7 +30,7 @@ use crate::{
 ///
 /// ```
 /// use bevy_reflect::{PartialReflect, Reflect, Map};
-/// use bevy_utils::HashMap;
+/// use std::collections::HashMap;
 ///
 ///
 /// let foo: &mut dyn Map = &mut HashMap::<u32, bool>::new();
@@ -568,7 +568,7 @@ pub fn map_partial_eq<M: Map + ?Sized>(a: &M, b: &dyn PartialReflect) -> Option<
 ///
 /// # Example
 /// ```
-/// # use bevy_utils::HashMap;
+/// # use std::collections::HashMap;
 /// use bevy_reflect::Reflect;
 ///
 /// let mut my_map = HashMap::new();
@@ -148,10 +148,10 @@ mod tests {
     }

     fn get_my_struct() -> MyStruct {
-        let mut map = HashMap::new();
+        let mut map: HashMap<_, _> = HashMap::default();
         map.insert(64, 32);

-        let mut set = HashSet::new();
+        let mut set: HashSet<_> = HashSet::default();
         set.insert(64);

         MyStruct {
@@ -128,10 +128,10 @@ mod tests {
     }

     fn get_my_struct() -> MyStruct {
-        let mut map = HashMap::new();
+        let mut map: HashMap<_, _> = HashMap::default();
         map.insert(64, 32);

-        let mut set = HashSet::new();
+        let mut set: HashSet<_> = HashSet::default();
         set.insert(64);

         MyStruct {
@@ -30,7 +30,7 @@ use crate::{
 ///
 /// ```
 /// use bevy_reflect::{PartialReflect, Set};
-/// use bevy_utils::HashSet;
+/// use std::collections::HashSet;
 ///
 ///
 /// let foo: &mut dyn Set = &mut HashSet::<u32>::new();
@@ -431,7 +431,7 @@ pub fn set_partial_eq<M: Set>(a: &M, b: &dyn PartialReflect) -> Option<bool> {
 ///
 /// # Example
 /// ```
-/// # use bevy_utils::HashSet;
+/// # use std::collections::HashSet;
 /// use bevy_reflect::Reflect;
 ///
 /// let mut my_set = HashSet::new();
@@ -1,7 +1,7 @@
 //! Helpers for working with Bevy reflection.

 use crate::TypeInfo;
-use bevy_utils::{FixedState, NoOpHash, TypeIdMap};
+use bevy_utils::{DefaultHasher, FixedHasher, NoOpHash, TypeIdMap};
 use core::{
     any::{Any, TypeId},
     hash::BuildHasher,
@@ -298,6 +298,6 @@ impl<T: TypedProperty> Default for GenericTypeCell<T> {
 ///
 /// [`Reflect::reflect_hash`]: crate::Reflect
 #[inline]
-pub fn reflect_hasher() -> bevy_utils::AHasher {
-    FixedState.build_hasher()
+pub fn reflect_hasher() -> DefaultHasher {
+    FixedHasher.build_hasher()
 }
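`reflect_hasher` keeps its contract -- a deterministic hasher so `Reflect::reflect_hash` values are comparable -- but the concrete type changes from `AHasher` to the re-exported `DefaultHasher` (foldhash's `FoldHasher`). The updated bevy_asset tests use `BuildHasher::hash_one` as a one-shot shorthand for the same build/write/finish dance; a sketch of the equivalence for any `BuildHasher`:

    use core::hash::{BuildHasher, Hash, Hasher};

    fn hash_one_of<S: BuildHasher, T: Hash>(state: &S, value: &T) -> u64 {
        let by_hand = {
            let mut h = state.build_hasher();
            value.hash(&mut h);
            h.finish()
        };
        // `hash_one` performs exactly the three steps above:
        let shorthand = state.hash_one(value);
        assert_eq!(by_hand, shorthand);
        shorthand
    }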
@@ -364,7 +364,7 @@ pub fn process_remote_get_watching_request(

     let mut changed = Vec::new();
     let mut removed = Vec::new();
-    let mut errors = HashMap::new();
+    let mut errors: HashMap<_, _> = HashMap::default();

     'component_loop: for component_path in components {
         let Ok(type_registration) =
@@ -847,7 +847,7 @@ fn build_components_map<'a>(
     paths_and_reflect_components: impl Iterator<Item = (&'a str, &'a ReflectComponent)>,
     type_registry: &TypeRegistry,
 ) -> AnyhowResult<HashMap<String, Value>> {
-    let mut serialized_components_map = HashMap::new();
+    let mut serialized_components_map: HashMap<_, _> = HashMap::default();

     for (type_path, reflect_component) in paths_and_reflect_components {
         let Some(reflected) = reflect_component.reflect(entity_ref.clone()) else {
@@ -873,7 +873,7 @@ fn build_has_map<'a>(
     entity_ref: FilteredEntityRef,
     paths_and_reflect_components: impl Iterator<Item = (&'a str, &'a ReflectComponent)>,
 ) -> HashMap<String, Value> {
-    let mut has_map = HashMap::new();
+    let mut has_map: HashMap<_, _> = HashMap::default();

     for (type_path, reflect_component) in paths_and_reflect_components {
         let has = reflect_component.contains(entity_ref.clone());
@@ -57,7 +57,7 @@ impl Headers {
     /// Create a new instance of `Headers`.
     pub fn new() -> Self {
         Self {
-            headers: HashMap::new(),
+            headers: HashMap::default(),
         }
     }
@@ -888,7 +888,7 @@ pub fn camera_system<T: CameraProjection + Component>(
 ) {
     let primary_window = primary_window.iter().next();

-    let mut changed_window_ids = HashSet::new();
+    let mut changed_window_ids: HashSet<_> = HashSet::default();
     changed_window_ids.extend(window_created_events.read().map(|event| event.window));
     changed_window_ids.extend(window_resized_events.read().map(|event| event.window));
     let scale_factor_changed_window_ids: HashSet<_> = window_scale_factor_changed_events
@@ -927,7 +927,9 @@ pub fn camera_system<T: CameraProjection + Component>(
             // This can happen when the window is moved between monitors with different DPIs.
             // Without this, the viewport will take a smaller portion of the window moved to
             // a higher DPI monitor.
-            if normalized_target.is_changed(&scale_factor_changed_window_ids, &HashSet::new()) {
+            if normalized_target
+                .is_changed(&scale_factor_changed_window_ids, &HashSet::default())
+            {
                 if let (Some(new_scale_factor), Some(old_scale_factor)) = (
                     new_computed_target_info
                         .as_ref()
@@ -1201,8 +1203,8 @@ pub fn sort_cameras(
         ord => ord,
     });
     let mut previous_order_target = None;
-    let mut ambiguities = HashSet::new();
-    let mut target_counts = HashMap::new();
+    let mut ambiguities: HashSet<_> = HashSet::default();
+    let mut target_counts: HashMap<_, _> = HashMap::default();
     for sorted_camera in &mut sorted_cameras.0 {
         let new_order_target = (sorted_camera.order, sorted_camera.target.clone());
         if let Some(previous_order_target) = previous_order_target {
@@ -32,7 +32,7 @@ impl Node for CameraDriverNode {
     ) -> Result<(), NodeRunError> {
         let sorted_cameras = world.resource::<SortedCameras>();
         let windows = world.resource::<ExtractedWindows>();
-        let mut camera_windows = HashSet::new();
+        let mut camera_windows: HashSet<_> = HashSet::default();
         for sorted_camera in &sorted_cameras.0 {
             let Ok(camera) = self.cameras.get_manual(world, sorted_camera.entity) else {
                 continue;
@@ -14,10 +14,7 @@ use bevy_ecs::{
     system::{Res, ResMut, Resource},
     world::{FromWorld, World},
 };
-use bevy_utils::{
-    hashbrown::{HashMap, HashSet},
-    tracing::error,
-};
+use bevy_utils::{tracing::error, HashMap, HashSet};
 use offset_allocator::{Allocation, Allocator};
 use wgpu::{
     BufferDescriptor, BufferSize, BufferUsages, CommandEncoderDescriptor, DownlevelFlags,
@@ -327,10 +324,10 @@ impl FromWorld for MeshAllocator {
             .contains(DownlevelFlags::BASE_VERTEX);

         Self {
-            slabs: HashMap::new(),
-            slab_layouts: HashMap::new(),
-            mesh_id_to_vertex_slab: HashMap::new(),
-            mesh_id_to_index_slab: HashMap::new(),
+            slabs: HashMap::default(),
+            slab_layouts: HashMap::default(),
+            mesh_id_to_vertex_slab: HashMap::default(),
+            mesh_id_to_index_slab: HashMap::default(),
             next_slab_id: SlabId(0),
             general_vertex_slabs_supported,
         }
@@ -585,7 +582,7 @@ impl MeshAllocator {
     }

     fn free_meshes(&mut self, extracted_meshes: &ExtractedAssets<RenderMesh>) {
-        let mut empty_slabs = HashSet::new();
+        let mut empty_slabs: HashSet<_> = HashSet::default();
         for mesh_id in &extracted_meshes.removed {
             if let Some(slab_id) = self.mesh_id_to_vertex_slab.remove(mesh_id) {
                 self.free_allocation_in_slab(mesh_id, slab_id, &mut empty_slabs);
@@ -866,8 +863,8 @@ impl GeneralSlab {
         let mut new_slab = GeneralSlab {
             allocator: Allocator::new(slab_slot_capacity),
             buffer: None,
-            resident_allocations: HashMap::new(),
-            pending_allocations: HashMap::new(),
+            resident_allocations: HashMap::default(),
+            pending_allocations: HashMap::default(),
             element_layout: layout,
             slot_capacity: slab_slot_capacity,
         };
@@ -213,8 +213,8 @@ pub(crate) fn extract_render_asset<A: RenderAsset>(
         |world, mut cached_state: Mut<CachedExtractRenderAssetSystemState<A>>| {
             let (mut events, mut assets) = cached_state.state.get_mut(world);

-            let mut changed_assets = HashSet::default();
-            let mut removed = HashSet::default();
+            let mut changed_assets: HashSet<_> = HashSet::default();
+            let mut removed: HashSet<_> = HashSet::default();

             for event in events.read() {
                 #[allow(clippy::match_same_arms)]
@@ -234,7 +234,7 @@ pub(crate) fn extract_render_asset<A: RenderAsset>(
             }

             let mut extracted_assets = Vec::new();
-            let mut added = HashSet::new();
+            let mut added: HashSet<_> = HashSet::default();
             for id in changed_assets.drain() {
                 if let Some(asset) = assets.get(id) {
                     let asset_usage = A::asset_usage(asset);
@@ -10,7 +10,7 @@ use bevy_utils::{
     default,
     hashbrown::hash_map::{RawEntryMut, VacantEntry},
     tracing::error,
-    Entry, HashMap,
+    Entry, FixedHasher, HashMap,
 };
 use core::{fmt::Debug, hash::Hash};
 use derive_more::derive::{Display, Error, From};
@@ -132,7 +132,11 @@ impl<S: SpecializedMeshPipeline> SpecializedMeshPipelines<S> {
         specialize_pipeline: &S,
         key: S::Key,
         layout: &MeshVertexBufferLayoutRef,
-        entry: VacantEntry<(MeshVertexBufferLayoutRef, S::Key), CachedRenderPipelineId>,
+        entry: VacantEntry<
+            (MeshVertexBufferLayoutRef, S::Key),
+            CachedRenderPipelineId,
+            FixedHasher,
+        >,
     ) -> Result<CachedRenderPipelineId, SpecializedMeshPipelineError>
     where
         S: SpecializedMeshPipeline,
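hashbrown's `hash_map::VacantEntry<'a, K, V, S, A>` defaults `S` to hashbrown's own default hash builder, so once the surrounding cache is keyed by `FixedHasher` this signature has to spell the hasher out or the types no longer line up. The shape of the constraint, sketched with a stand-in hasher:

    use core::hash::BuildHasherDefault;
    use std::collections::hash_map::DefaultHasher;

    type Fixed = BuildHasherDefault<DefaultHasher>; // stand-in for FixedHasher

    // A function receiving a vacant entry from a map with a non-default
    // hasher must carry `S` in its signature, as above:
    fn fill_entry(
        entry: hashbrown::hash_map::VacantEntry<'_, u32, &'static str, Fixed>,
    ) -> &mut &'static str {
        entry.insert("computed once, cached in the map thereafter")
    }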
@@ -923,7 +923,7 @@ pub fn prepare_view_targets(
     )>,
     view_target_attachments: Res<ViewTargetAttachments>,
 ) {
-    let mut textures = HashMap::default();
+    let mut textures: HashMap<_, _> = HashMap::default();
     for (entity, camera, view, texture_usage, msaa) in cameras.iter() {
         let (Some(target_size), Some(target)) = (camera.physical_target_size, &camera.target)
         else {
@@ -47,7 +47,7 @@ impl SceneFilter {
     ///
     /// [`Denylist`]: SceneFilter::Denylist
     pub fn allow_all() -> Self {
-        Self::Denylist(HashSet::new())
+        Self::Denylist(HashSet::default())
     }

     /// Creates a filter where all types are denied.
@@ -56,7 +56,7 @@ impl SceneFilter {
     ///
     /// [`Allowlist`]: SceneFilter::Allowlist
     pub fn deny_all() -> Self {
-        Self::Allowlist(HashSet::new())
+        Self::Allowlist(HashSet::default())
     }

     /// Allow the given type, `T`.
@@ -88,7 +88,7 @@ impl SceneFilter {
     pub fn allow_by_id(mut self, type_id: TypeId) -> Self {
         match &mut self {
             Self::Unset => {
-                self = Self::Allowlist(HashSet::from([type_id]));
+                self = Self::Allowlist([type_id].into_iter().collect());
             }
             Self::Allowlist(list) => {
                 list.insert(type_id);
@@ -128,7 +128,7 @@ impl SceneFilter {
     #[must_use]
     pub fn deny_by_id(mut self, type_id: TypeId) -> Self {
         match &mut self {
-            Self::Unset => self = Self::Denylist(HashSet::from([type_id])),
+            Self::Unset => self = Self::Denylist([type_id].into_iter().collect()),
             Self::Allowlist(list) => {
                 list.remove(&type_id);
             }
@@ -222,7 +222,7 @@ impl IntoIterator for SceneFilter {

     fn into_iter(self) -> Self::IntoIter {
         match self {
-            Self::Unset => HashSet::new().into_iter(),
+            Self::Unset => Default::default(),
             Self::Allowlist(list) | Self::Denylist(list) => list.into_iter(),
         }
     }
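The `Unset` arm can produce an empty iterator directly because hashbrown 0.15's owned set iterator implements `Default` (this change compiling is the evidence), dropping the noisier `HashSet::new().into_iter()` construction. In isolation:

    // What `Default::default()` resolves to in the `Unset` arm above:
    fn empty_type_ids() -> hashbrown::hash_set::IntoIter<core::any::TypeId> {
        Default::default()
    }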
@@ -319,7 +319,7 @@ impl SceneSpawner {
         let spawned = self
             .spawned_dynamic_scenes
             .entry(handle.id())
-            .or_insert_with(HashSet::new);
+            .or_insert_with(HashSet::default);
         spawned.insert(instance_id);

         // Scenes with parents need more setup before they are ready.
@@ -426,7 +426,7 @@ impl SceneSpawner {
 pub fn scene_spawner_system(world: &mut World) {
     world.resource_scope(|world, mut scene_spawner: Mut<SceneSpawner>| {
         // remove any loading instances where parent is deleted
-        let mut dead_instances = HashSet::default();
+        let mut dead_instances: HashSet<_> = HashSet::default();
         scene_spawner
             .scenes_with_parent
             .retain(|(instance, parent)| {
@@ -476,7 +476,7 @@ impl<'a, 'de> Visitor<'de> for SceneMapVisitor<'a> {
     where
         A: MapAccess<'de>,
     {
-        let mut added = HashSet::new();
+        let mut added: HashSet<_> = HashSet::default();
         let mut entries = Vec::new();
         while let Some(registration) =
             map.next_key_seed(TypeRegistrationDeserializer::new(self.registry))?
@@ -271,7 +271,7 @@ impl<'a> TextureAtlasBuilder<'a> {
         let rect_placements = rect_placements.ok_or(TextureAtlasBuilderError::NotEnoughSpace)?;

         let mut texture_rects = Vec::with_capacity(rect_placements.packed_locations().len());
-        let mut texture_ids = HashMap::default();
+        let mut texture_ids: HashMap<_, _> = HashMap::default();
         // We iterate through the textures to place to respect the insertion order for the texture indices
         for (index, (image_id, texture)) in self.textures_to_place.iter().enumerate() {
             let (_, packed_location) = rect_placements.packed_locations().get(&index).unwrap();
@@ -718,7 +718,7 @@ mod tests {
             ui_child_entities.len()
         );

-        let child_node_map = HashMap::from_iter(
+        let child_node_map: HashMap<_, _> = HashMap::from_iter(
            ui_child_entities
                .iter()
                .map(|child_entity| (*child_entity, ui_surface.entity_to_taffy[child_entity])),
@@ -29,7 +29,7 @@ use bevy_ecs::{prelude::*, query::QueryData};
 use bevy_math::{Rect, Vec2};
 use bevy_render::prelude::*;
 use bevy_transform::prelude::*;
-use bevy_utils::hashbrown::HashMap;
+use bevy_utils::HashMap;
 use bevy_window::PrimaryWindow;

 use bevy_picking::backend::prelude::*;
@@ -70,7 +70,7 @@ pub fn ui_picking(
     mut output: EventWriter<PointerHits>,
 ) {
     // For each camera, the pointer and its position
-    let mut pointer_pos_by_camera = HashMap::<Entity, HashMap<PointerId, Vec2>>::new();
+    let mut pointer_pos_by_camera = HashMap::<Entity, HashMap<PointerId, Vec2>>::default();

     for (pointer_id, pointer_location) in
         pointers.iter().filter_map(|(pointer, pointer_location)| {
@@ -107,7 +107,7 @@ pub fn ui_picking(
     }

     // The list of node entities hovered for each (camera, pointer) combo
-    let mut hit_nodes = HashMap::<(Entity, PointerId), Vec<Entity>>::new();
+    let mut hit_nodes = HashMap::<(Entity, PointerId), Vec<Entity>>::default();

     // prepare an iterator that contains all the nodes that have the cursor in their rect,
     // from the top node to the bottom one. this will also reset the interaction to `None`
@@ -62,7 +62,7 @@ pub fn ui_stack_system(
                 maybe_zindex.map(|zindex| zindex.0).unwrap_or(0),
             ),
         ));
-        visited_root_nodes.insert_unique_unchecked(id);
+        visited_root_nodes.insert(id);
     }

     for (id, global_zindex, maybe_zindex) in zindex_global_node_query.iter() {
@@ -146,7 +146,7 @@ pub fn update_target_camera_system(
 ) {
     // Track updated entities to prevent redundant updates, as `Commands` changes are deferred,
     // and updates done for changed_children_query can overlap with itself or with root_node_query
-    let mut updated_entities = HashSet::new();
+    let mut updated_entities: HashSet<_> = HashSet::default();

     // Assuming that TargetCamera is manually set on the root node only,
     // update root nodes first, since it implies the biggest change
@@ -10,22 +10,18 @@ keywords = ["bevy"]

 [features]
 default = ["std", "serde"]
-std = [
-  "alloc",
-  "tracing/std",
-  "ahash/std",
-  "dep:thread_local",
-  "ahash/runtime-rng",
-]
-alloc = ["hashbrown/default"]
+std = ["alloc", "tracing/std", "foldhash/std", "dep:thread_local"]
+alloc = ["hashbrown"]
 detailed_trace = []
 serde = ["hashbrown/serde"]

 [dependencies]
-ahash = { version = "0.8.7", default-features = false, features = [
-  "compile-time-rng",
-] }
+foldhash = { version = "0.1.3", default-features = false }
 tracing = { version = "0.1", default-features = false }
-hashbrown = { version = "0.14.2", default-features = false }
+hashbrown = { version = "0.15.1", features = [
+  "equivalent",
+  "raw-entry",
+], optional = true, default-features = false }
 bevy_utils_proc_macros = { version = "0.15.0-dev", path = "macros" }
 thread_local = { version = "1.0", optional = true }
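Two details worth noting in this manifest: hashbrown becomes optional (re-enabled through the `alloc` feature), and it is pinned with `raw-entry` because hashbrown 0.15 moved the raw entry API, which `bevy_utils` still uses for `PreHashMap` below, behind that flag. A downstream crate opting out of std might depend on it along these lines (illustrative, not part of the commit):

    [dependencies]
    # `alloc` pulls hashbrown back in, and with it the HashMap/HashSet aliases.
    bevy_utils = { version = "0.15.0-dev", default-features = false, features = ["alloc"] }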
@@ -35,9 +35,29 @@ mod once;
 mod parallel_queue;
 mod time;

-pub use ahash::{AHasher, RandomState};
+/// For when you want a deterministic hasher.
+///
+/// Seed was randomly generated with a fair dice roll. Guaranteed to be random:
+/// <https://github.com/bevyengine/bevy/pull/1268/files#r560918426>
+const FIXED_HASHER: FixedState =
+    FixedState::with_seed(0b1001010111101110000001001100010000000011001001101011001001111000);
+
+/// Deterministic hasher based upon a random but fixed state.
+#[derive(Copy, Clone, Default, Debug)]
+pub struct FixedHasher;
+impl BuildHasher for FixedHasher {
+    type Hasher = DefaultHasher;
+
+    #[inline]
+    fn build_hasher(&self) -> Self::Hasher {
+        FIXED_HASHER.build_hasher()
+    }
+}
+
 pub use bevy_utils_proc_macros::*;
 pub use default::default;
+pub use foldhash::fast::{FixedState, FoldHasher as DefaultHasher, RandomState};
+#[cfg(feature = "alloc")]
 pub use hashbrown;
 #[cfg(feature = "std")]
 pub use parallel_queue::*;
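This is the heart of the migration: `FixedHasher` is a zero-sized `BuildHasher` over a foldhash `FixedState` with a hard-coded seed, so hashes are deterministic for a given Bevy version, and `DefaultHasher` now names `foldhash::fast::FoldHasher` rather than an aHash type. A small usage sketch of the determinism property, assuming the re-exports above:

    use bevy_utils::FixedHasher;
    use core::hash::BuildHasher;

    fn main() {
        // Independently built hashers from the zero-sized state agree:
        let h1 = FixedHasher.hash_one("bevy");
        let h2 = FixedHasher.hash_one("bevy");
        assert_eq!(h1, h2); // and the value repeats across runs, too
    }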
@@ -47,15 +67,15 @@ pub use tracing;
 #[cfg(feature = "alloc")]
 use alloc::boxed::Box;

+#[cfg(feature = "alloc")]
+use core::any::TypeId;
 use core::{
-    any::TypeId,
     fmt::Debug,
-    hash::{BuildHasher, BuildHasherDefault, Hash, Hasher},
+    hash::{BuildHasher, Hash, Hasher},
     marker::PhantomData,
     mem::ManuallyDrop,
     ops::Deref,
 };
-use hashbrown::hash_map::RawEntryMut;

 #[cfg(not(target_arch = "wasm32"))]
 mod conditional_send {
@@ -84,70 +104,60 @@ impl<T: core::future::Future + ConditionalSend> ConditionalSendFuture for T {}
 pub type BoxedFuture<'a, T> = core::pin::Pin<Box<dyn ConditionalSendFuture<Output = T> + 'a>>;

 /// A shortcut alias for [`hashbrown::hash_map::Entry`].
-pub type Entry<'a, K, V, S = BuildHasherDefault<AHasher>> = hashbrown::hash_map::Entry<'a, K, V, S>;
+#[cfg(feature = "alloc")]
+pub type Entry<'a, K, V, S = FixedHasher> = hashbrown::hash_map::Entry<'a, K, V, S>;

-/// A hasher builder that will create a fixed hasher.
-#[derive(Debug, Clone, Default)]
-pub struct FixedState;
-
-impl BuildHasher for FixedState {
-    type Hasher = AHasher;
-
-    #[inline]
-    fn build_hasher(&self) -> AHasher {
-        RandomState::with_seeds(
-            0b10010101111011100000010011000100,
-            0b00000011001001101011001001111000,
-            0b11001111011010110111100010110101,
-            0b00000100001111100011010011010101,
-        )
-        .build_hasher()
-    }
-}
-
-/// A [`HashMap`][hashbrown::HashMap] implementing aHash, a high
+/// A [`HashMap`][hashbrown::HashMap] implementing a high
 /// speed keyed hashing algorithm intended for use in in-memory hashmaps.
 ///
-/// aHash is designed for performance and is NOT cryptographically secure.
+/// The hashing algorithm is designed for performance
+/// and is NOT cryptographically secure.
 ///
 /// Within the same execution of the program iteration order of different
 /// `HashMap`s only depends on the order of insertions and deletions,
 /// but it will not be stable between multiple executions of the program.
-pub type HashMap<K, V> = hashbrown::HashMap<K, V, BuildHasherDefault<AHasher>>;
+#[cfg(feature = "alloc")]
+pub type HashMap<K, V, S = FixedHasher> = hashbrown::HashMap<K, V, S>;

-/// A stable hash map implementing aHash, a high speed keyed hashing algorithm
+/// A stable hash map implementing a high speed keyed hashing algorithm
 /// intended for use in in-memory hashmaps.
 ///
 /// Unlike [`HashMap`] the iteration order stability extends between executions
 /// using the same Bevy version on the same device.
 ///
-/// aHash is designed for performance and is NOT cryptographically secure.
+/// The hashing algorithm is designed for performance
+/// and is NOT cryptographically secure.
 #[deprecated(
-    note = "Will be required to use the hash library of your choice. Alias for: hashbrown::HashMap<K, V, FixedState>"
+    note = "Will be required to use the hash library of your choice. Alias for: hashbrown::HashMap<K, V, FixedHasher>"
 )]
-pub type StableHashMap<K, V> = hashbrown::HashMap<K, V, FixedState>;
+#[cfg(feature = "alloc")]
+pub type StableHashMap<K, V> = hashbrown::HashMap<K, V, FixedHasher>;

-/// A [`HashSet`][hashbrown::HashSet] implementing aHash, a high
+/// A [`HashSet`][hashbrown::HashSet] implementing a high
 /// speed keyed hashing algorithm intended for use in in-memory hashmaps.
 ///
-/// aHash is designed for performance and is NOT cryptographically secure.
+/// The hashing algorithm is designed for performance
+/// and is NOT cryptographically secure.
 ///
 /// Within the same execution of the program iteration order of different
 /// `HashSet`s only depends on the order of insertions and deletions,
 /// but it will not be stable between multiple executions of the program.
-pub type HashSet<K> = hashbrown::HashSet<K, BuildHasherDefault<AHasher>>;
+#[cfg(feature = "alloc")]
+pub type HashSet<K, S = FixedHasher> = hashbrown::HashSet<K, S>;

-/// A stable hash set implementing aHash, a high speed keyed hashing algorithm
+/// A stable hash set using a high speed keyed hashing algorithm
 /// intended for use in in-memory hashmaps.
 ///
 /// Unlike [`HashMap`] the iteration order stability extends between executions
 /// using the same Bevy version on the same device.
 ///
-/// aHash is designed for performance and is NOT cryptographically secure.
+/// The hashing algorithm is designed for performance
+/// and is NOT cryptographically secure.
 #[deprecated(
-    note = "Will be required to use the hash library of your choice. Alias for: hashbrown::HashSet<K, FixedState>"
+    note = "Will be required to use the hash library of your choice. Alias for: hashbrown::HashSet<K, FixedHasher>"
 )]
-pub type StableHashSet<K> = hashbrown::HashSet<K, FixedState>;
+#[cfg(feature = "alloc")]
+pub type StableHashSet<K> = hashbrown::HashSet<K, FixedHasher>;

 /// A pre-hashed value of a specific type. Pre-hashing enables memoization of hashes that are expensive to compute.
 ///
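With the primary aliases deterministic by default, `StableHashMap`/`StableHashSet` lose their distinguishing property, hence the deprecation; conversely, code that wants per-process randomness (e.g. HashDoS resistance) must now opt in through the new `S` parameter. Both directions, sketched with the re-exported `RandomState`:

    use bevy_utils::{HashMap, RandomState};

    fn demo() {
        // The new default: fixed-seed foldhash, repeatable iteration order
        // for a given Bevy version.
        let mut stable: HashMap<u32, &str> = HashMap::default();
        stable.insert(1, "deterministic");

        // Opting back in to random seeding via the third parameter:
        let mut randomized: HashMap<u32, &str, RandomState> = HashMap::default();
        randomized.insert(1, "order varies between executions");
    }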
@@ -155,10 +165,10 @@ pub type StableHashSet<K> = hashbrown::HashSet<K, FixedState>;
 /// See [`PassHash`] and [`PassHasher`] for a "pass through" [`BuildHasher`] and [`Hasher`] implementation
 /// designed to work with [`Hashed`]
 /// See [`PreHashMap`] for a hashmap pre-configured to use [`Hashed`] keys.
-pub struct Hashed<V, H = FixedState> {
+pub struct Hashed<V, S = FixedHasher> {
     hash: u64,
     value: V,
-    marker: PhantomData<H>,
+    marker: PhantomData<S>,
 }

 impl<V: Hash, H: BuildHasher + Default> Hashed<V, H> {
@@ -262,9 +272,11 @@ impl Hasher for PassHasher {

 /// A [`HashMap`] pre-configured to use [`Hashed`] keys and [`PassHash`] passthrough hashing.
 /// Iteration order only depends on the order of insertions and deletions.
+#[cfg(feature = "alloc")]
 pub type PreHashMap<K, V> = hashbrown::HashMap<Hashed<K>, V, PassHash>;

 /// Extension methods intended to add functionality to [`PreHashMap`].
+#[cfg(feature = "alloc")]
 pub trait PreHashMapExt<K, V> {
     /// Tries to get or insert the value for the given `key` using the pre-computed hash first.
     /// If the [`PreHashMap`] does not already contain the `key`, it will clone it and insert
@@ -272,9 +284,11 @@ pub trait PreHashMapExt<K, V> {
     fn get_or_insert_with<F: FnOnce() -> V>(&mut self, key: &Hashed<K>, func: F) -> &mut V;
 }

+#[cfg(feature = "alloc")]
 impl<K: Hash + Eq + PartialEq + Clone, V> PreHashMapExt<K, V> for PreHashMap<K, V> {
     #[inline]
     fn get_or_insert_with<F: FnOnce() -> V>(&mut self, key: &Hashed<K>, func: F) -> &mut V {
+        use hashbrown::hash_map::RawEntryMut;
         let entry = self
             .raw_entry_mut()
             .from_key_hashed_nocheck(key.hash(), key);
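`PreHashMap` is why the manifest keeps hashbrown's `raw-entry` feature: `Hashed` memoizes a key's hash at construction time, and `from_key_hashed_nocheck` feeds that stored hash straight into the raw entry API so lookups never re-hash the key. A usage sketch, assuming the bevy_utils types above:

    use bevy_utils::{Hashed, PreHashMap, PreHashMapExt};

    fn demo() {
        let mut cache: PreHashMap<String, u64> = PreHashMap::default();
        // The key's hash is computed exactly once, here...
        let key = Hashed::new("expensive-to-hash key".to_string());
        // ...and reused by every subsequent lookup or insertion.
        let value = cache.get_or_insert_with(&key, || 42);
        assert_eq!(*value, 42);
    }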
@@ -290,6 +304,7 @@ impl<K: Hash + Eq + PartialEq + Clone, V> PreHashMapExt<K, V> for PreHashMap<K,

 /// A specialized hashmap type with Key of [`TypeId`]
 /// Iteration order only depends on the order of insertions and deletions.
+#[cfg(feature = "alloc")]
 pub type TypeIdMap<V> = hashbrown::HashMap<TypeId, V, NoOpHash>;

 /// [`BuildHasher`] for types that already contain a high-quality hash.
@@ -447,8 +462,8 @@ mod tests {
     fn stable_hash_within_same_program_execution() {
         use alloc::vec::Vec;

-        let mut map_1 = HashMap::new();
-        let mut map_2 = HashMap::new();
+        let mut map_1: HashMap<_, _> = HashMap::default();
+        let mut map_2: HashMap<_, _> = HashMap::default();
         for i in 1..10 {
             map_1.insert(i, i);
             map_2.insert(i, i);
@@ -572,7 +572,7 @@ impl PerMethodSettings {

 impl Default for PerMethodSettings {
     fn default() -> Self {
-        let mut settings = HashMap::new();
+        let mut settings: HashMap<_, _> = HashMap::default();

         for method in [
             Tonemapping::None,
@@ -4,8 +4,8 @@ use bevy::{
     animation::{AnimationTarget, AnimationTargetId},
     color::palettes::css::{LIGHT_GRAY, WHITE},
     prelude::*,
-    utils::hashbrown::HashSet,
 };
+use std::collections::HashSet;

 // IDs of the mask groups we define for the running fox model.
 //
@@ -3,7 +3,7 @@
 //! This example show how you can create components dynamically, spawn entities with those components
 //! as well as query for entities with those components.

-use std::{alloc::Layout, io::Write, ptr::NonNull};
+use std::{alloc::Layout, collections::HashMap, io::Write, ptr::NonNull};

 use bevy::{
     ecs::{
@@ -13,7 +13,6 @@ use bevy::{
     },
     prelude::*,
     ptr::{Aligned, OwningPtr},
-    utils::HashMap,
 };

 const PROMPT: &str = "
@@ -1,9 +1,10 @@
 //! This example displays each contributor to the bevy source code as a bouncing bevy-ball.

-use bevy::{math::bounding::Aabb2d, prelude::*, utils::HashMap};
+use bevy::{math::bounding::Aabb2d, prelude::*};
 use rand::{Rng, SeedableRng};
 use rand_chacha::ChaCha8Rng;
 use std::{
+    collections::HashMap,
     env::VarError,
     hash::{DefaultHasher, Hash, Hasher},
     io::{self, BufRead, BufReader},
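The examples migrate from `bevy::utils::HashMap`/`HashSet` to the std types wherever nothing depends on Bevy's fixed hasher, consistent with the deprecation notes above that point users toward the hash library of their choice. The difference is only the default hasher:

    use std::collections::HashMap; // randomly seeded SipHash, HashDoS-resistant
    // use bevy::utils::HashMap;   // fixed-seed foldhash, deterministic ordering

    fn main() {
        let mut scores: HashMap<String, u32> = HashMap::new(); // std keeps `new()`
        scores.insert("contributor".into(), 1);
    }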
@@ -195,7 +195,7 @@ fn main() {

         dynamic_set.remove(&"y");

-        let mut my_set: HashSet<&str> = HashSet::new();
+        let mut my_set: HashSet<&str> = HashSet::default();
         my_set.apply(&dynamic_set);
         assert_eq!(my_set, HashSet::from_iter(["x", "z"]));
     }
@@ -204,7 +204,7 @@ fn main() {
     {
         let dynamic_map = DynamicMap::from_iter([("x", 1u32), ("y", 2u32), ("z", 3u32)]);

-        let mut my_map: HashMap<&str, u32> = HashMap::new();
+        let mut my_map: HashMap<&str, u32> = HashMap::default();
         my_map.apply(&dynamic_map);
         assert_eq!(my_map.get("x"), Some(&1));
         assert_eq!(my_map.get("y"), Some(&2));
@@ -68,7 +68,7 @@ enum F {
 }

 fn setup() {
-    let mut z = HashMap::default();
+    let mut z: HashMap<_, _> = HashMap::default();
     z.insert("Hello".to_string(), 1.0);
     let value: Box<dyn Reflect> = Box::new(A {
         x: 1,
@@ -71,7 +71,7 @@ fn configure_ambiguity_detection(sub_app: &mut SubApp) {
 /// Returns the number of conflicting systems per schedule.
 fn count_ambiguities(sub_app: &SubApp) -> AmbiguitiesCount {
     let schedules = sub_app.world().resource::<Schedules>();
-    let mut ambiguities = HashMap::new();
+    let mut ambiguities: HashMap<_, _> = HashMap::default();
     for (_, schedule) in schedules.iter() {
         let ambiguities_in_schedule = schedule.graph().conflicting_systems().len();
         ambiguities.insert(schedule.label(), ambiguities_in_schedule);
@@ -12,7 +12,7 @@ toml_edit = { version = "0.22.7", default-features = false, features = [
 tera = "1.15"
 serde = { version = "1.0", features = ["derive"] }
 bitflags = "2.3"
-hashbrown = { version = "0.14", features = ["serde"] }
+hashbrown = { version = "0.15", features = ["serde"] }

 [lints]
 workspace = true