mirror of
https://github.com/bevyengine/bevy
synced 2024-11-21 20:23:28 +00:00
Apply Clippy lints regarding lazy evaluation and closures (#14015)
# Objective

- Lazily evaluate [default](https://rust-lang.github.io/rust-clippy/master/index.html#/unwrap_or_default)~~/[or](https://rust-lang.github.io/rust-clippy/master/index.html#/or_fun_call)~~ values where it makes sense
  - ~~`unwrap_or(foo())` -> `unwrap_or_else(|| foo())`~~
  - `unwrap_or(Default::default())` -> `unwrap_or_default()`
  - etc.
- Avoid creating [redundant closures](https://rust-lang.github.io/rust-clippy/master/index.html#/redundant_closure), even for [method calls](https://rust-lang.github.io/rust-clippy/master/index.html#/redundant_closure_for_method_calls)
  - `map(|something| something.into())` -> `map(Into::into)`

## Solution

- Apply Clippy lints:
  - ~~[or_fun_call](https://rust-lang.github.io/rust-clippy/master/index.html#/or_fun_call)~~
  - [unwrap_or_default](https://rust-lang.github.io/rust-clippy/master/index.html#/unwrap_or_default)
  - [redundant_closure_for_method_calls](https://rust-lang.github.io/rust-clippy/master/index.html#/redundant_closure_for_method_calls) ([redundant closures](https://rust-lang.github.io/rust-clippy/master/index.html#/redundant_closure) is already enabled)

## Testing

- Tested on Windows 11 (`stable-x86_64-pc-windows-gnu`, 1.79.0)
- Bevy compiles without errors or warnings and the examples seem to work as intended
  - `cargo clippy` ✅
  - `cargo run -p ci -- compile` ✅

---------

Co-authored-by: Alice Cecile <alice.i.cecile@gmail.com>
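To make the before/after patterns concrete before the diff, here is a minimal, self-contained sketch of the transformations these lints ask for; `Wrapper` and the literal values are illustrative stand-ins, not Bevy APIs:

```rust
// A minimal sketch of the lint patterns applied throughout this PR.
#[derive(Debug)]
struct Wrapper(u32);

impl From<u32> for Wrapper {
    fn from(value: u32) -> Self {
        Wrapper(value)
    }
}

fn main() {
    let maybe_count: Option<u32> = None;

    // clippy::unwrap_or_default: prefer the dedicated method over
    // eagerly constructing a default value.
    // before: maybe_count.unwrap_or(Default::default())
    let count = maybe_count.unwrap_or_default();

    // clippy::redundant_closure_for_method_calls: a path to the method
    // replaces a closure that only forwards its argument.
    // before: .map(|s| s.to_string())
    let words: Vec<String> = ["a", "b"].into_iter().map(ToString::to_string).collect();

    // clippy::redundant_closure: the same idea for plain functions, e.g.
    // `map(|v| Wrapper::from(v))` becomes `map(Wrapper::from)`, and
    // `map(|v| v.into())` becomes `map(Into::into)` when the target type is known.
    let wrapped: Vec<Wrapper> = vec![1u32, 2].into_iter().map(Wrapper::from).collect();

    println!("{count} {words:?} {wrapped:?}");
}
```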
This commit is contained in: parent 9055fc1d68, commit 856b39d821.
67 changed files with 121 additions and 126 deletions.
@@ -38,6 +38,8 @@ undocumented_unsafe_blocks = "warn"
 redundant_else = "warn"
 match_same_arms = "warn"
 semicolon_if_nothing_returned = "warn"
+redundant_closure_for_method_calls = "warn"
+unwrap_or_default = "warn"

 ptr_as_ptr = "warn"
 ptr_cast_constness = "warn"
@@ -612,7 +612,7 @@ impl AnimationPlayer {
     pub fn all_finished(&self) -> bool {
         self.active_animations
             .values()
-            .all(|playing_animation| playing_animation.is_finished())
+            .all(ActiveAnimation::is_finished)
     }

     /// Check if all playing animations are paused.
@@ -620,7 +620,7 @@ impl AnimationPlayer {
     pub fn all_paused(&self) -> bool {
         self.active_animations
             .values()
-            .all(|playing_animation| playing_animation.is_paused())
+            .all(ActiveAnimation::is_paused)
     }

     /// Resume all playing animations.
@@ -242,7 +242,7 @@ impl App {
         let main = self.main_mut();
         main.plugin_registry = plugins;
         main.plugins_state = PluginsState::Finished;
-        self.sub_apps.iter_mut().skip(1).for_each(|s| s.finish());
+        self.sub_apps.iter_mut().skip(1).for_each(SubApp::finish);
     }

     /// Runs [`Plugin::cleanup`] for each plugin. This is usually called by the event loop after
@@ -256,12 +256,12 @@ impl App {
         let main = self.main_mut();
         main.plugin_registry = plugins;
         main.plugins_state = PluginsState::Cleaned;
-        self.sub_apps.iter_mut().skip(1).for_each(|s| s.cleanup());
+        self.sub_apps.iter_mut().skip(1).for_each(SubApp::cleanup);
     }

     /// Returns `true` if any of the sub-apps are building plugins.
     pub(crate) fn is_building_plugins(&self) -> bool {
-        self.sub_apps.iter().any(|s| s.is_building_plugins())
+        self.sub_apps.iter().any(SubApp::is_building_plugins)
     }

     /// Adds one or more systems to the given schedule in this app's [`Schedules`].
@@ -22,11 +22,7 @@ pub(crate) fn get_base_path() -> PathBuf {
         PathBuf::from(manifest_dir)
     } else {
         env::current_exe()
-            .map(|path| {
-                path.parent()
-                    .map(|exe_parent_path| exe_parent_path.to_owned())
-                    .unwrap()
-            })
+            .map(|path| path.parent().map(ToOwned::to_owned).unwrap())
             .unwrap()
     }
 }
@@ -93,7 +93,7 @@ where
                 .expect("AssetLoader settings should match the loader type");
             let asset = <L as AssetLoader>::load(self, reader, settings, &mut load_context)
                 .await
-                .map_err(|error| error.into())?;
+                .map_err(Into::into)?;
             Ok(load_context.finish(asset, Some(meta)).into())
         })
     }
@@ -540,7 +540,7 @@ impl<'a> LoadContext<'a> {
             .meta
             .as_ref()
             .and_then(|m| m.processed_info().as_ref());
-        let hash = info.map(|i| i.full_hash).unwrap_or(Default::default());
+        let hash = info.map(|i| i.full_hash).unwrap_or_default();
         self.loader_dependencies.insert(path, hash);
         Ok(loaded_asset)
     }
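The `unwrap_or_default()` change above is also where the "lazy evaluation" wording in the PR title comes from: `unwrap_or` evaluates its argument eagerly, while `unwrap_or_else` and `unwrap_or_default` do not. A small standalone sketch, using a hypothetical `expensive_default` helper rather than the real `full_hash` lookup:

```rust
// Illustrative stand-in for a costly fallback value (not Bevy code).
fn expensive_default() -> u64 {
    42
}

fn main() {
    let info: Option<u64> = None;

    // Eager: the argument is computed even when `info` is `Some`.
    let _eager = info.unwrap_or(expensive_default());

    // Lazy: the function only runs when `info` is `None`.
    let _lazy = info.unwrap_or_else(expensive_default);

    // When the fallback is just `Default::default()`, Clippy's
    // `unwrap_or_default` lint prefers the dedicated method, as in the hunk above.
    let hash: u64 = info.unwrap_or_default();
    assert_eq!(hash, 0);
}
```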
@@ -320,7 +320,7 @@ impl<'a> AssetPath<'a> {
         AssetPath {
             source: self.source.into_owned(),
             path: self.path.into_owned(),
-            label: self.label.map(|l| l.into_owned()),
+            label: self.label.map(CowArc::into_owned),
         }
     }

@@ -3,7 +3,7 @@ use std::any::{Any, TypeId};
 use bevy_ecs::world::{unsafe_world_cell::UnsafeWorldCell, World};
 use bevy_reflect::{FromReflect, FromType, Reflect};

-use crate::{Asset, Assets, Handle, UntypedAssetId, UntypedHandle};
+use crate::{Asset, AssetId, Assets, Handle, UntypedAssetId, UntypedHandle};

 /// Type data for the [`TypeRegistry`](bevy_reflect::TypeRegistry) used to operate on reflected [`Asset`]s.
 ///
@@ -160,7 +160,7 @@ impl<A: Asset + FromReflect> FromType<A> for ReflectAsset {
         },
         ids: |world| {
             let assets = world.resource::<Assets<A>>();
-            Box::new(assets.ids().map(|i| i.untyped()))
+            Box::new(assets.ids().map(AssetId::untyped))
         },
         remove: |world, handle| {
             let mut assets = world.resource_mut::<Assets<A>>();
@@ -212,7 +212,7 @@ impl AssetLoaders {
         }

         // Try extracting the extension from the path
-        if let Some(full_extension) = asset_path.and_then(|path| path.get_full_extension()) {
+        if let Some(full_extension) = asset_path.and_then(AssetPath::get_full_extension) {
             if let Some(&index) = try_extension(full_extension.as_str()) {
                 return self.get_by_index(index);
             }
@@ -230,7 +230,7 @@ impl AssetServer {

             let mut extensions = vec![full_extension.clone()];
             extensions.extend(
-                AssetPath::iter_secondary_extensions(&full_extension).map(|e| e.to_string()),
+                AssetPath::iter_secondary_extensions(&full_extension).map(ToString::to_string),
             );

             MissingAssetLoaderForExtensionError { extensions }
@@ -493,7 +493,7 @@ impl AssetServer {
         force: bool,
         meta_transform: Option<MetaTransform>,
     ) -> Result<UntypedHandle, AssetLoadError> {
-        let asset_type_id = input_handle.as_ref().map(|handle| handle.type_id());
+        let asset_type_id = input_handle.as_ref().map(UntypedHandle::type_id);

         let path = path.into_owned();
         let path_clone = path.clone();
@@ -938,7 +938,7 @@ impl AssetServer {
     /// or is still "alive".
     pub fn get_handle<'a, A: Asset>(&self, path: impl Into<AssetPath<'a>>) -> Option<Handle<A>> {
         self.get_path_and_type_id_handle(&path.into(), TypeId::of::<A>())
-            .map(|h| h.typed_debug_checked())
+            .map(UntypedHandle::typed_debug_checked)
     }

     /// Get a `Handle` from an `AssetId`.
@@ -949,7 +949,8 @@ impl AssetServer {
     /// Consider using [`Assets::get_strong_handle`] in the case the `Handle`
     /// comes from [`Assets::add`].
     pub fn get_id_handle<A: Asset>(&self, id: AssetId<A>) -> Option<Handle<A>> {
-        self.get_id_handle_untyped(id.untyped()).map(|h| h.typed())
+        self.get_id_handle_untyped(id.untyped())
+            .map(UntypedHandle::typed)
     }

     /// Get an `UntypedHandle` from an `UntypedAssetId`.
@@ -99,7 +99,7 @@ impl ExtractComponent for Skybox {

     fn extract_component((skybox, exposure): QueryItem<'_, Self::QueryData>) -> Option<Self::Out> {
         let exposure = exposure
-            .map(|e| e.exposure())
+            .map(Exposure::exposure)
             .unwrap_or_else(|| Exposure::default().exposure());

         Some((
@@ -48,7 +48,7 @@ impl ViewNode for UpscalingNode {
             ClearColorConfig::Custom(color) => Some(color),
             ClearColorConfig::None => None,
         };
-        let converted_clear_color = clear_color.map(|color| color.into());
+        let converted_clear_color = clear_color.map(Into::into);
         let upscaled_texture = target.main_texture_view();

         let mut cached_bind_group = self.cached_texture_bind_group.lock().unwrap();
@@ -689,7 +689,7 @@ impl Components {
     /// This will return an incorrect result if `id` did not come from the same world as `self`. It may return `None` or a garbage value.
     #[inline]
     pub fn get_name(&self, id: ComponentId) -> Option<&str> {
-        self.get_info(id).map(|descriptor| descriptor.name())
+        self.get_info(id).map(ComponentInfo::name)
     }

     /// Gets the metadata associated with the given component.
@@ -224,7 +224,7 @@ impl<'a, E: Event> EventParIter<'a, E> {
             .batching_strategy
             .calc_batch_size(|| self.len(), thread_count);
         let chunks = self.slices.map(|s| s.chunks_exact(batch_size));
-        let remainders = chunks.each_ref().map(|c| c.remainder());
+        let remainders = chunks.each_ref().map(std::slice::ChunksExact::remainder);

         pool.scope(|scope| {
             for batch in chunks.into_iter().flatten().chain(remainders) {
@@ -156,7 +156,7 @@ mod tests {

     #[test]
     fn test_events_clear_and_read() {
-        events_clear_and_read_impl(|events| events.clear());
+        events_clear_and_read_impl(Events::clear);
     }

     #[test]
@@ -1739,12 +1739,12 @@ mod tests {

         let sort_by = query
             .iter(&world)
-            .sort_by::<Entity>(|e1, e2| e1.cmp(e2))
+            .sort_by::<Entity>(Ord::cmp)
             .collect::<Vec<_>>();

         let sort_unstable_by = query
             .iter(&world)
-            .sort_unstable_by::<Entity>(|e1, e2| e1.cmp(e2))
+            .sort_unstable_by::<Entity>(Ord::cmp)
             .collect::<Vec<_>>();

         let sort_by_key = query
@@ -1769,10 +1769,10 @@ mod tests {
         sort_unstable_v2.sort_unstable();

         let mut sort_by_v2 = query.iter(&world).collect::<Vec<_>>();
-        sort_by_v2.sort_by(|e1, e2| e1.cmp(e2));
+        sort_by_v2.sort_by(Ord::cmp);

         let mut sort_unstable_by_v2 = query.iter(&world).collect::<Vec<_>>();
-        sort_unstable_by_v2.sort_unstable_by(|e1, e2| e1.cmp(e2));
+        sort_unstable_by_v2.sort_unstable_by(Ord::cmp);

         let mut sort_by_key_v2 = query.iter(&world).collect::<Vec<_>>();
         sort_by_key_v2.sort_by_key(|&e| e);
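The query-sorting changes above lean on the fact that a plain function path such as `Ord::cmp` satisfies the same `FnMut(&T, &T) -> Ordering` bound as the closure it replaces. A standalone illustration using only `std`, not Bevy types:

```rust
fn main() {
    let mut values = vec![3, 1, 2];

    // A path to a two-argument function coerces to the comparator closure
    // Clippy flags as redundant.
    // before: values.sort_by(|a, b| a.cmp(b));
    values.sort_by(Ord::cmp);
    assert_eq!(values, [1, 2, 3]);

    // The same coercion covers unary methods used through `map`:
    // before: .map(|s| s.len())
    let lengths: Vec<usize> = ["one", "three"].into_iter().map(str::len).collect();
    assert_eq!(lengths, [3, 5]);
}
```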
@@ -95,7 +95,7 @@ impl<I: SparseSetIndex, V> SparseArray<I, V> {
     #[inline]
     pub fn remove(&mut self, index: I) -> Option<V> {
         let index = index.sparse_set_index();
-        self.values.get_mut(index).and_then(|value| value.take())
+        self.values.get_mut(index).and_then(Option::take)
     }

     /// Removes all of the values stored within.
@@ -4,7 +4,7 @@ use super::{Deferred, IntoObserverSystem, IntoSystem, RegisterSystem, Resource};
 use crate::{
     self as bevy_ecs,
     bundle::Bundle,
-    component::ComponentId,
+    component::{ComponentId, ComponentInfo},
     entity::{Entities, Entity},
     event::Event,
     observer::{Observer, TriggerEvent, TriggerTargets},
@@ -1306,7 +1306,7 @@ fn insert_resource<R: Resource>(resource: R) -> impl Command {
 fn log_components(entity: Entity, world: &mut World) {
     let debug_infos: Vec<_> = world
         .inspect_entity(entity)
-        .map(|component_info| component_info.name())
+        .map(ComponentInfo::name)
         .collect();
     info!("Entity {:?}: {:?}", entity, debug_infos);
 }
@@ -1262,7 +1262,7 @@ impl World {
             .map(|removed| removed.iter_current_update_events().cloned())
             .into_iter()
             .flatten()
-            .map(|e| e.into())
+            .map(Into::into)
     }

     /// Initializes a new resource and returns the [`ComponentId`] created for it.
@@ -1397,7 +1397,7 @@ impl World {
         self.components
             .get_resource_id(TypeId::of::<R>())
             .and_then(|component_id| self.storages.resources.get(component_id))
-            .map(|info| info.is_present())
+            .map(ResourceData::is_present)
             .unwrap_or(false)
     }

@@ -1407,7 +1407,7 @@ impl World {
         self.components
             .get_resource_id(TypeId::of::<R>())
             .and_then(|component_id| self.storages.non_send_resources.get(component_id))
-            .map(|info| info.is_present())
+            .map(ResourceData::is_present)
             .unwrap_or(false)
     }

@@ -1494,7 +1494,7 @@ impl World {
         self.storages
             .resources
             .get(component_id)
-            .and_then(|resource| resource.get_ticks())
+            .and_then(ResourceData::get_ticks)
     }

     /// Gets a reference to the resource of the given type
@@ -1868,7 +1868,7 @@ impl World {
             .storages
             .resources
             .get_mut(component_id)
-            .and_then(|info| info.remove())
+            .and_then(ResourceData::remove)
             .unwrap_or_else(|| panic!("resource does not exist: {}", std::any::type_name::<R>()));
         // Read the value onto the stack to avoid potential mut aliasing.
         // SAFETY: `ptr` was obtained from the TypeId of `R`.
@@ -3153,7 +3153,7 @@ mod tests {
     fn to_type_ids(component_infos: Vec<&ComponentInfo>) -> HashSet<Option<TypeId>> {
         component_infos
             .into_iter()
-            .map(|component_info| component_info.type_id())
+            .map(ComponentInfo::type_id)
             .collect()
     }

@@ -713,7 +713,7 @@ async fn load_gltf<'a, 'b, 'c>(
                 warn!(
                     "The glTF skin {:?} has {} joints, but the maximum supported is {}",
                     skin.name()
-                        .map(|name| name.to_string())
+                        .map(ToString::to_string)
                         .unwrap_or_else(|| skin.index().to_string()),
                     joint_entities.len(),
                     MAX_JOINTS
@@ -790,7 +790,7 @@ fn node_transform(node: &Node) -> Transform {
 fn node_name(node: &Node) -> Name {
     let name = node
         .name()
-        .map(|s| s.to_string())
+        .map(ToString::to_string)
         .unwrap_or_else(|| format!("GltfNode{}", node.index()));
     Name::new(name)
 }
@@ -828,7 +828,7 @@ async fn load_image<'a, 'b>(
     #[cfg(all(debug_assertions, feature = "dds"))]
     let name = gltf_texture
         .name()
-        .map_or("Unknown GLTF Texture".to_string(), |s| s.to_string());
+        .map_or("Unknown GLTF Texture".to_string(), ToString::to_string);
     match gltf_texture.source().source() {
         gltf::image::Source::View { view, mime_type } => {
             let start = view.offset();
@@ -1778,17 +1778,17 @@ impl<'s> Iterator for PrimitiveMorphAttributesIter<'s> {
     type Item = MorphAttributes;

     fn next(&mut self) -> Option<Self::Item> {
-        let position = self.0 .0.as_mut().and_then(|p| p.next());
-        let normal = self.0 .1.as_mut().and_then(|n| n.next());
-        let tangent = self.0 .2.as_mut().and_then(|t| t.next());
+        let position = self.0 .0.as_mut().and_then(Iterator::next);
+        let normal = self.0 .1.as_mut().and_then(Iterator::next);
+        let tangent = self.0 .2.as_mut().and_then(Iterator::next);
         if position.is_none() && normal.is_none() && tangent.is_none() {
             return None;
         }

         Some(MorphAttributes {
-            position: position.map(|p| p.into()).unwrap_or(Vec3::ZERO),
-            normal: normal.map(|n| n.into()).unwrap_or(Vec3::ZERO),
-            tangent: tangent.map(|t| t.into()).unwrap_or(Vec3::ZERO),
+            position: position.map(Into::into).unwrap_or(Vec3::ZERO),
+            normal: normal.map(Into::into).unwrap_or(Vec3::ZERO),
+            tangent: tangent.map(Into::into).unwrap_or(Vec3::ZERO),
         })
     }
 }
@@ -49,7 +49,7 @@ impl<'a> BufferAccessor<'a> {
     /// Creates an iterator over the elements in this accessor
     fn iter<T: gltf::accessor::Item>(self) -> Result<gltf::accessor::Iter<'a, T>, AccessFailed> {
         gltf::accessor::Iter::new(self.accessor, |buffer: gltf::Buffer| {
-            self.buffer_data.get(buffer.index()).map(|v| v.as_slice())
+            self.buffer_data.get(buffer.index()).map(Vec::as_slice)
         })
         .ok_or(AccessFailed::MalformedData)
     }
@@ -680,7 +680,7 @@ mod tests {

     /// Assert the (non)existence and state of the child's [`Parent`] component.
     fn assert_parent(world: &World, child: Entity, parent: Option<Entity>) {
-        assert_eq!(world.get::<Parent>(child).map(|p| p.get()), parent);
+        assert_eq!(world.get::<Parent>(child).map(Parent::get), parent);
     }

     /// Assert the (non)existence and state of the parent's [`Children`] component.
@@ -148,7 +148,7 @@ where
     type Item = Entity;

     fn next(&mut self) -> Option<Self::Item> {
-        self.next = self.parent_query.get(self.next?).ok().map(|p| p.get());
+        self.next = self.parent_query.get(self.next?).ok().map(Parent::get);
         self.next
     }
 }
@@ -390,7 +390,7 @@ impl ShapeSample for Tetrahedron {

     fn sample_boundary<R: Rng + ?Sized>(&self, rng: &mut R) -> Self::Output {
         let triangles = self.faces();
-        let areas = triangles.iter().map(|t| t.area());
+        let areas = triangles.iter().map(Measured2d::area);

         if areas.clone().sum::<f32>() > 0.0 {
             // There is at least one triangle with nonzero area, so this unwrap succeeds.
@@ -700,7 +700,7 @@ pub fn check_dir_light_mesh_visibility(
                     cascade_view_entities.resize(view_frusta.len(), Default::default());
                     cascade_view_entities
                         .iter_mut()
-                        .for_each(|x| x.clear::<WithMesh>());
+                        .for_each(VisibleEntities::clear::<WithMesh>);
                 }
                 None => views_to_remove.push(*view),
             };
@@ -790,7 +790,7 @@ pub fn check_dir_light_mesh_visibility(
                 .get_mut(view)
                 .unwrap()
                 .iter_mut()
-                .map(|v| v.get_mut::<WithMesh>())
+                .map(VisibleEntities::get_mut::<WithMesh>)
                 .zip(entities.iter_mut())
                 .for_each(|(dst, source)| {
                     dst.append(source);
@@ -801,7 +801,7 @@ pub fn check_dir_light_mesh_visibility(
         for (_, cascade_view_entities) in &mut visible_entities.entities {
             cascade_view_entities
                 .iter_mut()
-                .map(|x| x.get_mut::<WithMesh>())
+                .map(VisibleEntities::get_mut::<WithMesh>)
                 .for_each(shrink_entities);
         }
     }
@@ -940,7 +940,7 @@ pub fn check_point_light_mesh_visibility(
                 for entities in cubemap_visible_entities_queue.iter_mut() {
                     cubemap_visible_entities
                         .iter_mut()
-                        .map(|v| v.get_mut::<WithMesh>())
+                        .map(VisibleEntities::get_mut::<WithMesh>)
                         .zip(entities.iter_mut())
                         .for_each(|(dst, source)| dst.append(source));
                 }
@@ -437,11 +437,11 @@ fn upload_light_probes(
             reflection_probes: [RenderLightProbe::default(); MAX_VIEW_LIGHT_PROBES],
             irradiance_volumes: [RenderLightProbe::default(); MAX_VIEW_LIGHT_PROBES],
             reflection_probe_count: render_view_environment_maps
-                .map(|maps| maps.len())
+                .map(RenderViewLightProbes::len)
                 .unwrap_or_default()
                 .min(MAX_VIEW_LIGHT_PROBES) as i32,
             irradiance_volume_count: render_view_irradiance_volumes
-                .map(|maps| maps.len())
+                .map(RenderViewLightProbes::len)
                 .unwrap_or_default()
                 .min(MAX_VIEW_LIGHT_PROBES) as i32,
             view_cubemap_index: render_view_environment_maps
@@ -1018,7 +1018,7 @@ impl AsBindGroupShaderType<StandardMaterialUniform> for StandardMaterial {

         let has_normal_map = self.normal_map_texture.is_some();
         if has_normal_map {
-            let normal_map_id = self.normal_map_texture.as_ref().map(|h| h.id()).unwrap();
+            let normal_map_id = self.normal_map_texture.as_ref().map(Handle::id).unwrap();
             if let Some(texture) = images.get(normal_map_id) {
                 match texture.texture_format {
                     // All 2-component unorm formats
@@ -90,7 +90,7 @@ impl FromReflectAttrs {
     pub fn should_auto_derive(&self) -> bool {
         self.auto_derive
             .as_ref()
-            .map(|lit| lit.value())
+            .map(LitBool::value)
             .unwrap_or(true)
     }
 }
@@ -113,7 +113,7 @@ impl TypePathAttrs {
     pub fn should_auto_derive(&self) -> bool {
         self.auto_derive
             .as_ref()
-            .map(|lit| lit.value())
+            .map(LitBool::value)
             .unwrap_or(true)
     }
 }
@@ -652,9 +652,7 @@ impl<'a> ReflectEnum<'a> {

     /// Get an iterator of fields which are exposed to the reflection API
     pub fn active_fields(&self) -> impl Iterator<Item = &StructField<'a>> {
-        self.variants
-            .iter()
-            .flat_map(|variant| variant.active_fields())
+        self.variants.iter().flat_map(EnumVariant::active_fields)
     }

     pub fn where_clause_options(&self) -> WhereClauseOptions {
@@ -18,7 +18,7 @@ pub(crate) fn impl_struct(reflect_struct: &ReflectStruct) -> proc_macro2::TokenS
                 .data
                 .ident
                 .as_ref()
-                .map(|i| i.to_string())
+                .map(ToString::to_string)
                 .unwrap_or_else(|| field.declaration_index.to_string())
         })
         .collect::<Vec<String>>();
@@ -70,7 +70,7 @@ pub trait Array: Reflect {
     fn clone_dynamic(&self) -> DynamicArray {
         DynamicArray {
             represented_type: self.get_represented_type_info(),
-            values: self.iter().map(|value| value.clone_value()).collect(),
+            values: self.iter().map(Reflect::clone_value).collect(),
         }
     }
 }
@@ -159,7 +159,7 @@ impl EnumInfo {
             .map(|(index, variant)| (variant.name(), index))
             .collect::<HashMap<_, _>>();

-        let variant_names = variants.iter().map(|variant| variant.name()).collect();
+        let variant_names = variants.iter().map(VariantInfo::name).collect();

         Self {
             type_path: TypePathTable::of::<TEnum>(),
@@ -112,7 +112,7 @@ impl StructVariantInfo {
     /// Create a new [`StructVariantInfo`].
     pub fn new(name: &'static str, fields: &[NamedField]) -> Self {
         let field_indices = Self::collect_field_indices(fields);
-        let field_names = fields.iter().map(|field| field.name()).collect();
+        let field_names = fields.iter().map(NamedField::name).collect();
         Self {
             name,
             fields: fields.to_vec().into_boxed_slice(),
@@ -101,7 +101,7 @@ pub trait List: Reflect {
     fn clone_dynamic(&self) -> DynamicList {
         DynamicList {
             represented_type: self.get_represented_type_info(),
-            values: self.iter().map(|value| value.clone_value()).collect(),
+            values: self.iter().map(Reflect::clone_value).collect(),
         }
     }
 }
@@ -920,7 +920,7 @@ impl<'de> DeserializeSeed<'de> for VariantDeserializer {
         E: Error,
     {
         self.0.variant(variant_name).ok_or_else(|| {
-            let names = self.0.iter().map(|variant| variant.name());
+            let names = self.0.iter().map(VariantInfo::name);
             Error::custom(format_args!(
                 "unknown variant `{}`, expected one of {:?}",
                 variant_name,
@@ -1046,7 +1046,7 @@ where
         let mut dynamic_struct = DynamicStruct::default();
         while let Some(Ident(key)) = map.next_key::<Ident>()? {
             let field = info.get_field(&key).ok_or_else(|| {
-                let fields = info.iter_fields().map(|field| field.name());
+                let fields = info.iter_fields().map(NamedField::name);
                 Error::custom(format_args!(
                     "unknown field `{}`, expected one of {:?}",
                     key,
@@ -282,7 +282,7 @@ impl<'a> Serialize for StructSerializer<'a> {
             .registry
             .get(type_info.type_id())
             .and_then(|registration| registration.data::<SerializationData>());
-        let ignored_len = serialization_data.map(|data| data.len()).unwrap_or(0);
+        let ignored_len = serialization_data.map(SerializationData::len).unwrap_or(0);
         let mut state = serializer.serialize_struct(
             struct_info.type_path_table().ident().unwrap(),
             self.struct_value.field_len() - ignored_len,
@@ -335,7 +335,7 @@ impl<'a> Serialize for TupleStructSerializer<'a> {
             .registry
             .get(type_info.type_id())
             .and_then(|registration| registration.data::<SerializationData>());
-        let ignored_len = serialization_data.map(|data| data.len()).unwrap_or(0);
+        let ignored_len = serialization_data.map(SerializationData::len).unwrap_or(0);
         let mut state = serializer.serialize_tuple_struct(
             tuple_struct_info.type_path_table().ident().unwrap(),
             self.tuple_struct.field_len() - ignored_len,
@@ -90,7 +90,7 @@ impl SerializationData {
     pub fn generate_default(&self, index: usize) -> Option<Box<dyn Reflect>> {
         self.skipped_fields
             .get(&index)
-            .map(|field| field.generate_default())
+            .map(SkippedField::generate_default)
     }

     /// Returns the number of skipped fields.
@@ -103,7 +103,7 @@ impl StructInfo {
             .map(|(index, field)| (field.name(), index))
             .collect::<HashMap<_, _>>();

-        let field_names = fields.iter().map(|field| field.name()).collect();
+        let field_names = fields.iter().map(NamedField::name).collect();

         Self {
             type_path: TypePathTable::of::<T>(),
@@ -369,7 +369,7 @@ impl Struct for DynamicStruct {

     #[inline]
     fn name_at(&self, index: usize) -> Option<&str> {
-        self.field_names.get(index).map(|name| name.as_ref())
+        self.field_names.get(index).map(AsRef::as_ref)
     }

     #[inline]
@@ -566,7 +566,7 @@ pub fn struct_debug(dyn_struct: &dyn Struct, f: &mut Formatter<'_>) -> std::fmt:
     let mut debug = f.debug_struct(
         dyn_struct
             .get_represented_type_info()
-            .map(|s| s.type_path())
+            .map(TypeInfo::type_path)
             .unwrap_or("_"),
     );
     for field_index in 0..dyn_struct.field_len() {
@@ -483,7 +483,7 @@ pub fn tuple_struct_debug(
     let mut debug = f.debug_tuple(
         dyn_tuple_struct
             .get_represented_type_info()
-            .map(|s| s.type_path())
+            .map(TypeInfo::type_path)
             .unwrap_or("_"),
     );
     for field in dyn_tuple_struct.iter_fields() {
@@ -395,8 +395,7 @@ impl TypeRegistry {
     ///
     /// If the specified type has not been registered, returns `None`.
     pub fn get_type_info(&self, type_id: TypeId) -> Option<&'static TypeInfo> {
-        self.get(type_id)
-            .map(|registration| registration.type_info())
+        self.get(type_id).map(TypeRegistration::type_info)
     }

     /// Returns an iterator over the [`TypeRegistration`]s of the registered
@@ -901,7 +901,7 @@ pub fn extract_cameras(
                     // this will be set in sort_cameras
                     sorted_camera_index_for_target: 0,
                     exposure: exposure
-                        .map(|e| e.exposure())
+                        .map(Exposure::exposure)
                         .unwrap_or_else(|| Exposure::default().exposure()),
                     hdr: camera.hdr,
                 },
@@ -44,7 +44,7 @@ impl Meshable for Triangle3d {
 /// The normal of a [`Triangle3d`] with zeroing so that a [`Vec3`] is always obtained for meshing.
 #[inline]
 pub(crate) fn normal_vec(triangle: &Triangle3d) -> Vec3 {
-    triangle.normal().map_or(Vec3::ZERO, |n| n.into())
+    triangle.normal().map_or(Vec3::ZERO, Into::into)
 }

 /// Unskewed uv-coordinates for a [`Triangle3d`].
@@ -760,7 +760,7 @@ where
     /// An [`Iterator`] through the associated [`Entity`] for each [`PhaseItem`] in order.
     #[inline]
     pub fn iter_entities(&'_ self) -> impl Iterator<Item = Entity> + '_ {
-        self.items.iter().map(|item| item.entity())
+        self.items.iter().map(PhaseItem::entity)
     }

     /// Renders all of its [`PhaseItem`]s using their corresponding draw functions.
@@ -1045,7 +1045,7 @@ pub trait SortedPhaseItem: PhaseItem {
     /// It's advised to always profile for performance changes when changing this implementation.
     #[inline]
     fn sort(items: &mut [Self]) {
-        items.sort_unstable_by_key(|item| item.sort_key());
+        items.sort_unstable_by_key(Self::sort_key);
     }
 }

@@ -272,7 +272,7 @@ all_tuples_with_size!(impl_to_indexed_binding_type_slice, 1, 32, T, n, s);

 impl<const N: usize> IntoBindGroupLayoutEntryBuilderArray<N> for [BindGroupLayoutEntry; N] {
     fn into_array(self) -> [BindGroupLayoutEntryBuilder; N] {
-        self.map(|x| x.into_bind_group_layout_entry_builder())
+        self.map(IntoBindGroupLayoutEntryBuilder::into_bind_group_layout_entry_builder)
     }
 }

@@ -423,13 +423,13 @@ impl LayoutCache {
         bind_group_layouts: &[BindGroupLayout],
         push_constant_ranges: Vec<PushConstantRange>,
     ) -> ErasedPipelineLayout {
-        let bind_group_ids = bind_group_layouts.iter().map(|l| l.id()).collect();
+        let bind_group_ids = bind_group_layouts.iter().map(BindGroupLayout::id).collect();
         self.layouts
             .entry((bind_group_ids, push_constant_ranges))
             .or_insert_with_key(|(_, push_constant_ranges)| {
                 let bind_group_layouts = bind_group_layouts
                     .iter()
-                    .map(|l| l.value())
+                    .map(BindGroupLayout::value)
                     .collect::<Vec<_>>();
                 ErasedPipelineLayout::new(render_device.create_pipeline_layout(
                     &PipelineLayoutDescriptor {
@@ -352,7 +352,7 @@ pub async fn initialize_renderer(
     let (device, queue) = adapter
         .request_device(
             &wgpu::DeviceDescriptor {
-                label: options.device_label.as_ref().map(|a| a.as_ref()),
+                label: options.device_label.as_ref().map(AsRef::as_ref),
                 required_features: features,
                 required_limits: limits,
             },
@@ -458,7 +458,7 @@ impl From<wgpu::SamplerBorderColor> for ImageSamplerBorderColor {
 impl<'a> From<wgpu::SamplerDescriptor<'a>> for ImageSamplerDescriptor {
     fn from(value: wgpu::SamplerDescriptor) -> Self {
         ImageSamplerDescriptor {
-            label: value.label.map(|l| l.to_string()),
+            label: value.label.map(ToString::to_string),
             address_mode_u: value.address_mode_u.into(),
             address_mode_v: value.address_mode_v.into(),
             address_mode_w: value.address_mode_w.into(),
@@ -78,7 +78,7 @@ pub fn ktx2_buffer_to_image(
             }
         }
     } else {
-        levels = ktx2.levels().map(|level| level.to_vec()).collect();
+        levels = ktx2.levels().map(<[u8]>::to_vec).collect();
     }

     // Identify the format
@@ -899,7 +899,7 @@ pub fn prepare_view_targets(
                     (a, b, sampled, main_texture)
                 });

-            let converted_clear_color = clear_color.map(|color| color.into());
+            let converted_clear_color = clear_color.map(Into::into);

             let main_textures = MainTargetTextures {
                 a: ColorAttachment::new(a.clone(), sampled.clone(), converted_clear_color),
@@ -43,7 +43,7 @@ impl std::fmt::Debug for RenderLayers {

 impl FromIterator<Layer> for RenderLayers {
     fn from_iter<T: IntoIterator<Item = Layer>>(i: T) -> Self {
-        i.into_iter().fold(Self::none(), |mask, g| mask.with(g))
+        i.into_iter().fold(Self::none(), RenderLayers::with)
     }
 }

@@ -201,7 +201,7 @@ pub fn calculate_bounds_2d(
                 .or_else(|| sprite.rect.map(|rect| rect.size()))
                 .or_else(|| match atlas {
                     // We default to the texture size for regular sprites
-                    None => images.get(texture_handle).map(|image| image.size_f32()),
+                    None => images.get(texture_handle).map(Image::size_f32),
                     // We default to the drawn rect for atlas sprites
                     Some(atlas) => atlas
                         .texture_rect(&atlases)
@@ -113,7 +113,7 @@ where
     // This extends each batch using the flatten. The other option is to
     // turn each IntoIter into its own batch.
     fn next_batch(&mut self) -> Option<std::iter::Flatten<B>> {
-        self.iter.next_batch().map(|b| b.flatten())
+        self.iter.next_batch().map(Iterator::flatten)
     }
 }

@@ -167,7 +167,7 @@ where
     T: 'a + Copy,
 {
     fn next_batch(&mut self) -> Option<std::iter::Copied<B>> {
-        self.iter.next_batch().map(|b| b.copied())
+        self.iter.next_batch().map(Iterator::copied)
     }
 }

@@ -183,7 +183,7 @@ where
     T: 'a + Copy,
 {
     fn next_batch(&mut self) -> Option<std::iter::Cloned<B>> {
-        self.iter.next_batch().map(|b| b.cloned())
+        self.iter.next_batch().map(Iterator::cloned)
     }
 }

@@ -199,9 +199,12 @@ where
     P: ParallelIterator<B> + Clone,
 {
     fn next_batch(&mut self) -> Option<B> {
-        self.curr.as_mut().and_then(|c| c.next_batch()).or_else(|| {
-            self.curr = Some(self.iter.clone());
-            self.next_batch()
-        })
+        self.curr
+            .as_mut()
+            .and_then(ParallelIterator::next_batch)
+            .or_else(|| {
+                self.curr = Some(self.iter.clone());
+                self.next_batch()
+            })
     }
 }
@@ -122,7 +122,7 @@ impl TaskPool {

     /// Each thread should only create one `ThreadExecutor`, otherwise, there are good chances they will deadlock
     pub fn get_thread_executor() -> Arc<ThreadExecutor<'static>> {
-        Self::THREAD_EXECUTOR.with(|executor| executor.clone())
+        Self::THREAD_EXECUTOR.with(Clone::clone)
     }

     /// Create a `TaskPool` with the default configuration.
@@ -31,7 +31,7 @@ fn calc_name(texts: &Query<&Text>, children: &Children) -> Option<Box<str>> {
             name = Some(values.join(" "));
         }
     }
     name.map(|v| v.into_boxed_str())
-    name.map(|v| v.into_boxed_str())
+    name.map(String::into_boxed_str)
 }

 fn calc_bounds(
@@ -204,7 +204,7 @@ pub fn ui_focus_system(
             windows
                 .get(window_ref.entity())
                 .ok()
-                .and_then(|window| window.cursor_position())
+                .and_then(Window::cursor_position)
                 .or_else(|| touches_input.first_pressed_position())
                 .map(|cursor_position| (entity, cursor_position - viewport_position))
         })
@@ -293,7 +293,7 @@ pub fn resolve_outlines_system(
 ) {
     let viewport_size = primary_window
         .get_single()
-        .map(|window| window.size())
+        .map(Window::size)
         .unwrap_or(Vec2::ZERO)
         / ui_scale.0;

@@ -137,7 +137,7 @@ impl SortedPhaseItem for TransparentUi {

     #[inline]
     fn sort(items: &mut [Self]) {
-        items.sort_by_key(|item| item.sort_key());
+        items.sort_by_key(SortedPhaseItem::sort_key);
     }
 }

@@ -377,7 +377,7 @@ pub fn extract_ui_material_nodes<M: UiMaterial>(
 ) {
     let ui_logical_viewport_size = windows
         .get_single()
-        .map(|window| window.size())
+        .map(Window::size)
         .unwrap_or(Vec2::ZERO)
         // The logical window resolution returned by `Window` only takes into account the window scale factor and not `UiScale`,
         // so we have to divide by `UiScale` to get the size of the UI viewport.
@@ -120,7 +120,7 @@ pub fn update_image_content_size_system(
     for (mut content_size, image, mut image_size, atlas_image) in &mut query {
         if let Some(size) = match atlas_image {
             Some(atlas) => atlas.texture_rect(&atlases).map(|t| t.size()),
-            None => textures.get(&image.texture).map(|t| t.size()),
+            None => textures.get(&image.texture).map(Image::size),
         } {
             // Update only if size or scale factor has changed to avoid needless layout calculations
             if size != image_size.size
@@ -13,7 +13,7 @@ pub struct Parallel<T: Send> {
 impl<T: Send> Parallel<T> {
     /// Gets a mutable iterator over all of the per-thread queues.
     pub fn iter_mut(&mut self) -> impl Iterator<Item = &'_ mut T> {
-        self.locals.iter_mut().map(|cell| cell.get_mut())
+        self.locals.iter_mut().map(RefCell::get_mut)
     }

     /// Clears all of the stored thread local values.
@@ -474,11 +474,7 @@ fn handle_mouse_clicks(
     if !buttons.pressed(MouseButton::Left) {
         return;
     }
-    let Some(mouse_position) = windows
-        .iter()
-        .next()
-        .and_then(|window| window.cursor_position())
-    else {
+    let Some(mouse_position) = windows.iter().next().and_then(Window::cursor_position) else {
         return;
     };
     let Some((camera, camera_transform)) = cameras.iter().next() else {
@@ -79,7 +79,7 @@ fn main() {
         let Some(name) = component.next() else {
             return;
         };
-        let size = match component.next().map(|s| s.parse::<usize>()) {
+        let size = match component.next().map(str::parse) {
             Some(Ok(size)) => size,
             _ => 0,
         };
@@ -3,7 +3,7 @@
 use std::f32::consts::TAU;

 use bevy::{
-    diagnostic::{DiagnosticsStore, FrameTimeDiagnosticsPlugin},
+    diagnostic::{Diagnostic, DiagnosticsStore, FrameTimeDiagnosticsPlugin},
     prelude::*,
     window::{PresentMode, WindowResolution},
     winit::{UpdateMode, WinitSettings},
@@ -102,7 +102,7 @@ fn ui_system(mut query: Query<&mut Text>, config: Res<Config>, diag: Res<Diagnos

     let Some(fps) = diag
         .get(&FrameTimeDiagnosticsPlugin::FPS)
-        .and_then(|fps| fps.smoothed())
+        .and_then(Diagnostic::smoothed)
     else {
         return;
     };
@@ -108,7 +108,7 @@ fn assign_clips(
         }

         // Go to the next parent.
-        current = parents.get(entity).ok().map(|parent| parent.get());
+        current = parents.get(entity).ok().map(Parent::get);
     }
 }

@@ -132,12 +132,12 @@ impl Target {
         entity: Entity,
     ) -> Vec<Target> {
         let get_name = |i| target_names.and_then(|names| names.get(i));
-        let entity_name = entity_name.map(|n| n.as_str());
+        let entity_name = entity_name.map(Name::as_str);
         weights
             .iter()
             .enumerate()
             .map(|(index, weight)| Target {
-                entity_name: entity_name.map(|n| n.to_owned()),
+                entity_name: entity_name.map(ToOwned::to_owned),
                 entity,
                 name: get_name(index).cloned(),
                 index,
@@ -3,7 +3,7 @@ use std::{cmp::Ordering, fs::File};
 use hashbrown::HashMap;
 use serde::Serialize;
 use tera::{Context, Tera};
-use toml_edit::DocumentMut;
+use toml_edit::{DocumentMut, Item};

 use crate::Command;

@@ -65,7 +65,7 @@ fn parse_examples(panic_on_missing: bool) -> Vec<Example> {
         if metadatas
             .get(&technical_name)
             .and_then(|metadata| metadata.get("hidden"))
-            .and_then(|hidden| hidden.as_bool())
+            .and_then(Item::as_bool)
             .and_then(|hidden| hidden.then_some(()))
             .is_some()
         {
@@ -47,7 +47,7 @@ fn main() {
     assert!(!cli.examples.is_empty(), "must have at least one example");

     let default_features = true;
-    let mut features: Vec<&str> = cli.features.iter().map(|f| f.as_str()).collect();
+    let mut features: Vec<&str> = cli.features.iter().map(String::as_str).collect();
     if let Some(frames) = cli.frames {
         let mut file = File::create("ci_testing_config.ron").unwrap();
         file.write_fmt(format_args!("(events: [({frames}, AppExit)])"))
@@ -15,7 +15,7 @@ use std::{
 use clap::{error::ErrorKind, CommandFactory, Parser, ValueEnum};
 use pbr::ProgressBar;
 use regex::Regex;
-use toml_edit::DocumentMut;
+use toml_edit::{DocumentMut, Item};
 use xshell::{cmd, Shell};

 #[derive(Parser, Debug)]
@@ -159,7 +159,7 @@ fn main() {
         .as_ref()
         .map(|path| {
             let file = fs::read_to_string(path).unwrap();
-            file.lines().map(|l| l.to_string()).collect::<Vec<_>>()
+            file.lines().map(ToString::to_string).collect::<Vec<_>>()
         })
         .unwrap_or_default();

@@ -286,7 +286,7 @@ fn main() {
     };
     let local_extra_parameters = extra_parameters
         .iter()
-        .map(|s| s.to_string())
+        .map(ToString::to_string)
         .chain(required_features.iter().cloned())
         .collect::<Vec<_>>();

@@ -302,7 +302,7 @@ fn main() {
     ).run();
     let local_extra_parameters = extra_parameters
         .iter()
-        .map(|s| s.to_string())
+        .map(ToString::to_string)
         .chain(required_features.iter().cloned())
         .collect::<Vec<_>>();
     let mut cmd = cmd!(
@@ -745,7 +745,7 @@ fn parse_examples() -> Vec<Example> {
         if metadatas
             .get(&technical_name)
             .and_then(|metadata| metadata.get("hidden"))
-            .and_then(|hidden| hidden.as_bool())
+            .and_then(Item::as_bool)
             .and_then(|hidden| hidden.then_some(()))
             .is_some()
         {