Merge pull request #206 from multun/clippy

Add clippy support and fix all warnings / errors
Carter Anderson 2020-08-19 21:33:28 -07:00 committed by GitHub
commit e31f576484
99 changed files with 451 additions and 367 deletions


@ -26,7 +26,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
toolchain: nightly
components: rustfmt
components: rustfmt, clippy
- name: Install alsa
run: sudo apt-get install libasound2-dev
@ -37,6 +37,15 @@ jobs:
- name: Check the format
run: cargo +nightly fmt --all -- --check
# type complexity must be ignored because we use huge templates for queries
- name: Run clippy
run: >
cargo +nightly clippy
--all-targets
--all-features
--
-D warnings
-A clippy::type_complexity
- name: Run tests
run: cargo test --workspace
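
For context on the `-A clippy::type_complexity` exception above: clippy flags deeply nested parameter types, and ECS query signatures routinely exceed the default threshold. A self-contained illustration (plain Rust, not Bevy code, names invented) of the kind of type the lint objects to:

```rust
// A parameter type of roughly this shape is what trips clippy::type_complexity;
// Bevy's query tuples hit the same threshold, hence the CI-wide `-A` flag.
#[allow(clippy::type_complexity)]
fn run_callbacks(
    callbacks: Vec<Box<dyn Fn(&mut Vec<(String, Vec<Option<u32>>)>) -> Result<(), String>>>,
) {
    let mut state: Vec<(String, Vec<Option<u32>>)> = Vec::new();
    for callback in &callbacks {
        callback(&mut state).expect("callback failed");
    }
}

fn main() {
    run_callbacks(vec![Box::new(|state| {
        state.push(("score".to_string(), vec![Some(1), None]));
        Ok(())
    })]);
}
```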


@ -1,6 +1,7 @@
use crate::app_builder::AppBuilder;
use bevy_ecs::{ParallelExecutor, Resources, Schedule, World};
#[allow(clippy::needless_doctest_main)]
/// Containers of app logic and data
///
/// App store the ECS World, Resources, Schedule, and Executor. They also store the "run" function of the App, which


@ -39,7 +39,7 @@ impl AppBuilder {
}
pub fn run(&mut self) {
let app = std::mem::replace(&mut self.app, App::default());
let app = std::mem::take(&mut self.app);
app.run();
}
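
The `run` change above is clippy's `mem_replace_with_default` suggestion: `std::mem::take(&mut x)` is equivalent to `std::mem::replace(&mut x, Default::default())` for any `Default` type, just more direct. A minimal sketch with a stand-in `App` type:

```rust
#[derive(Default)]
struct App {
    systems: Vec<String>,
}

fn main() {
    let mut slot = App { systems: vec!["render".to_string()] };

    // Before: replace the value with an explicitly constructed default...
    let old = std::mem::replace(&mut slot, App::default());
    assert_eq!(old.systems.len(), 1);

    // ...after: `take` does the same thing but spells out the intent.
    slot.systems.push("audio".to_string());
    let old = std::mem::take(&mut slot);
    assert_eq!(old.systems.len(), 1);
    assert!(slot.systems.is_empty());
}
```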
@ -209,7 +209,7 @@ impl AppBuilder {
where
R: FromResources + Send + Sync + 'static,
{
let resource = R::from_resources(&mut self.app.resources);
let resource = R::from_resources(&self.app.resources);
self.app.resources.insert(resource);
self


@ -159,7 +159,7 @@ impl<T> EventReader<T> {
events: &'a Events<T>,
predicate: impl FnMut(&&T) -> bool,
) -> Option<&'a T> {
self.iter(events).rev().filter(predicate).next()
self.iter(events).rev().find(predicate)
}
/// Retrieves the earliest event in `events` that this reader hasn't seen yet. This updates the EventReader's
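
The EventReader change above is a textbook case of clippy's `filter_next` lint. A standalone sketch of the equivalence:

```rust
#[allow(clippy::filter_next)]
fn main() {
    let events = [3, 7, 10, 15];

    // `filter(p).next()` and `find(p)` are equivalent; clippy prefers the
    // latter, which is what the change above does (on a reversed iterator,
    // so it returns the latest matching event).
    let via_filter = events.iter().rev().filter(|&&e| e > 5).next();
    let via_find = events.iter().rev().find(|&&e| e > 5);

    assert_eq!(via_filter, via_find);
    assert_eq!(via_find, Some(&15));
}
```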


@ -172,7 +172,7 @@ impl AssetServer {
pub fn watch_for_changes(&self) -> Result<(), AssetServerError> {
let mut filesystem_watcher = self.filesystem_watcher.write().unwrap();
let _ = filesystem_watcher.get_or_insert_with(|| FilesystemWatcher::default());
let _ = filesystem_watcher.get_or_insert_with(FilesystemWatcher::default);
// watch current files
let asset_info_paths = self.asset_info_paths.read().unwrap();
for asset_path in asset_info_paths.keys() {
@ -186,44 +186,37 @@ impl AssetServer {
pub fn filesystem_watcher_system(asset_server: Res<AssetServer>) {
use notify::event::{Event, EventKind, ModifyKind};
let mut changed = HashSet::new();
loop {
let result = if let Some(filesystem_watcher) =
asset_server.filesystem_watcher.read().unwrap().as_ref()
{
match filesystem_watcher.receiver.try_recv() {
Ok(result) => result,
Err(TryRecvError::Empty) => {
break;
}
Err(TryRecvError::Disconnected) => panic!("FilesystemWatcher disconnected"),
while let Some(filesystem_watcher) =
asset_server.filesystem_watcher.read().unwrap().as_ref()
{
let result = match filesystem_watcher.receiver.try_recv() {
Ok(result) => result,
Err(TryRecvError::Empty) => {
break;
}
} else {
break;
Err(TryRecvError::Disconnected) => panic!("FilesystemWatcher disconnected"),
};
let event = result.unwrap();
match event {
Event {
kind: EventKind::Modify(ModifyKind::Data(_)),
paths,
..
} => {
for path in paths.iter() {
if !changed.contains(path) {
let root_path = asset_server.get_root_path().unwrap();
let relative_path = path.strip_prefix(root_path).unwrap();
match asset_server.load_untyped(relative_path) {
Ok(_) => {}
Err(AssetServerError::AssetLoadError(error)) => {
panic!("{:?}", error)
}
Err(_) => {}
}
if let Event {
kind: EventKind::Modify(ModifyKind::Data(_)),
paths,
..
} = event
{
for path in paths.iter() {
if !changed.contains(path) {
let root_path = asset_server.get_root_path().unwrap();
let relative_path = path.strip_prefix(root_path).unwrap();
match asset_server.load_untyped(relative_path) {
Ok(_) => {}
Err(AssetServerError::AssetLoadError(error)) => panic!("{:?}", error),
Err(_) => {}
}
}
changed.extend(paths);
}
_ => {}
changed.extend(paths);
}
}
}
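
The watcher rewrite above folds a `loop { ... break ... }` construction into `while let`, and collapses a one-armed `match` into `if let` (the `while_let_loop` and `single_match` family of clippy lints). A much smaller sketch of the first transformation, using a `Vec` as a stand-in event source:

```rust
#[allow(clippy::while_let_loop)]
fn main() {
    let mut stack = vec![1, 2, 3];

    // Before: a manual loop that breaks when the Option is None.
    let mut drained = Vec::new();
    loop {
        let value = if let Some(value) = stack.pop() {
            value
        } else {
            break;
        };
        drained.push(value);
    }

    // After: `while let` expresses the same control flow directly.
    let mut stack = vec![1, 2, 3];
    let mut drained2 = Vec::new();
    while let Some(value) = stack.pop() {
        drained2.push(value);
    }

    assert_eq!(drained, drained2);
}
```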
@ -244,8 +237,7 @@ impl AssetServer {
// TODO: add type checking here. people shouldn't be able to request a Handle<Texture> for a Mesh asset
pub fn load<T, P: AsRef<Path>>(&self, path: P) -> Result<Handle<T>, AssetServerError> {
self.load_untyped(path)
.map(|handle_id| Handle::from(handle_id))
self.load_untyped(path).map(Handle::from)
}
pub fn load_sync<T: Resource, P: AsRef<Path>>(
@ -338,15 +330,11 @@ impl AssetServer {
}
pub fn set_load_state(&self, handle_id: HandleId, load_state: LoadState) {
self.asset_info
.write()
.unwrap()
.get_mut(&handle_id)
.map(|asset_info| {
if load_state.get_version() >= asset_info.load_state.get_version() {
asset_info.load_state = load_state;
}
});
if let Some(asset_info) = self.asset_info.write().unwrap().get_mut(&handle_id) {
if load_state.get_version() >= asset_info.load_state.get_version() {
asset_info.load_state = load_state;
}
}
}
pub fn get_load_state_untyped(&self, handle_id: HandleId) -> Option<LoadState> {
@ -453,7 +441,7 @@ impl AssetServer {
) {
Ok(handle) => handle,
Err(AssetServerError::MissingAssetHandler) => continue,
Err(err) => Err(err)?,
Err(err) => return Err(err),
};
handle_ids.push(handle);
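
Several changes in this file (for example `set_load_state`) replace `option.map(|x| { side_effect })` with `if let Some(x) = option` per clippy's `option_map_unit_fn` lint. A standalone sketch with invented names:

```rust
use std::collections::HashMap;

fn main() {
    let mut load_states: HashMap<&str, u32> = HashMap::new();
    load_states.insert("texture.png", 1);

    // Before: `map` used only for its side effect; the Option<()> result is discarded.
    let _ = load_states.get_mut("texture.png").map(|state| *state += 1);

    // After: `if let` expresses the same conditional mutation directly.
    if let Some(state) = load_states.get_mut("texture.png") {
        *state += 1;
    }

    assert_eq!(load_states["texture.png"], 3);
}
```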


@ -17,6 +17,7 @@ pub(crate) const DEFAULT_HANDLE_ID: HandleId =
pub struct HandleId(pub Uuid);
impl HandleId {
#[allow(clippy::new_without_default)]
pub fn new() -> HandleId {
HandleId(Uuid::new_v4())
}
@ -151,7 +152,7 @@ impl<T> Default for Handle<T> {
impl<T> Clone for Handle<T> {
fn clone(&self) -> Self {
Handle {
id: self.id.clone(),
id: self.id,
marker: PhantomData,
}
}
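
The `Clone for Handle<T>` change is clippy's `clone_on_copy` lint: `HandleId` is `Copy`, so `self.id.clone()` can simply be `self.id`. A minimal sketch, with a `u128` standing in for the real `Uuid` field:

```rust
// Stand-in for the Uuid-backed HandleId; the point is only that it is `Copy`.
#[derive(Clone, Copy, PartialEq, Debug)]
struct HandleId(u128);

#[allow(clippy::clone_on_copy)]
fn main() {
    let id = HandleId(42);

    // For `Copy` types, `.clone()` is just a verbose copy.
    let cloned = id.clone();
    let copied = id; // `id` stays usable afterwards because HandleId is Copy

    assert_eq!(cloned, copied);
    assert_eq!(id, copied);
}
```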


@ -47,6 +47,7 @@ pub struct AssetChannel<T: 'static> {
}
impl<T> AssetChannel<T> {
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
let (sender, receiver) = crossbeam_channel::unbounded();
AssetChannel { sender, receiver }


@ -144,7 +144,7 @@ where
T: FromBytes,
{
fn from_bytes(bytes: &[u8]) -> Self {
if bytes.len() == 0 {
if bytes.is_empty() {
None
} else {
Some(T::from_bytes(bytes))


@ -11,6 +11,7 @@ use std::{
#[derive(Debug, Copy, Clone, PartialOrd)]
pub struct FloatOrd(pub f32);
#[allow(clippy::derive_ord_xor_partial_ord)]
impl Ord for FloatOrd {
fn cmp(&self, other: &Self) -> Ordering {
self.0.partial_cmp(&other.0).unwrap_or_else(|| {
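
The allow added above is for clippy's `derive_ord_xor_partial_ord` lint, which warns when `PartialOrd` is derived but `Ord` is written by hand, since the two can silently disagree. A self-contained sketch of the same shape (the NaN handling here is illustrative, not necessarily bevy's exact choice):

```rust
use std::cmp::Ordering;

#[derive(Debug, Copy, Clone, PartialEq, PartialOrd)]
struct FloatOrd(f32);

impl Eq for FloatOrd {}

// The allow asserts that the manual Ord is kept consistent with the derived PartialOrd.
#[allow(clippy::derive_ord_xor_partial_ord)]
impl Ord for FloatOrd {
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.partial_cmp(&other.0).unwrap_or_else(|| {
            // partial_cmp is None only if a NaN is involved; order NaN last.
            match (self.0.is_nan(), other.0.is_nan()) {
                (true, false) => Ordering::Greater,
                (false, true) => Ordering::Less,
                _ => Ordering::Equal,
            }
        })
    }
}

fn main() {
    let mut values = vec![FloatOrd(3.0), FloatOrd(f32::NAN), FloatOrd(-1.0)];
    values.sort(); // total order provided by the manual Ord impl
    assert_eq!(values[0], FloatOrd(-1.0));
    assert!(values[2].0.is_nan());
}
```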


@ -77,7 +77,7 @@ pub(crate) fn entity_labels_system(
let current_labels = entity_labels
.entity_labels
.entry(entity)
.or_insert_with(|| HashSet::default());
.or_insert_with(HashSet::default);
for removed_label in current_labels.difference(&labels.labels) {
if let Some(entities) = entity_labels.label_entities.get_mut(removed_label) {
entities.retain(|e| *e != entity);


@ -1,3 +1,4 @@
#[allow(clippy::module_inception)]
mod time;
mod timer;


@ -12,7 +12,7 @@ struct VertexAttributes {
pub instance: bool,
}
static VERTEX_ATTRIBUTE_NAME: &'static str = "vertex";
static VERTEX_ATTRIBUTE_NAME: &str = "vertex";
pub fn derive_as_vertex_buffer_descriptor(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
@ -34,26 +34,21 @@ pub fn derive_as_vertex_buffer_descriptor(input: TokenStream) -> TokenStream {
field
.attrs
.iter()
.find(|a| {
a.path.get_ident().as_ref().unwrap().to_string() == VERTEX_ATTRIBUTE_NAME
})
.map_or_else(
|| VertexAttributes::default(),
|a| {
syn::custom_keyword!(ignore);
let mut vertex_attributes = VertexAttributes::default();
a.parse_args_with(|input: ParseStream| {
if let Some(_) = input.parse::<Option<ignore>>()? {
vertex_attributes.ignore = true;
return Ok(());
}
Ok(())
})
.expect("invalid 'vertex' attribute format");
.find(|a| *a.path.get_ident().as_ref().unwrap() == VERTEX_ATTRIBUTE_NAME)
.map_or_else(VertexAttributes::default, |a| {
syn::custom_keyword!(ignore);
let mut vertex_attributes = VertexAttributes::default();
a.parse_args_with(|input: ParseStream| {
if input.parse::<Option<ignore>>()?.is_some() {
vertex_attributes.ignore = true;
return Ok(());
}
Ok(())
})
.expect("invalid 'vertex' attribute format");
vertex_attributes
},
),
vertex_attributes
}),
)
})
.collect::<Vec<(&Field, VertexAttributes)>>();


@ -44,9 +44,9 @@ pub fn get_modules(attributes: &[Attribute]) -> Modules {
};
for attribute in attributes.iter() {
if attribute.path.get_ident().as_ref().unwrap().to_string() == AS_CRATE_ATTRIBUTE_NAME {
if *attribute.path.get_ident().as_ref().unwrap() == AS_CRATE_ATTRIBUTE_NAME {
let value = attribute.tokens.to_string();
if &value[1..value.len() - 1] == modules.bevy_render {
if value[1..value.len() - 1] == modules.bevy_render {
modules.bevy_render = "crate".to_string();
}
}


@ -16,7 +16,7 @@ struct RenderResourceAttributes {
pub from_self: bool,
}
static RENDER_RESOURCE_ATTRIBUTE_NAME: &'static str = "render_resources";
static RENDER_RESOURCE_ATTRIBUTE_NAME: &str = "render_resources";
pub fn derive_render_resources(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
@ -26,25 +26,20 @@ pub fn derive_render_resources(input: TokenStream) -> TokenStream {
let attributes = ast
.attrs
.iter()
.find(|a| {
a.path.get_ident().as_ref().unwrap().to_string() == RENDER_RESOURCE_ATTRIBUTE_NAME
})
.map_or_else(
|| RenderResourceAttributes::default(),
|a| {
syn::custom_keyword!(from_self);
let mut attributes = RenderResourceAttributes::default();
a.parse_args_with(|input: ParseStream| {
if let Some(_) = input.parse::<Option<from_self>>()? {
attributes.from_self = true;
}
Ok(())
})
.expect("invalid 'render_resources' attribute format");
.find(|a| *a.path.get_ident().as_ref().unwrap() == RENDER_RESOURCE_ATTRIBUTE_NAME)
.map_or_else(RenderResourceAttributes::default, |a| {
syn::custom_keyword!(from_self);
let mut attributes = RenderResourceAttributes::default();
a.parse_args_with(|input: ParseStream| {
if input.parse::<Option<from_self>>()?.is_some() {
attributes.from_self = true;
}
Ok(())
})
.expect("invalid 'render_resources' attribute format");
attributes
},
);
attributes
});
let struct_name = &ast.ident;
let struct_name_string = struct_name.to_string();
@ -93,28 +88,24 @@ pub fn derive_render_resources(input: TokenStream) -> TokenStream {
.attrs
.iter()
.find(|a| {
a.path.get_ident().as_ref().unwrap().to_string()
== RENDER_RESOURCE_ATTRIBUTE_NAME
*a.path.get_ident().as_ref().unwrap() == RENDER_RESOURCE_ATTRIBUTE_NAME
})
.map_or_else(
|| RenderResourceFieldAttributes::default(),
|a| {
syn::custom_keyword!(ignore);
syn::custom_keyword!(buffer);
let mut attributes = RenderResourceFieldAttributes::default();
a.parse_args_with(|input: ParseStream| {
if let Some(_) = input.parse::<Option<ignore>>()? {
attributes.ignore = true;
} else if let Some(_) = input.parse::<Option<buffer>>()? {
attributes.buffer = true;
}
Ok(())
})
.expect("invalid 'render_resources' attribute format");
.map_or_else(RenderResourceFieldAttributes::default, |a| {
syn::custom_keyword!(ignore);
syn::custom_keyword!(buffer);
let mut attributes = RenderResourceFieldAttributes::default();
a.parse_args_with(|input: ParseStream| {
if input.parse::<Option<ignore>>()?.is_some() {
attributes.ignore = true;
} else if input.parse::<Option<buffer>>()?.is_some() {
attributes.buffer = true;
}
Ok(())
})
.expect("invalid 'render_resources' attribute format");
attributes
},
),
attributes
}),
)
})
.collect::<Vec<(&Field, RenderResourceFieldAttributes)>>();


@ -5,7 +5,7 @@ use proc_macro2::Ident;
use quote::quote;
use syn::{parse_macro_input, Data, DataStruct, DeriveInput, Fields, Path};
static SHADER_DEF_ATTRIBUTE_NAME: &'static str = "shader_def";
static SHADER_DEF_ATTRIBUTE_NAME: &str = "shader_def";
pub fn derive_shader_defs(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
@ -25,10 +25,7 @@ pub fn derive_shader_defs(input: TokenStream) -> TokenStream {
.filter(|f| {
f.attrs
.iter()
.find(|a| {
a.path.get_ident().as_ref().unwrap().to_string() == SHADER_DEF_ATTRIBUTE_NAME
})
.is_some()
.any(|a| *a.path.get_ident().as_ref().unwrap() == SHADER_DEF_ATTRIBUTE_NAME)
})
.map(|f| f.ident.as_ref().unwrap())
.collect::<Vec<&Ident>>();


@ -71,7 +71,7 @@ impl Diagnostic {
}
pub fn average(&self) -> Option<f64> {
if self.history.len() > 0 {
if !self.history.is_empty() {
Some(self.sum / self.history.len() as f64)
} else {
None
@ -93,7 +93,7 @@ impl Diagnostic {
}
}
return None;
None
}
pub fn get_max_history_length(&self) -> usize {


@ -31,7 +31,7 @@ impl Profiler for SystemProfiler {
let mut system_profiles = self.system_profiles.write().unwrap();
let profiles = system_profiles
.entry(scope.clone())
.or_insert_with(|| SystemProfiles::default());
.or_insert_with(SystemProfiles::default);
profiles.current_start = Some(Instant::now());
}


@ -69,6 +69,7 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
unsafe fn put(mut self, mut f: impl FnMut(*mut u8, std::any::TypeId, usize) -> bool) {
#(
if f((&mut self.#fields as *mut #tys).cast::<u8>(), std::any::TypeId::of::<#tys>(), std::mem::size_of::<#tys>()) {
#[allow(clippy::forget_copy)]
std::mem::forget(self.#fields);
}
)*


@ -220,6 +220,12 @@ impl Archetype {
self.len
}
#[allow(missing_docs)]
#[inline]
pub fn is_empty(&self) -> bool {
self.len == 0
}
#[allow(missing_docs)]
pub fn iter_entities(&self) -> impl Iterator<Item = &u32> {
self.entities.iter().take(self.len as usize)
@ -239,6 +245,7 @@ impl Archetype {
&self.types
}
/// # Safety
/// `index` must be in-bounds
pub(crate) unsafe fn get_dynamic(
&self,
@ -255,6 +262,7 @@ impl Archetype {
))
}
/// # Safety
/// Every type must be written immediately after this call
pub unsafe fn allocate(&mut self, id: u32) -> u32 {
if self.len as usize == self.entities.len() {
@ -408,7 +416,13 @@ impl Archetype {
}
}
#[allow(missing_docs)]
/// # Safety
///
/// - `component` must point to valid memory
/// - the component `ty`pe must be registered
/// - `index` must be in-bound
/// - `size` must be the size of the component
/// - the storage array must be big enough
pub unsafe fn put_dynamic(
&mut self,
component: *mut u8,


@ -72,7 +72,11 @@ pub struct Ref<'a, T: Component> {
}
impl<'a, T: Component> Ref<'a, T> {
#[allow(missing_docs)]
/// Creates a new entity component borrow
///
/// # Safety
///
/// - the index of the component must be valid
pub unsafe fn new(archetype: &'a Archetype, index: u32) -> Result<Self, MissingComponent> {
let target = NonNull::new_unchecked(
archetype
@ -120,7 +124,11 @@ pub struct RefMut<'a, T: Component> {
}
impl<'a, T: Component> RefMut<'a, T> {
#[allow(missing_docs)]
/// Creates a new entity component mutable borrow
///
/// # Safety
///
/// - the index of the component must be valid
pub unsafe fn new(archetype: &'a Archetype, index: u32) -> Result<Self, MissingComponent> {
let target = NonNull::new_unchecked(
archetype


@ -11,6 +11,7 @@ use std::error::Error;
#[derive(Debug, Clone, Copy, Hash, Eq, Ord, PartialEq, PartialOrd)]
pub struct Entity(u32);
#[allow(clippy::new_without_default)]
impl Entity {
#[allow(missing_docs)]
pub fn new() -> Self {


@ -47,6 +47,9 @@ pub trait Fetch<'a>: Sized {
fn release(archetype: &Archetype);
/// if this returns true, the current item will be skipped during iteration
///
/// # Safety
/// shouldn't be called if there is no current item
unsafe fn should_skip(&self) -> bool {
false
}
@ -793,7 +796,7 @@ struct ChunkIter<Q: Query> {
}
impl<Q: Query> ChunkIter<Q> {
unsafe fn next<'a, 'w>(&mut self) -> Option<<Q::Fetch as Fetch<'a>>::Item> {
unsafe fn next<'a>(&mut self) -> Option<<Q::Fetch as Fetch<'a>>::Item> {
loop {
if self.len == 0 {
return None;


@ -165,7 +165,7 @@ impl World {
let removed_entities = self
.removed_components
.entry(ty.id())
.or_insert_with(|| Vec::new());
.or_insert_with(Vec::new);
removed_entities.push(entity);
}
Ok(())
@ -202,7 +202,7 @@ impl World {
let removed_entities = self
.removed_components
.entry(ty.id())
.or_insert_with(|| Vec::new());
.or_insert_with(Vec::new);
removed_entities.extend(archetype.iter_entities().map(|id| Entity::from_id(*id)));
}
archetype.clear();
@ -505,7 +505,7 @@ impl World {
state.mutated_entities[target_index as usize] = is_mutated;
} else {
let removed_entities =
removed_components.entry(ty).or_insert_with(|| Vec::new());
removed_components.entry(ty).or_insert_with(Vec::new);
removed_entities.push(entity);
}
})
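
The repeated `or_insert_with(|| Vec::new())` to `or_insert_with(Vec::new)` edits in these hunks (and throughout the PR) come from clippy's `redundant_closure` lint: a closure that only forwards its call can be replaced by the function it wraps. A compact sketch:

```rust
use std::collections::HashMap;

#[allow(clippy::redundant_closure)]
fn main() {
    let mut removed: HashMap<&str, Vec<u32>> = HashMap::new();

    // Before: a closure whose only job is to call Vec::new ...
    removed.entry("Position").or_insert_with(|| Vec::new()).push(1);

    // ... after: pass the function itself.
    removed.entry("Velocity").or_insert_with(Vec::new).push(2);

    assert_eq!(removed["Position"], vec![1]);
    assert_eq!(removed["Velocity"], vec![2]);
}
```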


@ -17,6 +17,10 @@ pub struct Res<'a, T: Resource> {
}
impl<'a, T: Resource> Res<'a, T> {
/// Creates a reference cell to a Resource from a pointer
///
/// # Safety
/// The pointer must have correct lifetime / storage
pub unsafe fn new(value: NonNull<T>) -> Self {
Self {
value: &*value.as_ptr(),
@ -26,6 +30,7 @@ impl<'a, T: Resource> Res<'a, T> {
/// A clone that is unsafe to perform. You probably shouldn't use this.
pub trait UnsafeClone {
#[allow(clippy::missing_safety_doc)]
unsafe fn unsafe_clone(&self) -> Self;
}
@ -53,6 +58,10 @@ pub struct ResMut<'a, T: Resource> {
}
impl<'a, T: Resource> ResMut<'a, T> {
/// Creates a mutable reference cell to a Resource from a pointer
///
/// # Safety
/// The pointer must have correct lifetime / storage / ownership
pub unsafe fn new(value: NonNull<T>) -> Self {
Self {
value: value.as_ptr(),
@ -133,6 +142,7 @@ pub trait FetchResource<'a>: Sized {
fn borrow(resources: &Resources);
fn release(resources: &Resources);
#[allow(clippy::missing_safety_doc)]
unsafe fn get(resources: &'a Resources, system_id: Option<SystemId>) -> Self::Item;
}


@ -42,10 +42,12 @@ impl Resources {
self.get_resource_mut(ResourceIndex::Global)
}
#[allow(clippy::needless_lifetimes)]
pub fn get_local<'a, T: Resource>(&'a self, id: SystemId) -> Option<Ref<'a, T>> {
self.get_resource(ResourceIndex::System(id))
}
#[allow(clippy::needless_lifetimes)]
pub fn get_local_mut<'a, T: Resource>(&'a self, id: SystemId) -> Option<RefMut<'a, T>> {
self.get_resource_mut(ResourceIndex::System(id))
}
@ -82,10 +84,13 @@ impl Resources {
}),
};
if index == archetype.len() {
unsafe { archetype.allocate(index) };
} else if index > archetype.len() {
panic!("attempted to access index beyond 'current_capacity + 1'")
use std::cmp::Ordering;
match index.cmp(&archetype.len()) {
Ordering::Equal => {
unsafe { archetype.allocate(index) };
}
Ordering::Greater => panic!("attempted to access index beyond 'current_capacity + 1'"),
Ordering::Less => (),
}
unsafe {
@ -140,6 +145,7 @@ impl Resources {
}
#[inline]
#[allow(clippy::missing_safety_doc)]
pub unsafe fn get_unsafe_ref<T: Resource>(&self, resource_index: ResourceIndex) -> NonNull<T> {
self.resource_data
.get(&TypeId::of::<T>())


@ -1,4 +1,5 @@
mod parallel_executor;
#[allow(clippy::module_inception)]
mod schedule;
pub use parallel_executor::*;


@ -51,7 +51,7 @@ impl ParallelExecutor {
if schedule_changed {
self.stages.clear();
self.stages
.resize_with(schedule.stage_order.len(), || ExecutorStage::default());
.resize_with(schedule.stage_order.len(), ExecutorStage::default);
}
for (stage_name, executor_stage) in schedule.stage_order.iter().zip(self.stages.iter_mut())
{
@ -208,9 +208,10 @@ impl ExecutorStage {
ThreadLocalExecution::NextFlush => {
let resource_access = system.resource_access();
// if any system before this one conflicts, check all systems that came before for compatibility
if current_archetype_access.is_compatible(archetype_access) == false
|| current_resource_access.is_compatible(resource_access) == false
if !current_archetype_access.is_compatible(archetype_access)
|| !current_resource_access.is_compatible(resource_access)
{
#[allow(clippy::needless_range_loop)]
for earlier_system_index in
prepare_system_index_range.start..system_index
{
@ -223,14 +224,12 @@ impl ExecutorStage {
);
// if earlier system is incompatible, make the current system dependent
if earlier_system
if !earlier_system
.archetype_access()
.is_compatible(archetype_access)
== false
|| earlier_system
|| !earlier_system
.resource_access()
.is_compatible(resource_access)
== false
{
self.system_dependents[earlier_system_index].push(system_index);
self.system_dependencies[system_index]


@ -25,7 +25,7 @@ pub struct Schedule {
impl Schedule {
pub fn add_stage(&mut self, stage: impl Into<Cow<'static, str>>) {
let stage: Cow<str> = stage.into();
if let Some(_) = self.stages.get(&stage) {
if self.stages.get(&stage).is_some() {
panic!("Stage already exists: {}", stage);
} else {
self.stages.insert(stage.clone(), Vec::new());
@ -40,7 +40,7 @@ impl Schedule {
) {
let target: Cow<str> = target.into();
let stage: Cow<str> = stage.into();
if let Some(_) = self.stages.get(&stage) {
if self.stages.get(&stage).is_some() {
panic!("Stage already exists: {}", stage);
}
@ -63,7 +63,7 @@ impl Schedule {
) {
let target: Cow<str> = target.into();
let stage: Cow<str> = stage.into();
if let Some(_) = self.stages.get(&stage) {
if self.stages.get(&stage).is_some() {
panic!("Stage already exists: {}", stage);
}
@ -173,7 +173,7 @@ impl Schedule {
let thread_pool_builder = resources
.get::<ParallelExecutorOptions>()
.map(|options| (*options).clone())
.unwrap_or_else(|| ParallelExecutorOptions::default())
.unwrap_or_else(ParallelExecutorOptions::default)
.create_builder();
// For now, bevy_ecs only uses the global thread pool so it is sufficient to configure it once here.
// Dont call .unwrap() as the function is called twice..


@ -3,6 +3,7 @@ mod into_system;
#[cfg(feature = "profiler")]
mod profiler;
mod query;
#[allow(clippy::module_inception)]
mod system;
pub use commands::*;


@ -49,9 +49,7 @@ impl<'a, Q: HecsQuery> Query<'a, Q> {
.mutable
.contains(location.archetype as usize)
{
self.world
.get(entity)
.map_err(|err| QueryError::ComponentError(err))
self.world.get(entity).map_err(QueryError::ComponentError)
} else {
Err(QueryError::CannotReadArchetype)
}
@ -83,20 +81,21 @@ impl<'a, Q: HecsQuery> Query<'a, Q> {
/// Gets a mutable reference to the entity's component of the given type. This will fail if the entity does not have
/// the given component type or if the given component type does not match this query.
pub fn get_mut<T: Component>(&self, entity: Entity) -> Result<RefMut<'_, T>, QueryError> {
if let Some(location) = self.world.get_entity_location(entity) {
if self
.archetype_access
.mutable
.contains(location.archetype as usize)
{
self.world
.get_mut(entity)
.map_err(|err| QueryError::ComponentError(err))
} else {
Err(QueryError::CannotWriteArchetype)
}
let location = match self.world.get_entity_location(entity) {
None => return Err(QueryError::ComponentError(ComponentError::NoSuchEntity)),
Some(location) => location,
};
if self
.archetype_access
.mutable
.contains(location.archetype as usize)
{
self.world
.get_mut(entity)
.map_err(QueryError::ComponentError)
} else {
Err(QueryError::ComponentError(ComponentError::NoSuchEntity))
Err(QueryError::CannotWriteArchetype)
}
}
@ -241,7 +240,7 @@ struct ChunkIter<Q: HecsQuery> {
impl<Q: HecsQuery> ChunkIter<Q> {
#[inline]
unsafe fn next<'a, 'w>(&mut self) -> Option<<Q::Fetch as Fetch<'a>>::Item> {
unsafe fn next<'a>(&mut self) -> Option<<Q::Fetch as Fetch<'a>>::Item> {
loop {
if self.len == 0 {
return None;


@ -14,6 +14,7 @@ pub enum ThreadLocalExecution {
pub struct SystemId(pub u32);
impl SystemId {
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
SystemId(rand::random::<u32>())
}


@ -51,7 +51,7 @@ fn get_primitive_topology(mode: Mode) -> Result<PrimitiveTopology, GltfError> {
Mode::LineStrip => Ok(PrimitiveTopology::LineStrip),
Mode::Triangles => Ok(PrimitiveTopology::TriangleList),
Mode::TriangleStrip => Ok(PrimitiveTopology::TriangleStrip),
mode @ _ => Err(GltfError::UnsupportedPrimitive { mode }),
mode => Err(GltfError::UnsupportedPrimitive { mode }),
}
}
@ -60,7 +60,7 @@ pub fn load_gltf(asset_path: &Path, bytes: Vec<u8>) -> Result<Mesh, GltfError> {
let gltf = gltf::Gltf::from_slice(&bytes)?;
let buffer_data = load_buffers(gltf.buffers(), asset_path)?;
for scene in gltf.scenes() {
for node in scene.nodes() {
if let Some(node) = scene.nodes().next() {
return Ok(load_node(&buffer_data, &node, 1)?);
}
}
@ -71,43 +71,41 @@ pub fn load_gltf(asset_path: &Path, bytes: Vec<u8>) -> Result<Mesh, GltfError> {
fn load_node(buffer_data: &[Vec<u8>], node: &gltf::Node, depth: i32) -> Result<Mesh, GltfError> {
if let Some(mesh) = node.mesh() {
for primitive in mesh.primitives() {
if let Some(primitive) = mesh.primitives().next() {
let reader = primitive.reader(|buffer| Some(&buffer_data[buffer.index()]));
let primitive_topology = get_primitive_topology(primitive.mode())?;
let mut mesh = Mesh::new(primitive_topology);
reader
.read_positions()
.map(|v| VertexAttribute {
name: "Vertex_Position".into(),
values: VertexAttributeValues::Float3(v.collect()),
})
.map(|vertex_attribute| mesh.attributes.push(vertex_attribute));
reader
.read_normals()
.map(|v| VertexAttribute {
name: "Vertex_Normal".into(),
values: VertexAttributeValues::Float3(v.collect()),
})
.map(|vertex_attribute| mesh.attributes.push(vertex_attribute));
if let Some(vertex_attribute) = reader.read_positions().map(|v| VertexAttribute {
name: "Vertex_Position".into(),
values: VertexAttributeValues::Float3(v.collect()),
}) {
mesh.attributes.push(vertex_attribute);
}
reader
.read_tex_coords(0)
.map(|v| VertexAttribute {
name: "Vertex_Uv".into(),
values: VertexAttributeValues::Float2(v.into_f32().collect()),
})
.map(|vertex_attribute| mesh.attributes.push(vertex_attribute));
if let Some(vertex_attribute) = reader.read_normals().map(|v| VertexAttribute {
name: "Vertex_Normal".into(),
values: VertexAttributeValues::Float3(v.collect()),
}) {
mesh.attributes.push(vertex_attribute);
}
reader.read_indices().map(|indices| {
if let Some(vertex_attribute) = reader.read_tex_coords(0).map(|v| VertexAttribute {
name: "Vertex_Uv".into(),
values: VertexAttributeValues::Float2(v.into_f32().collect()),
}) {
mesh.attributes.push(vertex_attribute);
}
if let Some(indices) = reader.read_indices() {
mesh.indices = Some(indices.into_u32().collect::<Vec<u32>>());
});
};
return Ok(mesh);
}
}
for child in node.children() {
if let Some(child) = node.children().next() {
return Ok(load_node(buffer_data, &child, depth + 1)?);
}


@ -19,10 +19,7 @@ pub enum ElementState {
impl ElementState {
pub fn is_pressed(&self) -> bool {
match self {
ElementState::Pressed => true,
_ => false,
}
matches!(self, ElementState::Pressed)
}
}
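
The `is_pressed` change uses the `matches!` macro, which is shorthand for a boolean `match` over a single pattern. Standalone sketch (the `Released` variant is assumed here for completeness):

```rust
enum ElementState {
    Pressed,
    Released,
}

impl ElementState {
    pub fn is_pressed(&self) -> bool {
        // Expands to: match self { ElementState::Pressed => true, _ => false }
        matches!(self, ElementState::Pressed)
    }
}

fn main() {
    assert!(ElementState::Pressed.is_pressed());
    assert!(!ElementState::Released.is_pressed());
}
```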


@ -3,6 +3,7 @@ use bevy_render::{color::Color, renderer::RenderResources, shader::ShaderDefs, t
/// A material with "standard" properties used in PBR lighting
#[derive(RenderResources, ShaderDefs)]
#[allow(clippy::manual_non_exhaustive)]
pub struct StandardMaterial {
pub albedo: Color,
#[shader_def]
@ -10,6 +11,12 @@ pub struct StandardMaterial {
#[render_resources(ignore)]
#[shader_def]
pub shaded: bool,
// this is a manual implementation of the non exhaustive pattern,
// especially made to allow ..Default::default()
#[render_resources(ignore)]
#[doc(hidden)]
pub __non_exhaustive: (),
}
impl Default for StandardMaterial {
@ -18,6 +25,7 @@ impl Default for StandardMaterial {
albedo: Color::rgb(1.0, 1.0, 1.0),
albedo_texture: None,
shaded: true,
__non_exhaustive: (),
}
}
}
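
A sketch of the manual "non-exhaustive" pattern used above, with stand-in field types. A real `#[non_exhaustive]` attribute would prevent downstream crates from constructing the struct with `..Default::default()` struct-update syntax, so a hidden public unit field is used instead and clippy's `manual_non_exhaustive` lint is silenced:

```rust
#[allow(clippy::manual_non_exhaustive)]
#[derive(Debug)]
pub struct StandardMaterial {
    pub albedo: [f32; 3],
    pub shaded: bool,
    // Prevents exhaustive struct literals without blocking `..Default::default()`.
    #[doc(hidden)]
    pub __non_exhaustive: (),
}

impl Default for StandardMaterial {
    fn default() -> Self {
        Self {
            albedo: [1.0, 1.0, 1.0],
            shaded: true,
            __non_exhaustive: (),
        }
    }
}

fn main() {
    // Callers can still override just the fields they care about:
    let material = StandardMaterial {
        shaded: false,
        ..Default::default()
    };
    assert!(!material.shaded);
    assert_eq!(material.albedo, [1.0, 1.0, 1.0]);
}
```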


@ -48,14 +48,12 @@ pub fn derive_properties(input: TokenStream) -> TokenStream {
f,
f.attrs
.iter()
.find(|a| {
a.path.get_ident().as_ref().unwrap().to_string() == PROP_ATTRIBUTE_NAME
})
.find(|a| *a.path.get_ident().as_ref().unwrap() == PROP_ATTRIBUTE_NAME)
.map(|a| {
syn::custom_keyword!(ignore);
let mut attribute_args = PropAttributeArgs { ignore: None };
a.parse_args_with(|input: ParseStream| {
if let Some(_) = input.parse::<Option<ignore>>()? {
if input.parse::<Option<ignore>>()?.is_some() {
attribute_args.ignore = Some(true);
return Ok(());
}


@ -158,12 +158,16 @@ impl Property for DynamicProperties {
PropertyType::Map => {
for (i, prop) in properties.iter_props().enumerate() {
let name = properties.prop_name(i).unwrap();
self.prop_mut(name).map(|p| p.apply(prop));
if let Some(p) = self.prop_mut(name) {
p.apply(prop);
}
}
}
PropertyType::Seq => {
for (i, prop) in properties.iter_props().enumerate() {
self.prop_with_index_mut(i).map(|p| p.apply(prop));
if let Some(p) = self.prop_with_index_mut(i) {
p.apply(prop);
}
}
}
_ => panic!("DynamicProperties cannot be Value types"),


@ -67,7 +67,7 @@ where
fn set(&mut self, value: &dyn Property) {
if let Some(properties) = value.as_properties() {
let len = properties.prop_len();
self.resize_with(len, || T::default());
self.resize_with(len, T::default);
if properties.property_type() != self.property_type() {
panic!(
@ -77,7 +77,9 @@ where
);
}
for (i, prop) in properties.iter_props().enumerate() {
self.prop_with_index_mut(i).map(|p| p.apply(prop));
if let Some(p) = self.prop_with_index_mut(i) {
p.apply(prop)
}
}
} else {
panic!("attempted to apply non-Properties type to Properties type");
@ -171,7 +173,7 @@ impl Property for bool {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -211,7 +213,7 @@ impl Property for usize {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -269,7 +271,7 @@ impl Property for u64 {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -327,7 +329,7 @@ impl Property for u32 {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -385,7 +387,7 @@ impl Property for u16 {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -443,7 +445,7 @@ impl Property for u8 {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -501,7 +503,7 @@ impl Property for isize {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -559,7 +561,7 @@ impl Property for i64 {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -617,7 +619,7 @@ impl Property for i32 {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -675,7 +677,7 @@ impl Property for i16 {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -733,7 +735,7 @@ impl Property for i8 {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -791,7 +793,7 @@ impl Property for f32 {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]
@ -833,7 +835,7 @@ impl Property for f64 {
#[inline]
fn clone_prop(&self) -> Box<dyn Property> {
Box::new(self.clone())
Box::new(*self)
}
#[inline]


@ -17,6 +17,7 @@ pub enum Serializable<'a> {
}
impl<'a> Serializable<'a> {
#[allow(clippy::should_implement_trait)]
pub fn borrow(&self) -> &dyn erased_serde::Serialize {
match self {
Serializable::Borrowed(serialize) => serialize,
@ -88,11 +89,9 @@ impl<'a> Serialize for DynamicPropertiesSerializer<'a> {
PropertyType::Seq => {
SeqSerializer::new(self.dynamic_properties, self.registry).serialize(serializer)
}
_ => {
return Err(serde::ser::Error::custom(
"DynamicProperties cannot be Value type",
))
}
_ => Err(serde::ser::Error::custom(
"DynamicProperties cannot be Value type",
)),
}
}
}
@ -511,7 +510,7 @@ where
.ok_or_else(|| de::Error::missing_field(TYPE_FIELD))?;
let mut dynamic_properties =
map.next_value_seed(MapPropertyDeserializer { registry })?;
dynamic_properties.type_name = type_name.to_string();
dynamic_properties.type_name = type_name;
return Ok(DynamicPropertiesOrProperty::DynamicProperties(
dynamic_properties,
));


@ -91,7 +91,7 @@ impl PropertyTypeRegistration {
}
pub fn get_short_name(full_name: &str) -> String {
let mut split = full_name.splitn(2, "<");
let mut split = full_name.splitn(2, '<');
// main type
let mut short_name = split
@ -104,7 +104,7 @@ impl PropertyTypeRegistration {
// process generics if they exist
if let Some(generics) = split.next() {
if !generics.ends_with(">") {
if !generics.ends_with('>') {
panic!("should end with closing carrot")
}


@ -123,7 +123,7 @@ where
.find(|(_i, is_index)| is_index(&key))
.map(|(i, _)| i);
if let Some(key_index) = key_index {
key_state.set(key_index, key.clone());
key_state.set(key_index, key);
key_state.finish()
} else {
return false;
@ -139,7 +139,7 @@ where
batch.add(value);
}
return true;
true
}
pub fn iter(&self) -> impl Iterator<Item = &Batch<TKey, TValue, TData>> {


@ -1,5 +1,6 @@
// mod asset_batcher;
// mod asset_batcher2;
#[allow(clippy::module_inception)]
mod batch;
mod batcher;


@ -17,7 +17,7 @@ impl ActiveCameras {
}
pub fn get(&self, name: &str) -> Option<Entity> {
self.cameras.get(name).and_then(|e| e.clone())
self.cameras.get(name).and_then(|e| *e)
}
}
@ -26,7 +26,7 @@ pub fn active_cameras_system(
mut query: Query<(Entity, &Camera)>,
) {
for (name, active_camera) in active_cameras.cameras.iter_mut() {
if let None = active_camera {
if active_camera.is_none() {
for (camera_entity, camera) in &mut query.iter() {
if let Some(ref current_name) = camera.name {
if current_name == name {


@ -1,4 +1,5 @@
mod active_cameras;
#[allow(clippy::module_inception)]
mod camera;
mod projection;
mod visible_entities;


@ -142,7 +142,7 @@ impl<'a> UnsafeClone for DrawContext<'a> {
render_resource_context: self.render_resource_context.unsafe_clone(),
vertex_buffer_descriptors: self.vertex_buffer_descriptors.unsafe_clone(),
shared_buffers: self.shared_buffers.unsafe_clone(),
current_pipeline: self.current_pipeline.clone(),
current_pipeline: self.current_pipeline,
}
}
}


@ -34,6 +34,10 @@ impl VertexAttributeValues {
}
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
// TODO: add vertex format as parameter here and perform type conversions
pub fn get_bytes(&self) -> &[u8] {
match self {
@ -224,9 +228,9 @@ pub mod shape {
let mut normals = Vec::new();
let mut uvs = Vec::new();
for (position, normal, uv) in vertices.iter() {
positions.push(position.clone());
normals.push(normal.clone());
uvs.push(uv.clone());
positions.push(*position);
normals.push(*normal);
uvs.push(*uv);
}
let indices = vec![
@ -331,9 +335,9 @@ pub mod shape {
let mut normals = Vec::new();
let mut uvs = Vec::new();
for (position, normal, uv) in vertices.iter() {
positions.push(position.clone());
normals.push(normal.clone());
uvs.push(uv.clone());
positions.push(*position);
normals.push(*normal);
uvs.push(*uv);
}
Mesh {
@ -371,9 +375,9 @@ pub mod shape {
let mut normals = Vec::new();
let mut uvs = Vec::new();
for (position, normal, uv) in vertices.iter() {
positions.push(position.clone());
normals.push(normal.clone());
uvs.push(uv.clone());
positions.push(*position);
normals.push(*normal);
uvs.push(*uv);
}
Mesh {


@ -1,3 +1,4 @@
#[allow(clippy::module_inception)]
mod mesh;
mod vertex;


@ -1,4 +1,5 @@
mod ops;
#[allow(clippy::module_inception)]
mod pass;
mod render_pass;


@ -4,7 +4,7 @@ use std::{
hash::{Hash, Hasher},
};
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, Eq)]
pub struct BindGroupDescriptor {
pub index: u32,
pub bindings: Vec<BindingDescriptor>,
@ -35,9 +35,17 @@ impl BindGroupDescriptor {
impl Hash for BindGroupDescriptor {
fn hash<H: Hasher>(&self, state: &mut H) {
// TODO: remove index from hash state (or at least id). index is not considered a part of a bind group on the gpu.
// bind groups are bound to indices in pipelines
// TODO: remove index from hash state (or at least id), and update the PartialEq implem.
// index is not considered a part of a bind group on the gpu.
// bind groups are bound to indices in pipelines.
self.index.hash(state);
self.bindings.hash(state);
}
}
impl PartialEq for BindGroupDescriptor {
fn eq(&self, other: &Self) -> bool {
// This MUST be kept in sync with the hash implementation above
self.index == other.index && self.bindings == other.bindings
}
}
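
The comment above spells out the invariant behind switching from a derived `PartialEq` to a manual one: `Hash` and `PartialEq` must agree, or lookups in caches keyed by `BindGroupDescriptor` would silently miss. A standalone sketch of the invariant with an invented `Descriptor` type:

```rust
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

#[derive(Debug, Eq)]
struct Descriptor {
    index: u32,
    bindings: Vec<u32>,
}

impl PartialEq for Descriptor {
    fn eq(&self, other: &Self) -> bool {
        // Must stay in sync with the Hash impl below.
        self.index == other.index && self.bindings == other.bindings
    }
}

impl Hash for Descriptor {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.index.hash(state);
        self.bindings.hash(state);
    }
}

fn main() {
    let mut cache = HashMap::new();
    cache.insert(Descriptor { index: 0, bindings: vec![1, 2] }, "layout-a");

    // The lookup key is a distinct but equal value; consistent Hash + PartialEq
    // guarantee it finds the cached entry.
    let key = Descriptor { index: 0, bindings: vec![1, 2] };
    assert_eq!(cache.get(&key), Some(&"layout-a"));
}
```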


@ -1,5 +1,6 @@
mod bind_group;
mod binding;
#[allow(clippy::module_inception)]
mod pipeline;
mod pipeline_compiler;
mod pipeline_layout;


@ -29,8 +29,7 @@ impl Default for PipelineSpecialization {
impl PipelineSpecialization {
pub fn empty() -> &'static PipelineSpecialization {
pub static EMPTY: Lazy<PipelineSpecialization> =
Lazy::new(|| PipelineSpecialization::default());
pub static EMPTY: Lazy<PipelineSpecialization> = Lazy::new(PipelineSpecialization::default);
&EMPTY
}
}
@ -72,7 +71,7 @@ impl PipelineCompiler {
let specialized_shaders = self
.specialized_shaders
.entry(*shader_handle)
.or_insert_with(|| Vec::new());
.or_insert_with(Vec::new);
let shader = shaders.get(shader_handle).unwrap();
@ -172,7 +171,7 @@ impl PipelineCompiler {
let specialized_pipelines = self
.specialized_pipelines
.entry(source_pipeline)
.or_insert_with(|| Vec::new());
.or_insert_with(Vec::new);
specialized_pipelines.push(SpecializedPipeline {
pipeline: specialized_pipeline_handle,
specialization: pipeline_specialization.clone(),


@ -113,10 +113,7 @@ impl UniformProperty {
UniformProperty::Vec4 => 4 * 4,
UniformProperty::Mat3 => 4 * 4 * 3,
UniformProperty::Mat4 => 4 * 4 * 4,
UniformProperty::Struct(properties) => properties
.iter()
.map(|p| p.get_size())
.fold(0, |total, size| total + size),
UniformProperty::Struct(properties) => properties.iter().map(|p| p.get_size()).sum(),
UniformProperty::Array(property, length) => property.get_size() * *length as u64,
}
}


@ -7,7 +7,9 @@ use crate::{
use bevy_asset::Handle;
use bevy_ecs::{Query, Res, ResMut};
use bevy_property::Properties;
#[derive(Properties, Default, Clone)]
#[non_exhaustive]
pub struct RenderPipeline {
pub pipeline: Handle<PipelineDescriptor>,
pub specialization: PipelineSpecialization,
@ -28,7 +30,6 @@ impl RenderPipeline {
RenderPipeline {
pipeline,
specialization,
..Default::default()
}
}
}


@ -54,6 +54,7 @@ impl CommandQueue {
});
}
#[allow(clippy::too_many_arguments)]
pub fn copy_buffer_to_texture(
&mut self,
source_buffer: BufferId,


@ -276,7 +276,7 @@ impl RenderGraph {
}
pub fn take_commands(&mut self) -> Commands {
std::mem::replace(&mut self.commands, Commands::default())
std::mem::take(&mut self.commands)
}
}


@ -9,6 +9,7 @@ use uuid::Uuid;
pub struct NodeId(Uuid);
impl NodeId {
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
NodeId(Uuid::new_v4())
}
@ -48,7 +49,7 @@ pub struct Edges {
impl Edges {
pub(crate) fn add_input_edge(&mut self, edge: Edge) -> Result<(), RenderGraphError> {
if self.has_input_edge(&edge) {
return Err(RenderGraphError::EdgeAlreadyExists(edge.clone()));
return Err(RenderGraphError::EdgeAlreadyExists(edge));
}
self.input_edges.push(edge);
Ok(())
@ -56,7 +57,7 @@ impl Edges {
pub(crate) fn add_output_edge(&mut self, edge: Edge) -> Result<(), RenderGraphError> {
if self.has_output_edge(&edge) {
return Err(RenderGraphError::EdgeAlreadyExists(edge.clone()));
return Err(RenderGraphError::EdgeAlreadyExists(edge));
}
self.output_edges.push(edge);
Ok(())


@ -98,6 +98,10 @@ impl ResourceSlots {
pub fn len(&self) -> usize {
self.slots.len()
}
pub fn is_empty(&self) -> bool {
self.slots.is_empty()
}
}
impl From<&ResourceSlotInfo> for ResourceSlot {


@ -67,7 +67,7 @@ impl Node for TextureCopyNode {
texture_resource.get_texture().unwrap(),
[0, 0, 0],
0,
texture_descriptor.size.clone(),
texture_descriptor.size,
);
render_context.resources().remove_buffer(texture_buffer);
}


@ -75,7 +75,7 @@ impl Stages {
node_borrows.push((node, indices));
}
node_borrows.sort_by_key(|(_node, indices)| indices.clone());
node_borrows.sort_by_key(|(_node, indices)| <&NodeIndices>::clone(indices));
let mut last_stage = usize::MAX;
let mut last_job = usize::MAX;
for (node, indices) in node_borrows.drain(..) {
@ -159,7 +159,7 @@ impl RenderGraphStager for DependentNodeStager {
// get all nodes without input. this intentionally includes nodes with no outputs
let output_only_nodes = render_graph
.iter_nodes()
.filter(|node| node.input_slots.len() == 0);
.filter(|node| node.input_slots.is_empty());
let mut stages = vec![Stage::default()];
let mut node_stages = HashMap::new();
for output_only_node in output_only_nodes {
@ -190,8 +190,7 @@ fn stage_node(
.edges
.input_edges
.iter()
.find(|e| !node_stages_and_jobs.contains_key(&e.get_output_node()))
.is_some()
.any(|e| !node_stages_and_jobs.contains_key(&e.get_output_node()))
{
return;
}


@ -16,6 +16,7 @@ pub trait RenderContext {
destination_offset: u64,
size: u64,
);
#[allow(clippy::too_many_arguments)]
fn copy_buffer_to_texture(
&mut self,
source_buffer: BufferId,


@ -4,6 +4,7 @@ use uuid::Uuid;
pub struct BufferId(Uuid);
impl BufferId {
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
BufferId(Uuid::new_v4())
}


@ -1,5 +1,6 @@
mod bind_group;
mod buffer;
#[allow(clippy::module_inception)]
mod render_resource;
mod render_resource_bindings;
mod shared_buffers;


@ -11,7 +11,7 @@ use std::{
};
use uuid::Uuid;
#[derive(Clone, Eq, PartialEq, Debug)]
#[derive(Clone, Eq, Debug)]
pub enum RenderResourceBinding {
Buffer {
buffer: BufferId,
@ -48,6 +48,34 @@ impl RenderResourceBinding {
}
}
impl PartialEq for RenderResourceBinding {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(
RenderResourceBinding::Buffer {
buffer: self_buffer,
range: self_range,
dynamic_index: _,
},
RenderResourceBinding::Buffer {
buffer: other_buffer,
range: other_range,
dynamic_index: _,
},
) => self_buffer == other_buffer && self_range == other_range,
(
RenderResourceBinding::Texture(self_texture),
RenderResourceBinding::Texture(other_texture),
) => RenderResourceId::from(*self_texture) == RenderResourceId::from(*other_texture),
(
RenderResourceBinding::Sampler(self_sampler),
RenderResourceBinding::Sampler(other_sampler),
) => RenderResourceId::from(*self_sampler) == RenderResourceId::from(*other_sampler),
_ => false,
}
}
}
impl Hash for RenderResourceBinding {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
match self {
@ -117,7 +145,7 @@ impl RenderResourceBindings {
for (name, (vertex_buffer, index_buffer)) in render_resource_bindings.vertex_buffers.iter()
{
self.set_vertex_buffer(name, *vertex_buffer, index_buffer.clone());
self.set_vertex_buffer(name, *vertex_buffer, *index_buffer);
}
}
@ -241,7 +269,7 @@ impl AssetRenderResourceBindings {
pub fn get_or_insert_mut<T>(&mut self, handle: Handle<T>) -> &mut RenderResourceBindings {
self.bindings
.entry(HandleUntyped::from(handle))
.or_insert_with(|| RenderResourceBindings::default())
.or_insert_with(RenderResourceBindings::default)
}
pub fn get_mut<T>(&mut self, handle: Handle<T>) -> Option<&mut RenderResourceBindings> {


@ -85,7 +85,7 @@ impl SharedBuffers {
pub fn reset_command_queue(&self) -> CommandQueue {
let mut command_queue = self.command_queue.write().unwrap();
std::mem::replace(&mut *command_queue, CommandQueue::default())
std::mem::take(&mut *command_queue)
}
}


@ -4,6 +4,7 @@ use uuid::Uuid;
pub struct TextureId(Uuid);
impl TextureId {
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
TextureId(Uuid::new_v4())
}
@ -13,6 +14,7 @@ impl TextureId {
pub struct SamplerId(Uuid);
impl SamplerId {
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
SamplerId(Uuid::new_v4())
}


@ -1,3 +1,4 @@
#[allow(clippy::module_inception)]
mod shader;
mod shader_defs;
mod shader_reflect;


@ -61,7 +61,7 @@ impl ShaderLayout {
} else {
let parts = vertex_attribute_descriptor
.name
.splitn(3, "_")
.splitn(3, '_')
.collect::<Vec<&str>>();
if parts.len() == 3 {
if parts[0] == "I" {
@ -82,13 +82,11 @@ impl ShaderLayout {
};
if let Some(current) = current_descriptor.as_mut() {
if &current.name == &current_buffer_name {
if current.name == current_buffer_name {
current.attributes.push(vertex_attribute_descriptor);
continue;
} else {
if visited_buffer_descriptors.contains(&current_buffer_name) {
panic!("Vertex attribute buffer names must be consecutive.")
}
} else if visited_buffer_descriptors.contains(&current_buffer_name) {
panic!("Vertex attribute buffer names must be consecutive.")
}
}


@ -13,7 +13,7 @@ impl AssetLoader<Texture> for HdrTextureLoader {
let format = TextureFormat::Rgba32Float;
debug_assert_eq!(
format.pixel_size(),
4 * 4 * 1,
4 * 4,
"Format should have 32bit x 4 size"
);


@ -108,28 +108,29 @@ impl AssetLoader<Texture> for ImageTextureLoader {
data = raw_data.as_slice().as_bytes().to_owned();
}
image::DynamicImage::ImageRgb16(i) => {
width = i.width();
height = i.height();
image::DynamicImage::ImageRgb16(image) => {
width = image.width();
height = image.height();
format = TextureFormat::Rgba16Uint;
let mut d =
let mut local_data =
Vec::with_capacity(width as usize * height as usize * format.pixel_size());
for pixel in i.into_raw().chunks_exact(3) {
for pixel in image.into_raw().chunks_exact(3) {
// TODO unsafe_get in release builds?
let r = pixel[0];
let g = pixel[1];
let b = pixel[2];
let a = u16::max_value();
d.extend_from_slice(&r.to_ne_bytes());
d.extend_from_slice(&g.to_ne_bytes());
d.extend_from_slice(&b.to_ne_bytes());
d.extend_from_slice(&a.to_ne_bytes());
local_data.extend_from_slice(&r.to_ne_bytes());
local_data.extend_from_slice(&g.to_ne_bytes());
local_data.extend_from_slice(&b.to_ne_bytes());
local_data.extend_from_slice(&a.to_ne_bytes());
}
data = d;
data = local_data;
}
image::DynamicImage::ImageRgba16(i) => {
width = i.width();


@ -3,6 +3,7 @@ mod hdr_texture_loader;
#[cfg(feature = "png")]
mod image_texture_loader;
mod sampler_descriptor;
#[allow(clippy::module_inception)]
mod texture;
mod texture_descriptor;
mod texture_dimension;


@ -155,7 +155,7 @@ impl RenderResource for Option<Handle<Texture>> {
}
fn texture(&self) -> Option<Handle<Texture>> {
self.clone()
*self
}
}
@ -171,6 +171,6 @@ impl RenderResource for Handle<Texture> {
}
fn texture(&self) -> Option<Handle<Texture>> {
Some(self.clone())
Some(*self)
}
}


@ -14,6 +14,7 @@ pub struct Error {
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Clone, Debug, PartialEq)]
#[non_exhaustive]
pub enum ErrorCode {
Io(String),
Message(String),
@ -54,9 +55,6 @@ pub enum ErrorCode {
Utf8Error(Utf8Error),
TrailingCharacters,
#[doc(hidden)]
__Nonexhaustive,
}
impl fmt::Display for Error {
@ -108,7 +106,6 @@ impl fmt::Display for ErrorCode {
ErrorCode::UnderscoreAtBeginning => f.write_str("Found underscore at the beginning"),
ErrorCode::UnexpectedByte(_) => f.write_str("Unexpected byte"),
ErrorCode::TrailingCharacters => f.write_str("Non-whitespace trailing characters"),
_ => f.write_str("Unknown ErrorCode"),
}
}
}


@ -62,6 +62,7 @@ struct Pretty {
/// .indentor("\t".to_owned());
/// ```
#[derive(Clone, Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct PrettyConfig {
/// Limit the pretty-ness up to the given depth.
#[serde(default = "default_depth_limit")]
@ -83,9 +84,6 @@ pub struct PrettyConfig {
pub decimal_floats: bool,
/// Enable extensions. Only configures 'implicit_some' for now.
pub extensions: Extensions,
/// Private field to ensure adding a field is non-breaking.
#[serde(skip)]
_future_proof: (),
}
impl PrettyConfig {
@ -206,7 +204,6 @@ impl Default for PrettyConfig {
enumerate_arrays: default_enumerate_arrays(),
extensions: Extensions::default(),
decimal_floats: default_decimal_floats(),
_future_proof: (),
}
}
}
@ -391,7 +388,7 @@ impl<'a, W: io::Write> ser::Serializer for &'a mut Serializer<W> {
fn serialize_f32(self, v: f32) -> Result<()> {
write!(self.output, "{}", v)?;
// TODO: use f32::EPSILON when minimum supported rust version is 1.43
pub const EPSILON: f32 = 1.19209290e-07_f32;
pub const EPSILON: f32 = 1.1920929e-7_f32;
if self.decimal_floats() && (v - v.floor()).abs() < EPSILON {
write!(self.output, ".0")?;
}
@ -401,7 +398,7 @@ impl<'a, W: io::Write> ser::Serializer for &'a mut Serializer<W> {
fn serialize_f64(self, v: f64) -> Result<()> {
write!(self.output, "{}", v)?;
// TODO: use f64::EPSILON when minimum supported rust version is 1.43
pub const EPSILON: f64 = 2.2204460492503131e-16_f64;
pub const EPSILON: f64 = 2.220446049250313e-16_f64;
if self.decimal_floats() && (v - v.floor()).abs() < EPSILON {
write!(self.output, ".0")?;
}


@ -73,7 +73,7 @@ impl SceneSpawner {
let spawned = self
.spawned_scenes
.entry(scene_handle)
.or_insert_with(|| Vec::new());
.or_insert_with(Vec::new);
spawned.push(instance_id);
Ok(())
}
@ -98,7 +98,7 @@ impl SceneSpawner {
*instance_info
.entity_map
.entry(scene_entity.entity)
.or_insert_with(|| bevy_ecs::Entity::new())
.or_insert_with(bevy_ecs::Entity::new)
} else {
bevy_ecs::Entity::from_id(scene_entity.entity)
};


@ -109,10 +109,10 @@ pub fn build_sprite_pipeline(shaders: &mut Assets<Shader>) -> PipelineDescriptor
}
pub mod node {
pub const COLOR_MATERIAL: &'static str = "color_material";
pub const SPRITE: &'static str = "sprite";
pub const SPRITE_SHEET: &'static str = "sprite_sheet";
pub const SPRITE_SHEET_SPRITE: &'static str = "sprite_sheet_sprite";
pub const COLOR_MATERIAL: &str = "color_material";
pub const SPRITE: &str = "sprite";
pub const SPRITE_SHEET: &str = "sprite_sheet";
pub const SPRITE_SHEET_SPRITE: &str = "sprite_sheet_sprite";
}
pub trait SpriteRenderGraphBuilder {
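
The node-name constants above show clippy's `redundant_static_lifetimes` lint, which this PR applies to every `&'static str` constant and static; the same rewrite appears again in the UI modules below. A tiny sketch:

```rust
// `static` and `const` items already have the 'static lifetime, so writing it
// out is redundant.
#[allow(clippy::redundant_static_lifetimes)]
static BEFORE: &'static str = "color_material";
static AFTER: &str = "color_material";

fn main() {
    assert_eq!(BEFORE, AFTER);
}
```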


@ -93,6 +93,10 @@ impl TextureAtlas {
self.textures.len()
}
pub fn is_empty(&self) -> bool {
self.textures.is_empty()
}
pub fn get_texture_index(&self, texture: Handle<Texture>) -> Option<usize> {
self.texture_handles
.as_ref()


@ -25,7 +25,7 @@ impl Default for Translation {
impl From<Vec3> for Translation {
fn from(translation: Vec3) -> Self {
Self(Vec3::from(translation))
Self(translation)
}
}


@ -32,7 +32,7 @@ impl WorldWriter for InsertChildren {
// NOTE: ideally this is just an else statement, but currently that _incorrectly_ fails borrow-checking
if !added {
world
.insert_one(self.parent, Children(SmallVec::from(self.children)))
.insert_one(self.parent, Children(self.children))
.unwrap();
}
}
@ -73,7 +73,7 @@ impl WorldWriter for PushChildren {
// NOTE: ideally this is just an else statement, but currently that _incorrectly_ fails borrow-checking
if !added {
world
.insert_one(self.parent, Children(SmallVec::from(self.children)))
.insert_one(self.parent, Children(self.children))
.unwrap();
}
}


@ -14,13 +14,10 @@ where
{
// TODO: not a huge fan of this pattern. are there ways to do recursive updates in legion without allocations?
// TODO: the problem above might be resolvable with world splitting
let children = children_query.get::<Children>(entity).ok().map(|children| {
children
.0
.iter()
.map(|entity| *entity)
.collect::<Vec<Entity>>()
});
let children = children_query
.get::<Children>(entity)
.ok()
.map(|children| children.0.iter().cloned().collect::<Vec<Entity>>());
let parent_result = run(state, entity, parent_result, previous_result);
previous_result = None;
@ -47,13 +44,11 @@ pub struct DespawnRecursive {
}
fn despawn_with_children_recursive(world: &mut World, entity: Entity) {
if let Some(children) = world.get::<Children>(entity).ok().map(|children| {
children
.0
.iter()
.map(|entity| *entity)
.collect::<Vec<Entity>>()
}) {
if let Some(children) = world
.get::<Children>(entity)
.ok()
.map(|children| children.0.iter().cloned().collect::<Vec<Entity>>())
{
for e in children {
despawn_with_children_recursive(world, e);
}


@ -1,4 +1,5 @@
mod child_builder;
#[allow(clippy::module_inception)]
mod hierarchy;
mod hierarchy_maintenance_system;
mod world_child_builder;


@ -59,10 +59,8 @@ impl ComponentRegistry {
let mut registration = self.get_with_short_name(type_name);
if registration.is_none() {
registration = self.get_with_full_name(type_name);
if registration.is_none() {
if self.ambigous_names.contains(type_name) {
panic!("Type name is ambiguous: {}", type_name);
}
if registration.is_none() && self.ambigous_names.contains(type_name) {
panic!("Type name is ambiguous: {}", type_name);
}
}
registration
@ -99,11 +97,7 @@ impl ComponentRegistration {
component_properties_fn: |archetype: &Archetype, index: usize| {
// the type has been looked up by the caller, so this is safe
unsafe {
let ptr = archetype
.get::<T>()
.unwrap()
.as_ptr()
.offset(index as isize);
let ptr = archetype.get::<T>().unwrap().as_ptr().add(index);
ptr.as_ref().unwrap()
}
},


@ -44,8 +44,8 @@ impl From<&Style> for stretch::style::Style {
margin: from_rect(value.margin),
padding: from_rect(value.padding),
border: from_rect(value.border),
flex_grow: value.flex_grow.into(),
flex_shrink: value.flex_shrink.into(),
flex_grow: value.flex_grow,
flex_shrink: value.flex_shrink,
flex_basis: value.flex_basis.into(),
size: from_size(value.size),
min_size: from_size(value.min_size),


@ -31,8 +31,7 @@ impl FlexSurface {
let stretch_style = style.into();
let stretch_node = self.entity_to_stretch.entry(entity).or_insert_with(|| {
added = true;
let stretch_node = stretch.new_node(stretch_style, Vec::new()).unwrap();
stretch_node
stretch.new_node(stretch_style, Vec::new()).unwrap()
});
if !added {


@ -33,7 +33,7 @@ use update::ui_z_system;
pub struct UiPlugin;
pub mod stage {
pub const UI: &'static str = "ui";
pub const UI: &str = "ui";
}
impl Plugin for UiPlugin {


@ -66,13 +66,13 @@ pub fn build_ui_pipeline(shaders: &mut Assets<Shader>) -> PipelineDescriptor {
}
pub mod node {
pub const UI_CAMERA: &'static str = "ui_camera";
pub const NODE: &'static str = "node";
pub const UI_PASS: &'static str = "ui_pass";
pub const UI_CAMERA: &str = "ui_camera";
pub const NODE: &str = "node";
pub const UI_PASS: &str = "ui_pass";
}
pub mod camera {
pub const UI_CAMERA: &'static str = "UiCamera";
pub const UI_CAMERA: &str = "UiCamera";
}
pub trait UiRenderGraphBuilder {


@ -51,5 +51,5 @@ fn update_node_entity(
position.set_z(z);
transform.set_w_axis(position);
return Some(global_z);
Some(global_z)
}


@ -21,15 +21,15 @@ pub fn image_node_system(
mut query: Query<(&Image, &mut CalculatedSize, &Handle<ColorMaterial>)>,
) {
for (_image, mut calculated_size, material_handle) in &mut query.iter() {
materials
if let Some(texture) = materials
.get(material_handle)
.and_then(|material| material.texture)
.and_then(|texture_handle| textures.get(&texture_handle))
.map(|texture| {
calculated_size.size = Size {
width: texture.size.x(),
height: texture.size.y(),
};
});
{
calculated_size.size = Size {
width: texture.size.x(),
height: texture.size.y(),
};
}
}
}


@ -49,6 +49,7 @@ pub fn text_system(
}
}
#[allow(clippy::too_many_arguments)]
pub fn draw_text_system(
mut draw_context: DrawContext,
fonts: Res<Assets<Font>>,


@ -59,6 +59,7 @@ impl WgpuRenderResourceContext {
);
}
#[allow(clippy::too_many_arguments)]
pub fn copy_buffer_to_texture(
&self,
command_encoder: &mut wgpu::CommandEncoder,
@ -466,7 +467,7 @@ impl RenderResourceContext for WgpuRenderResourceContext {
RenderResourceBinding::Texture(resource) => {
let texture_view = texture_views
.get(&resource)
.expect(&format!("{:?}", resource));
.unwrap_or_else(|| panic!("{:?}", resource));
wgpu::BindingResource::TextureView(texture_view)
}
RenderResourceBinding::Sampler(resource) => {
@ -495,7 +496,7 @@ impl RenderResourceContext for WgpuRenderResourceContext {
let bind_group_info = bind_groups
.entry(bind_group_descriptor_id)
.or_insert_with(|| WgpuBindGroupInfo::default());
.or_insert_with(WgpuBindGroupInfo::default);
bind_group_info
.bind_groups
.insert(bind_group.id, wgpu_bind_group);
@ -540,7 +541,7 @@ impl RenderResourceContext for WgpuRenderResourceContext {
let buffer_slice = buffer.slice(..);
let data = buffer_slice.map_async(wgpu::MapMode::Write);
self.device.poll(wgpu::Maintain::Wait);
if let Err(_) = pollster::block_on(data) {
if pollster::block_on(data).is_err() {
panic!("failed to map buffer to host");
}
}


@ -61,7 +61,7 @@ impl<'a> RenderPass for WgpuRenderPass<'a> {
.get(&bind_group_descriptor_id)
{
if let Some(wgpu_bind_group) = bind_group_info.bind_groups.get(&bind_group) {
const EMPTY: &'static [u32] = &[];
const EMPTY: &[u32] = &[];
let dynamic_uniform_indices =
if let Some(dynamic_uniform_indices) = dynamic_uniform_indices {
dynamic_uniform_indices


@ -182,16 +182,12 @@ impl WgpuFrom<&BindType> for wgpu::BindingType {
match bind_type {
BindType::Uniform { dynamic, .. } => wgpu::BindingType::UniformBuffer {
dynamic: *dynamic,
min_binding_size: bind_type
.get_uniform_size()
.and_then(|size| wgpu::BufferSize::new(size)),
min_binding_size: bind_type.get_uniform_size().and_then(wgpu::BufferSize::new),
},
BindType::StorageBuffer { dynamic, readonly } => wgpu::BindingType::StorageBuffer {
dynamic: *dynamic,
readonly: *readonly,
min_binding_size: bind_type
.get_uniform_size()
.and_then(|size| wgpu::BufferSize::new(size)),
min_binding_size: bind_type.get_uniform_size().and_then(wgpu::BufferSize::new),
},
BindType::SampledTexture {
dimension,
@ -529,7 +525,7 @@ impl WgpuFrom<SamplerDescriptor> for wgpu::SamplerDescriptor<'_> {
lod_min_clamp: sampler_descriptor.lod_min_clamp,
lod_max_clamp: sampler_descriptor.lod_max_clamp,
compare: sampler_descriptor.compare_function.map(|c| c.wgpu_into()),
anisotropy_clamp: sampler_descriptor.anisotropy_clamp.clone(),
anisotropy_clamp: sampler_descriptor.anisotropy_clamp,
..Default::default()
}
}


@ -44,11 +44,11 @@ impl Plugin for WindowPlugin {
let window_descriptor = resources
.get::<WindowDescriptor>()
.map(|descriptor| (*descriptor).clone())
.unwrap_or_else(|| WindowDescriptor::default());
.unwrap_or_else(WindowDescriptor::default);
let mut create_window_event = resources.get_mut::<Events<CreateWindow>>().unwrap();
create_window_event.send(CreateWindow {
id: WindowId::primary(),
descriptor: window_descriptor.clone(),
descriptor: window_descriptor,
});
}


@ -15,8 +15,12 @@ pub fn exit_on_window_close_system(
mut app_exit_events: ResMut<Events<AppExit>>,
window_close_requested_events: Res<Events<WindowCloseRequested>>,
) {
for _ in state.event_reader.iter(&window_close_requested_events) {
if state
.event_reader
.iter(&window_close_requested_events)
.next()
.is_some()
{
app_exit_events.send(AppExit);
break;
}
}


@ -15,9 +15,13 @@ impl WindowId {
pub fn is_primary(&self) -> bool {
*self == WindowId::primary()
}
}
pub fn to_string(&self) -> String {
self.0.to_simple().to_string()
use std::fmt;
impl fmt::Display for WindowId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.to_simple().fmt(f)
}
}
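
Replacing `WindowId`'s inherent `to_string` with a `Display` impl follows clippy's `inherent_to_string` lint: once `Display` exists, the blanket `ToString` impl supplies `.to_string()` for free and the type also works with `format!`. A sketch with a `u64` standing in for the real `Uuid`:

```rust
use std::fmt;

struct WindowId(u64);

impl fmt::Display for WindowId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render as zero-padded hex, purely for illustration.
        write!(f, "{:016x}", self.0)
    }
}

fn main() {
    let id = WindowId(42);
    assert_eq!(id.to_string(), "000000000000002a"); // via the blanket ToString impl
    println!("primary window: {}", id);
}
```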
@ -65,6 +69,7 @@ impl Window {
}
#[derive(Debug, Clone)]
#[allow(clippy::manual_non_exhaustive)]
pub struct WindowDescriptor {
pub width: u32,
pub height: u32,
@ -72,6 +77,11 @@ pub struct WindowDescriptor {
pub vsync: bool,
pub resizable: bool,
pub mode: WindowMode,
// this is a manual implementation of the non exhaustive pattern,
// especially made to allow ..Default::default()
#[doc(hidden)]
pub __non_exhaustive: (),
}
impl Default for WindowDescriptor {
@ -83,6 +93,7 @@ impl Default for WindowDescriptor {
vsync: true,
resizable: true,
mode: WindowMode::Windowed,
__non_exhaustive: (),
}
}
}


@ -7,9 +7,7 @@ pub fn convert_keyboard_input(keyboard_input: &winit::event::KeyboardInput) -> K
KeyboardInput {
scan_code: keyboard_input.scancode,
state: convert_element_state(keyboard_input.state),
key_code: keyboard_input
.virtual_keycode
.map(|v| convert_virtual_key_code(v)),
key_code: keyboard_input.virtual_keycode.map(convert_virtual_key_code),
}
}


@ -115,22 +115,21 @@ pub fn winit_runner(mut app: App) {
let mut mouse_button_input_events =
app.resources.get_mut::<Events<MouseButtonInput>>().unwrap();
mouse_button_input_events.send(MouseButtonInput {
button: converters::convert_mouse_button(button.into()),
button: converters::convert_mouse_button(button),
state: converters::convert_element_state(state),
});
}
_ => {}
},
event::Event::DeviceEvent { ref event, .. } => match event {
DeviceEvent::MouseMotion { delta } => {
event::Event::DeviceEvent { ref event, .. } => {
if let DeviceEvent::MouseMotion { delta } = event {
let mut mouse_motion_events =
app.resources.get_mut::<Events<MouseMotion>>().unwrap();
mouse_motion_events.send(MouseMotion {
delta: Vec2::new(delta.0 as f32, delta.1 as f32),
});
}
_ => {}
},
}
event::Event::MainEventsCleared => {
handle_create_window_events(
&mut app.resources,


@ -17,7 +17,7 @@ struct Rotator;
/// rotates the parent, which will result in the child also rotating
fn rotator_system(time: Res<Time>, mut query: Query<(&Rotator, &mut Rotation)>) {
for (_rotator, mut rotation) in &mut query.iter() {
rotation.0 = rotation.0 * Quat::from_rotation_x(3.0 * time.delta_seconds);
rotation.0 *= Quat::from_rotation_x(3.0 * time.delta_seconds);
}
}


@ -21,7 +21,7 @@ struct Rotator;
/// rotates the parent, which will result in the child also rotating
fn rotator_system(time: Res<Time>, mut query: Query<(&Rotator, &mut Rotation)>) {
for (_rotator, mut rotation) in &mut query.iter() {
rotation.0 = rotation.0 * Quat::from_rotation_x(3.0 * time.delta_seconds);
rotation.0 *= Quat::from_rotation_x(3.0 * time.delta_seconds);
}
}


@ -74,7 +74,7 @@ fn setup(
font: asset_server.load("assets/fonts/FiraSans-Bold.ttf").unwrap(),
value: "Score:".to_string(),
style: TextStyle {
color: Color::rgb(0.2, 0.2, 0.8).into(),
color: Color::rgb(0.2, 0.2, 0.8),
font_size: 40.0,
},
},
@ -219,7 +219,7 @@ fn ball_collision_system(
let collision = collide(ball_translation.0, ball_size, translation.0, sprite.size);
if let Some(collision) = collision {
// scorable colliders should be despawned and increment the scoreboard on collision
if let &Collider::Scorable = collider {
if let Collider::Scorable = *collider {
scoreboard.score += 1;
commands.despawn(collider_entity);
}


@ -38,7 +38,7 @@ fn atlas_render_system(
return;
}
if let Some(set) = font_atlas_sets.get(&state.handle.as_handle::<FontAtlasSet>()) {
for (_size, font_atlas) in set.iter() {
if let Some((_size, font_atlas)) = set.iter().next() {
state.added = true;
let texture_atlas = texture_atlases.get(&font_atlas.texture_atlas).unwrap();
commands.spawn(ImageComponents {
@ -54,7 +54,6 @@ fn atlas_render_system(
},
..Default::default()
});
break;
}
}
}