Merge pull request #206 from multun/clippy

Add clippy support and fix all warnings / errors
Carter Anderson 2020-08-19 21:33:28 -07:00 committed by GitHub
commit e31f576484
99 changed files with 451 additions and 367 deletions

@@ -26,7 +26,7 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           toolchain: nightly
-          components: rustfmt
+          components: rustfmt, clippy
       - name: Install alsa
         run: sudo apt-get install libasound2-dev
@@ -37,6 +37,15 @@ jobs:
       - name: Check the format
         run: cargo +nightly fmt --all -- --check
+      # type complexity must be ignored because we use huge templates for queries
+      - name: Run clippy
+        run: >
+          cargo +nightly clippy
+          --all-targets
+          --all-features
+          --
+          -D warnings
+          -A clippy::type_complexity
       - name: Run tests
         run: cargo test --workspace

@@ -1,6 +1,7 @@
 use crate::app_builder::AppBuilder;
 use bevy_ecs::{ParallelExecutor, Resources, Schedule, World};
+#[allow(clippy::needless_doctest_main)]
 /// Containers of app logic and data
 ///
 /// App store the ECS World, Resources, Schedule, and Executor. They also store the "run" function of the App, which

@@ -39,7 +39,7 @@ impl AppBuilder {
     }
     pub fn run(&mut self) {
-        let app = std::mem::replace(&mut self.app, App::default());
+        let app = std::mem::take(&mut self.app);
         app.run();
     }
@@ -209,7 +209,7 @@ impl AppBuilder {
     where
         R: FromResources + Send + Sync + 'static,
     {
-        let resource = R::from_resources(&mut self.app.resources);
+        let resource = R::from_resources(&self.app.resources);
         self.app.resources.insert(resource);
         self
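
Note (added for context, not part of the commit): a minimal sketch of why clippy suggests `std::mem::take` here. It swaps in `T::default()` and hands back the old value, which is exactly what `mem::replace(&mut x, T::default())` spelled out.

    fn main() {
        let mut items = vec![1, 2, 3];
        // equivalent to: std::mem::replace(&mut items, Vec::default())
        let owned = std::mem::take(&mut items);
        assert_eq!(owned, vec![1, 2, 3]);
        assert!(items.is_empty()); // `items` now holds its Default value
    }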

@@ -159,7 +159,7 @@ impl<T> EventReader<T> {
         events: &'a Events<T>,
         predicate: impl FnMut(&&T) -> bool,
     ) -> Option<&'a T> {
-        self.iter(events).rev().filter(predicate).next()
+        self.iter(events).rev().find(predicate)
     }
     /// Retrieves the earliest event in `events` that this reader hasn't seen yet. This updates the EventReader's
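
Note (illustration only): the clippy::filter_next lint behind this change. `iter.filter(p).next()` and `iter.find(p)` return the same value; `find` just states the intent without the extra adapter.

    fn main() {
        let xs = [1, 2, 3, 4];
        let via_filter = xs.iter().rev().filter(|&&x| x % 2 == 0).next();
        let via_find = xs.iter().rev().find(|&&x| x % 2 == 0);
        assert_eq!(via_filter, via_find); // both are Some(&4)
    }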

@@ -172,7 +172,7 @@ impl AssetServer {
     pub fn watch_for_changes(&self) -> Result<(), AssetServerError> {
         let mut filesystem_watcher = self.filesystem_watcher.write().unwrap();
-        let _ = filesystem_watcher.get_or_insert_with(|| FilesystemWatcher::default());
+        let _ = filesystem_watcher.get_or_insert_with(FilesystemWatcher::default);
         // watch current files
         let asset_info_paths = self.asset_info_paths.read().unwrap();
         for asset_path in asset_info_paths.keys() {
@@ -186,45 +186,38 @@ impl AssetServer {
     pub fn filesystem_watcher_system(asset_server: Res<AssetServer>) {
         use notify::event::{Event, EventKind, ModifyKind};
         let mut changed = HashSet::new();
-        loop {
-            let result = if let Some(filesystem_watcher) =
+        while let Some(filesystem_watcher) =
             asset_server.filesystem_watcher.read().unwrap().as_ref()
         {
-            match filesystem_watcher.receiver.try_recv() {
+            let result = match filesystem_watcher.receiver.try_recv() {
                 Ok(result) => result,
                 Err(TryRecvError::Empty) => {
                     break;
                 }
                 Err(TryRecvError::Disconnected) => panic!("FilesystemWatcher disconnected"),
-            }
-            } else {
-                break;
             };
             let event = result.unwrap();
-            match event {
-                Event {
+            if let Event {
                 kind: EventKind::Modify(ModifyKind::Data(_)),
                 paths,
                 ..
-            } => {
+            } = event
+            {
                 for path in paths.iter() {
                     if !changed.contains(path) {
                         let root_path = asset_server.get_root_path().unwrap();
                         let relative_path = path.strip_prefix(root_path).unwrap();
                         match asset_server.load_untyped(relative_path) {
                             Ok(_) => {}
-                            Err(AssetServerError::AssetLoadError(error)) => {
-                                panic!("{:?}", error)
-                            }
+                            Err(AssetServerError::AssetLoadError(error)) => panic!("{:?}", error),
                             Err(_) => {}
                         }
                     }
                 }
                 changed.extend(paths);
             }
-            _ => {}
-        }
         }
     }
@@ -244,8 +237,7 @@ impl AssetServer {
     // TODO: add type checking here. people shouldn't be able to request a Handle<Texture> for a Mesh asset
     pub fn load<T, P: AsRef<Path>>(&self, path: P) -> Result<Handle<T>, AssetServerError> {
-        self.load_untyped(path)
-            .map(|handle_id| Handle::from(handle_id))
+        self.load_untyped(path).map(Handle::from)
     }
     pub fn load_sync<T: Resource, P: AsRef<Path>>(
@@ -338,15 +330,11 @@ impl AssetServer {
     }
     pub fn set_load_state(&self, handle_id: HandleId, load_state: LoadState) {
-        self.asset_info
-            .write()
-            .unwrap()
-            .get_mut(&handle_id)
-            .map(|asset_info| {
+        if let Some(asset_info) = self.asset_info.write().unwrap().get_mut(&handle_id) {
             if load_state.get_version() >= asset_info.load_state.get_version() {
                 asset_info.load_state = load_state;
             }
-        });
+        }
     }
     pub fn get_load_state_untyped(&self, handle_id: HandleId) -> Option<LoadState> {
@@ -453,7 +441,7 @@ impl AssetServer {
         ) {
             Ok(handle) => handle,
             Err(AssetServerError::MissingAssetHandler) => continue,
-            Err(err) => Err(err)?,
+            Err(err) => return Err(err),
         };
         handle_ids.push(handle);

@@ -17,6 +17,7 @@ pub(crate) const DEFAULT_HANDLE_ID: HandleId =
 pub struct HandleId(pub Uuid);
 impl HandleId {
+    #[allow(clippy::new_without_default)]
     pub fn new() -> HandleId {
         HandleId(Uuid::new_v4())
     }
@@ -151,7 +152,7 @@ impl<T> Default for Handle<T> {
 impl<T> Clone for Handle<T> {
     fn clone(&self) -> Self {
         Handle {
-            id: self.id.clone(),
+            id: self.id,
             marker: PhantomData,
         }
     }
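
Note (illustration only): the clippy::clone_on_copy lint behind `id: self.id` and similar changes below — calling `.clone()` on a Copy type is just a more verbose copy.

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Id(u128);

    fn main() {
        let a = Id(42);
        let b = a;         // preferred: implicit copy
        let c = a.clone(); // what the lint flags
        assert_eq!(b, c);
    }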

@@ -47,6 +47,7 @@ pub struct AssetChannel<T: 'static> {
 }
 impl<T> AssetChannel<T> {
+    #[allow(clippy::new_without_default)]
     pub fn new() -> Self {
         let (sender, receiver) = crossbeam_channel::unbounded();
         AssetChannel { sender, receiver }

@@ -144,7 +144,7 @@ where
     T: FromBytes,
 {
     fn from_bytes(bytes: &[u8]) -> Self {
-        if bytes.len() == 0 {
+        if bytes.is_empty() {
             None
         } else {
             Some(T::from_bytes(bytes))

@@ -11,6 +11,7 @@ use std::{
 #[derive(Debug, Copy, Clone, PartialOrd)]
 pub struct FloatOrd(pub f32);
+#[allow(clippy::derive_ord_xor_partial_ord)]
 impl Ord for FloatOrd {
     fn cmp(&self, other: &Self) -> Ordering {
         self.0.partial_cmp(&other.0).unwrap_or_else(|| {
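
Note (illustration only, not from the commit): the invariant behind clippy::derive_ord_xor_partial_ord, which the allow above acknowledges — a hand-written Ord must agree with PartialOrd. A common way to guarantee that is to write Ord once and delegate PartialOrd to it:

    use std::cmp::Ordering;

    #[derive(PartialEq, Eq)]
    struct Score(u32);

    impl Ord for Score {
        fn cmp(&self, other: &Self) -> Ordering {
            self.0.cmp(&other.0)
        }
    }

    impl PartialOrd for Score {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.cmp(other)) // delegate, so the two impls cannot disagree
        }
    }

    fn main() {
        assert!(Score(1) < Score(2));
    }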

@@ -77,7 +77,7 @@ pub(crate) fn entity_labels_system(
     let current_labels = entity_labels
         .entity_labels
         .entry(entity)
-        .or_insert_with(|| HashSet::default());
+        .or_insert_with(HashSet::default);
     for removed_label in current_labels.difference(&labels.labels) {
         if let Some(entities) = entity_labels.label_entities.get_mut(removed_label) {
             entities.retain(|e| *e != entity);
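
Note (illustration only): the clippy::redundant_closure lint that drives this and the many similar `or_insert_with` changes in this commit — a closure that only calls another function can be replaced by naming the function itself.

    use std::collections::HashMap;

    fn main() {
        let mut map: HashMap<&str, Vec<u32>> = HashMap::new();
        // before: map.entry("a").or_insert_with(|| Vec::new())
        map.entry("a").or_insert_with(Vec::new).push(1);
        assert_eq!(map["a"], vec![1]);
    }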

@@ -1,3 +1,4 @@
+#[allow(clippy::module_inception)]
 mod time;
 mod timer;

@@ -12,7 +12,7 @@ struct VertexAttributes {
     pub instance: bool,
 }
-static VERTEX_ATTRIBUTE_NAME: &'static str = "vertex";
+static VERTEX_ATTRIBUTE_NAME: &str = "vertex";
 pub fn derive_as_vertex_buffer_descriptor(input: TokenStream) -> TokenStream {
     let ast = parse_macro_input!(input as DeriveInput);
@@ -34,16 +34,12 @@ pub fn derive_as_vertex_buffer_descriptor(input: TokenStream) -> TokenStream {
             field
                 .attrs
                 .iter()
-                .find(|a| {
-                    a.path.get_ident().as_ref().unwrap().to_string() == VERTEX_ATTRIBUTE_NAME
-                })
-                .map_or_else(
-                    || VertexAttributes::default(),
-                    |a| {
+                .find(|a| *a.path.get_ident().as_ref().unwrap() == VERTEX_ATTRIBUTE_NAME)
+                .map_or_else(VertexAttributes::default, |a| {
                     syn::custom_keyword!(ignore);
                     let mut vertex_attributes = VertexAttributes::default();
                     a.parse_args_with(|input: ParseStream| {
-                        if let Some(_) = input.parse::<Option<ignore>>()? {
+                        if input.parse::<Option<ignore>>()?.is_some() {
                             vertex_attributes.ignore = true;
                             return Ok(());
                         }
@@ -52,8 +48,7 @@ pub fn derive_as_vertex_buffer_descriptor(input: TokenStream) -> TokenStream {
                     .expect("invalid 'vertex' attribute format");
                     vertex_attributes
-                    },
-                ),
+                }),
             )
         })
         .collect::<Vec<(&Field, VertexAttributes)>>();

@@ -44,9 +44,9 @@ pub fn get_modules(attributes: &[Attribute]) -> Modules {
     };
     for attribute in attributes.iter() {
-        if attribute.path.get_ident().as_ref().unwrap().to_string() == AS_CRATE_ATTRIBUTE_NAME {
+        if *attribute.path.get_ident().as_ref().unwrap() == AS_CRATE_ATTRIBUTE_NAME {
             let value = attribute.tokens.to_string();
-            if &value[1..value.len() - 1] == modules.bevy_render {
+            if value[1..value.len() - 1] == modules.bevy_render {
                 modules.bevy_render = "crate".to_string();
             }
         }

@@ -16,7 +16,7 @@ struct RenderResourceAttributes {
     pub from_self: bool,
 }
-static RENDER_RESOURCE_ATTRIBUTE_NAME: &'static str = "render_resources";
+static RENDER_RESOURCE_ATTRIBUTE_NAME: &str = "render_resources";
 pub fn derive_render_resources(input: TokenStream) -> TokenStream {
     let ast = parse_macro_input!(input as DeriveInput);
@@ -26,16 +26,12 @@ pub fn derive_render_resources(input: TokenStream) -> TokenStream {
     let attributes = ast
         .attrs
         .iter()
-        .find(|a| {
-            a.path.get_ident().as_ref().unwrap().to_string() == RENDER_RESOURCE_ATTRIBUTE_NAME
-        })
-        .map_or_else(
-            || RenderResourceAttributes::default(),
-            |a| {
+        .find(|a| *a.path.get_ident().as_ref().unwrap() == RENDER_RESOURCE_ATTRIBUTE_NAME)
+        .map_or_else(RenderResourceAttributes::default, |a| {
            syn::custom_keyword!(from_self);
            let mut attributes = RenderResourceAttributes::default();
            a.parse_args_with(|input: ParseStream| {
-                if let Some(_) = input.parse::<Option<from_self>>()? {
+                if input.parse::<Option<from_self>>()?.is_some() {
                    attributes.from_self = true;
                }
                Ok(())
@@ -43,8 +39,7 @@ pub fn derive_render_resources(input: TokenStream) -> TokenStream {
            .expect("invalid 'render_resources' attribute format");
            attributes
-            },
-        );
+        });
     let struct_name = &ast.ident;
     let struct_name_string = struct_name.to_string();
@@ -93,19 +88,16 @@ pub fn derive_render_resources(input: TokenStream) -> TokenStream {
                 .attrs
                 .iter()
                 .find(|a| {
-                    a.path.get_ident().as_ref().unwrap().to_string()
-                        == RENDER_RESOURCE_ATTRIBUTE_NAME
+                    *a.path.get_ident().as_ref().unwrap() == RENDER_RESOURCE_ATTRIBUTE_NAME
                 })
-                .map_or_else(
-                    || RenderResourceFieldAttributes::default(),
-                    |a| {
+                .map_or_else(RenderResourceFieldAttributes::default, |a| {
                    syn::custom_keyword!(ignore);
                    syn::custom_keyword!(buffer);
                    let mut attributes = RenderResourceFieldAttributes::default();
                    a.parse_args_with(|input: ParseStream| {
-                        if let Some(_) = input.parse::<Option<ignore>>()? {
+                        if input.parse::<Option<ignore>>()?.is_some() {
                            attributes.ignore = true;
-                        } else if let Some(_) = input.parse::<Option<buffer>>()? {
+                        } else if input.parse::<Option<buffer>>()?.is_some() {
                            attributes.buffer = true;
                        }
                        Ok(())
@@ -113,8 +105,7 @@ pub fn derive_render_resources(input: TokenStream) -> TokenStream {
                    .expect("invalid 'render_resources' attribute format");
                    attributes
-                    },
-                ),
+                }),
             )
         })
         .collect::<Vec<(&Field, RenderResourceFieldAttributes)>>();

@@ -5,7 +5,7 @@ use proc_macro2::Ident;
 use quote::quote;
 use syn::{parse_macro_input, Data, DataStruct, DeriveInput, Fields, Path};
-static SHADER_DEF_ATTRIBUTE_NAME: &'static str = "shader_def";
+static SHADER_DEF_ATTRIBUTE_NAME: &str = "shader_def";
 pub fn derive_shader_defs(input: TokenStream) -> TokenStream {
     let ast = parse_macro_input!(input as DeriveInput);
@@ -25,10 +25,7 @@ pub fn derive_shader_defs(input: TokenStream) -> TokenStream {
         .filter(|f| {
             f.attrs
                 .iter()
-                .find(|a| {
-                    a.path.get_ident().as_ref().unwrap().to_string() == SHADER_DEF_ATTRIBUTE_NAME
-                })
-                .is_some()
+                .any(|a| *a.path.get_ident().as_ref().unwrap() == SHADER_DEF_ATTRIBUTE_NAME)
         })
         .map(|f| f.ident.as_ref().unwrap())
         .collect::<Vec<&Ident>>();

@@ -71,7 +71,7 @@ impl Diagnostic {
     }
     pub fn average(&self) -> Option<f64> {
-        if self.history.len() > 0 {
+        if !self.history.is_empty() {
             Some(self.sum / self.history.len() as f64)
         } else {
             None
@@ -93,7 +93,7 @@ impl Diagnostic {
             }
         }
-        return None;
+        None
     }
     pub fn get_max_history_length(&self) -> usize {

@@ -31,7 +31,7 @@ impl Profiler for SystemProfiler {
     let mut system_profiles = self.system_profiles.write().unwrap();
     let profiles = system_profiles
         .entry(scope.clone())
-        .or_insert_with(|| SystemProfiles::default());
+        .or_insert_with(SystemProfiles::default);
     profiles.current_start = Some(Instant::now());
 }

@@ -69,6 +69,7 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
     unsafe fn put(mut self, mut f: impl FnMut(*mut u8, std::any::TypeId, usize) -> bool) {
         #(
             if f((&mut self.#fields as *mut #tys).cast::<u8>(), std::any::TypeId::of::<#tys>(), std::mem::size_of::<#tys>()) {
+                #[allow(clippy::forget_copy)]
                 std::mem::forget(self.#fields);
             }
         )*

@@ -220,6 +220,12 @@ impl Archetype {
         self.len
     }
+    #[allow(missing_docs)]
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        self.len == 0
+    }
     #[allow(missing_docs)]
     pub fn iter_entities(&self) -> impl Iterator<Item = &u32> {
         self.entities.iter().take(self.len as usize)
@@ -239,6 +245,7 @@ impl Archetype {
         &self.types
     }
+    /// # Safety
     /// `index` must be in-bounds
     pub(crate) unsafe fn get_dynamic(
         &self,
@@ -255,6 +262,7 @@ impl Archetype {
         ))
     }
+    /// # Safety
     /// Every type must be written immediately after this call
     pub unsafe fn allocate(&mut self, id: u32) -> u32 {
         if self.len as usize == self.entities.len() {
@@ -408,7 +416,13 @@ impl Archetype {
         }
     }
-    #[allow(missing_docs)]
+    /// # Safety
+    ///
+    /// - `component` must point to valid memory
+    /// - the component `ty`pe must be registered
+    /// - `index` must be in-bound
+    /// - `size` must be the size of the component
+    /// - the storage array must be big enough
     pub unsafe fn put_dynamic(
         &mut self,
         component: *mut u8,
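
Note (illustration only): the clippy::len_without_is_empty lint behind the new `is_empty` methods added in this commit — a public `len` invites callers to write `len() == 0`, so clippy asks for a matching `is_empty`.

    struct Archetype {
        len: u32,
    }

    impl Archetype {
        fn len(&self) -> u32 {
            self.len
        }

        fn is_empty(&self) -> bool {
            self.len == 0
        }
    }

    fn main() {
        let archetype = Archetype { len: 0 };
        assert!(archetype.is_empty());
        assert_eq!(archetype.len(), 0);
    }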

@@ -72,7 +72,11 @@ pub struct Ref<'a, T: Component> {
 }
 impl<'a, T: Component> Ref<'a, T> {
-    #[allow(missing_docs)]
+    /// Creates a new entity component borrow
+    ///
+    /// # Safety
+    ///
+    /// - the index of the component must be valid
     pub unsafe fn new(archetype: &'a Archetype, index: u32) -> Result<Self, MissingComponent> {
         let target = NonNull::new_unchecked(
             archetype
@@ -120,7 +124,11 @@ pub struct RefMut<'a, T: Component> {
 }
 impl<'a, T: Component> RefMut<'a, T> {
-    #[allow(missing_docs)]
+    /// Creates a new entity component mutable borrow
+    ///
+    /// # Safety
+    ///
+    /// - the index of the component must be valid
     pub unsafe fn new(archetype: &'a Archetype, index: u32) -> Result<Self, MissingComponent> {
         let target = NonNull::new_unchecked(
             archetype
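
Note (illustration only): the documentation convention these `# Safety` sections follow, which clippy::missing_safety_doc enforces for public unsafe functions — every `pub unsafe fn` states the contract its callers must uphold.

    /// Reads a `u32` through a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null, aligned, and point to a live `u32`.
    pub unsafe fn read_u32(ptr: *const u32) -> u32 {
        *ptr
    }

    fn main() {
        let x = 5u32;
        // safe here: the pointer comes from a live reference
        assert_eq!(unsafe { read_u32(&x) }, 5);
    }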

@@ -11,6 +11,7 @@ use std::error::Error;
 #[derive(Debug, Clone, Copy, Hash, Eq, Ord, PartialEq, PartialOrd)]
 pub struct Entity(u32);
+#[allow(clippy::new_without_default)]
 impl Entity {
     #[allow(missing_docs)]
     pub fn new() -> Self {

@@ -47,6 +47,9 @@ pub trait Fetch<'a>: Sized {
     fn release(archetype: &Archetype);
     /// if this returns true, the current item will be skipped during iteration
+    ///
+    /// # Safety
+    /// shouldn't be called if there is no current item
     unsafe fn should_skip(&self) -> bool {
         false
     }
@@ -793,7 +796,7 @@ struct ChunkIter<Q: Query> {
 }
 impl<Q: Query> ChunkIter<Q> {
-    unsafe fn next<'a, 'w>(&mut self) -> Option<<Q::Fetch as Fetch<'a>>::Item> {
+    unsafe fn next<'a>(&mut self) -> Option<<Q::Fetch as Fetch<'a>>::Item> {
         loop {
             if self.len == 0 {
                 return None;

@@ -165,7 +165,7 @@ impl World {
         let removed_entities = self
             .removed_components
             .entry(ty.id())
-            .or_insert_with(|| Vec::new());
+            .or_insert_with(Vec::new);
         removed_entities.push(entity);
     }
     Ok(())
@@ -202,7 +202,7 @@ impl World {
         let removed_entities = self
             .removed_components
             .entry(ty.id())
-            .or_insert_with(|| Vec::new());
+            .or_insert_with(Vec::new);
         removed_entities.extend(archetype.iter_entities().map(|id| Entity::from_id(*id)));
     }
     archetype.clear();
@@ -505,7 +505,7 @@ impl World {
         state.mutated_entities[target_index as usize] = is_mutated;
     } else {
         let removed_entities =
-            removed_components.entry(ty).or_insert_with(|| Vec::new());
+            removed_components.entry(ty).or_insert_with(Vec::new);
         removed_entities.push(entity);
     }
 })

@@ -17,6 +17,10 @@ pub struct Res<'a, T: Resource> {
 }
 impl<'a, T: Resource> Res<'a, T> {
+    /// Creates a reference cell to a Resource from a pointer
+    ///
+    /// # Safety
+    /// The pointer must have correct lifetime / storage
     pub unsafe fn new(value: NonNull<T>) -> Self {
         Self {
             value: &*value.as_ptr(),
@@ -26,6 +30,7 @@ impl<'a, T: Resource> Res<'a, T> {
 /// A clone that is unsafe to perform. You probably shouldn't use this.
 pub trait UnsafeClone {
+    #[allow(clippy::missing_safety_doc)]
     unsafe fn unsafe_clone(&self) -> Self;
 }
@@ -53,6 +58,10 @@ pub struct ResMut<'a, T: Resource> {
 }
 impl<'a, T: Resource> ResMut<'a, T> {
+    /// Creates a mutable reference cell to a Resource from a pointer
+    ///
+    /// # Safety
+    /// The pointer must have correct lifetime / storage / ownership
     pub unsafe fn new(value: NonNull<T>) -> Self {
         Self {
             value: value.as_ptr(),
@@ -133,6 +142,7 @@ pub trait FetchResource<'a>: Sized {
     fn borrow(resources: &Resources);
     fn release(resources: &Resources);
+    #[allow(clippy::missing_safety_doc)]
     unsafe fn get(resources: &'a Resources, system_id: Option<SystemId>) -> Self::Item;
 }

@@ -42,10 +42,12 @@ impl Resources {
         self.get_resource_mut(ResourceIndex::Global)
     }
+    #[allow(clippy::needless_lifetimes)]
     pub fn get_local<'a, T: Resource>(&'a self, id: SystemId) -> Option<Ref<'a, T>> {
         self.get_resource(ResourceIndex::System(id))
     }
+    #[allow(clippy::needless_lifetimes)]
     pub fn get_local_mut<'a, T: Resource>(&'a self, id: SystemId) -> Option<RefMut<'a, T>> {
         self.get_resource_mut(ResourceIndex::System(id))
     }
@@ -82,10 +84,13 @@ impl Resources {
         }),
     };
-    if index == archetype.len() {
+    use std::cmp::Ordering;
+    match index.cmp(&archetype.len()) {
+        Ordering::Equal => {
             unsafe { archetype.allocate(index) };
-    } else if index > archetype.len() {
-        panic!("attempted to access index beyond 'current_capacity + 1'")
+        }
+        Ordering::Greater => panic!("attempted to access index beyond 'current_capacity + 1'"),
+        Ordering::Less => (),
     }
     unsafe {
@@ -140,6 +145,7 @@ impl Resources {
     }
     #[inline]
+    #[allow(clippy::missing_safety_doc)]
     pub unsafe fn get_unsafe_ref<T: Resource>(&self, resource_index: ResourceIndex) -> NonNull<T> {
         self.resource_data
             .get(&TypeId::of::<T>())
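
Note (illustration only): the clippy::comparison_chain rewrite shown above — an if/else-if chain comparing the same two values with ==, >, < reads better as a single match on `Ordering`.

    use std::cmp::Ordering;

    fn check(index: usize, len: usize) {
        match index.cmp(&len) {
            Ordering::Equal => println!("append at the end"),
            Ordering::Greater => panic!("index {} is out of bounds (len {})", index, len),
            Ordering::Less => (), // slot already exists, nothing to do
        }
    }

    fn main() {
        check(0, 0); // prints "append at the end"
        check(1, 3); // does nothing
    }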

@@ -1,4 +1,5 @@
 mod parallel_executor;
+#[allow(clippy::module_inception)]
 mod schedule;
 pub use parallel_executor::*;

@@ -51,7 +51,7 @@ impl ParallelExecutor {
     if schedule_changed {
         self.stages.clear();
         self.stages
-            .resize_with(schedule.stage_order.len(), || ExecutorStage::default());
+            .resize_with(schedule.stage_order.len(), ExecutorStage::default);
     }
     for (stage_name, executor_stage) in schedule.stage_order.iter().zip(self.stages.iter_mut())
     {
@@ -208,9 +208,10 @@ impl ExecutorStage {
     ThreadLocalExecution::NextFlush => {
         let resource_access = system.resource_access();
         // if any system before this one conflicts, check all systems that came before for compatibility
-        if current_archetype_access.is_compatible(archetype_access) == false
-            || current_resource_access.is_compatible(resource_access) == false
+        if !current_archetype_access.is_compatible(archetype_access)
+            || !current_resource_access.is_compatible(resource_access)
         {
+            #[allow(clippy::needless_range_loop)]
             for earlier_system_index in
                 prepare_system_index_range.start..system_index
             {
@@ -223,14 +224,12 @@ impl ExecutorStage {
             );
             // if earlier system is incompatible, make the current system dependent
-            if earlier_system
+            if !earlier_system
                 .archetype_access()
                 .is_compatible(archetype_access)
-                == false
-                || earlier_system
+                || !earlier_system
                     .resource_access()
                     .is_compatible(resource_access)
-                == false
             {
                 self.system_dependents[earlier_system_index].push(system_index);
                 self.system_dependencies[system_index]

@@ -25,7 +25,7 @@ pub struct Schedule {
 impl Schedule {
     pub fn add_stage(&mut self, stage: impl Into<Cow<'static, str>>) {
         let stage: Cow<str> = stage.into();
-        if let Some(_) = self.stages.get(&stage) {
+        if self.stages.get(&stage).is_some() {
             panic!("Stage already exists: {}", stage);
         } else {
             self.stages.insert(stage.clone(), Vec::new());
@@ -40,7 +40,7 @@ impl Schedule {
     ) {
         let target: Cow<str> = target.into();
         let stage: Cow<str> = stage.into();
-        if let Some(_) = self.stages.get(&stage) {
+        if self.stages.get(&stage).is_some() {
             panic!("Stage already exists: {}", stage);
         }
@@ -63,7 +63,7 @@ impl Schedule {
     ) {
         let target: Cow<str> = target.into();
         let stage: Cow<str> = stage.into();
-        if let Some(_) = self.stages.get(&stage) {
+        if self.stages.get(&stage).is_some() {
             panic!("Stage already exists: {}", stage);
         }
@@ -173,7 +173,7 @@ impl Schedule {
     let thread_pool_builder = resources
         .get::<ParallelExecutorOptions>()
         .map(|options| (*options).clone())
-        .unwrap_or_else(|| ParallelExecutorOptions::default())
+        .unwrap_or_else(ParallelExecutorOptions::default)
         .create_builder();
     // For now, bevy_ecs only uses the global thread pool so it is sufficient to configure it once here.
     // Dont call .unwrap() as the function is called twice..
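
Note (illustration only): the clippy::redundant_pattern_matching lint behind the `is_some()` changes — `if let Some(_) = x` only asks whether a value is present, which `x.is_some()` says directly.

    use std::collections::HashMap;

    fn main() {
        let mut stages: HashMap<&str, Vec<&str>> = HashMap::new();
        stages.insert("update", Vec::new());

        let stage = "update";
        // before: if let Some(_) = stages.get(stage) { ... }
        if stages.get(stage).is_some() {
            println!("Stage already exists: {}", stage);
        }
    }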

@@ -3,6 +3,7 @@ mod into_system;
 #[cfg(feature = "profiler")]
 mod profiler;
 mod query;
+#[allow(clippy::module_inception)]
 mod system;
 pub use commands::*;

@@ -49,9 +49,7 @@ impl<'a, Q: HecsQuery> Query<'a, Q> {
             .mutable
             .contains(location.archetype as usize)
         {
-            self.world
-                .get(entity)
-                .map_err(|err| QueryError::ComponentError(err))
+            self.world.get(entity).map_err(QueryError::ComponentError)
         } else {
             Err(QueryError::CannotReadArchetype)
         }
@@ -83,7 +81,11 @@ impl<'a, Q: HecsQuery> Query<'a, Q> {
     /// Gets a mutable reference to the entity's component of the given type. This will fail if the entity does not have
     /// the given component type or if the given component type does not match this query.
     pub fn get_mut<T: Component>(&self, entity: Entity) -> Result<RefMut<'_, T>, QueryError> {
-        if let Some(location) = self.world.get_entity_location(entity) {
+        let location = match self.world.get_entity_location(entity) {
+            None => return Err(QueryError::ComponentError(ComponentError::NoSuchEntity)),
+            Some(location) => location,
+        };
         if self
             .archetype_access
             .mutable
@@ -91,13 +93,10 @@ impl<'a, Q: HecsQuery> Query<'a, Q> {
         {
             self.world
                 .get_mut(entity)
-                .map_err(|err| QueryError::ComponentError(err))
+                .map_err(QueryError::ComponentError)
         } else {
             Err(QueryError::CannotWriteArchetype)
         }
-        } else {
-            Err(QueryError::ComponentError(ComponentError::NoSuchEntity))
-        }
     }
     pub fn removed<C: Component>(&self) -> &[Entity] {
@@ -241,7 +240,7 @@ struct ChunkIter<Q: HecsQuery> {
 impl<Q: HecsQuery> ChunkIter<Q> {
     #[inline]
-    unsafe fn next<'a, 'w>(&mut self) -> Option<<Q::Fetch as Fetch<'a>>::Item> {
+    unsafe fn next<'a>(&mut self) -> Option<<Q::Fetch as Fetch<'a>>::Item> {
         loop {
             if self.len == 0 {
                 return None;

@@ -14,6 +14,7 @@ pub enum ThreadLocalExecution {
 pub struct SystemId(pub u32);
 impl SystemId {
+    #[allow(clippy::new_without_default)]
     pub fn new() -> Self {
         SystemId(rand::random::<u32>())
     }

@@ -51,7 +51,7 @@ fn get_primitive_topology(mode: Mode) -> Result<PrimitiveTopology, GltfError> {
     Mode::LineStrip => Ok(PrimitiveTopology::LineStrip),
     Mode::Triangles => Ok(PrimitiveTopology::TriangleList),
     Mode::TriangleStrip => Ok(PrimitiveTopology::TriangleStrip),
-    mode @ _ => Err(GltfError::UnsupportedPrimitive { mode }),
+    mode => Err(GltfError::UnsupportedPrimitive { mode }),
     }
 }
@@ -60,7 +60,7 @@ pub fn load_gltf(asset_path: &Path, bytes: Vec<u8>) -> Result<Mesh, GltfError> {
     let gltf = gltf::Gltf::from_slice(&bytes)?;
     let buffer_data = load_buffers(gltf.buffers(), asset_path)?;
     for scene in gltf.scenes() {
-        for node in scene.nodes() {
+        if let Some(node) = scene.nodes().next() {
             return Ok(load_node(&buffer_data, &node, 1)?);
         }
     }
@@ -71,43 +71,41 @@ pub fn load_gltf(asset_path: &Path, bytes: Vec<u8>) -> Result<Mesh, GltfError> {
 fn load_node(buffer_data: &[Vec<u8>], node: &gltf::Node, depth: i32) -> Result<Mesh, GltfError> {
     if let Some(mesh) = node.mesh() {
-        for primitive in mesh.primitives() {
+        if let Some(primitive) = mesh.primitives().next() {
            let reader = primitive.reader(|buffer| Some(&buffer_data[buffer.index()]));
            let primitive_topology = get_primitive_topology(primitive.mode())?;
            let mut mesh = Mesh::new(primitive_topology);
-            reader
-                .read_positions()
-                .map(|v| VertexAttribute {
+            if let Some(vertex_attribute) = reader.read_positions().map(|v| VertexAttribute {
                name: "Vertex_Position".into(),
                values: VertexAttributeValues::Float3(v.collect()),
-                })
-                .map(|vertex_attribute| mesh.attributes.push(vertex_attribute));
+            }) {
+                mesh.attributes.push(vertex_attribute);
+            }
-            reader
-                .read_normals()
-                .map(|v| VertexAttribute {
+            if let Some(vertex_attribute) = reader.read_normals().map(|v| VertexAttribute {
                name: "Vertex_Normal".into(),
                values: VertexAttributeValues::Float3(v.collect()),
-                })
-                .map(|vertex_attribute| mesh.attributes.push(vertex_attribute));
+            }) {
+                mesh.attributes.push(vertex_attribute);
+            }
-            reader
-                .read_tex_coords(0)
-                .map(|v| VertexAttribute {
+            if let Some(vertex_attribute) = reader.read_tex_coords(0).map(|v| VertexAttribute {
                name: "Vertex_Uv".into(),
                values: VertexAttributeValues::Float2(v.into_f32().collect()),
-                })
-                .map(|vertex_attribute| mesh.attributes.push(vertex_attribute));
+            }) {
+                mesh.attributes.push(vertex_attribute);
+            }
-            reader.read_indices().map(|indices| {
+            if let Some(indices) = reader.read_indices() {
                mesh.indices = Some(indices.into_u32().collect::<Vec<u32>>());
-            });
+            };
            return Ok(mesh);
        }
    }
-    for child in node.children() {
+    if let Some(child) = node.children().next() {
        return Ok(load_node(buffer_data, &child, depth + 1)?);
    }
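
Note (illustration only): the clippy::never_loop lint behind the gltf loop rewrites — a `for` loop whose body unconditionally returns can only ever see the first item, so `if let Some(..) = iter.next()` states that directly.

    fn first_line(text: &str) -> Option<&str> {
        // before: for line in text.lines() { return Some(line); }
        if let Some(line) = text.lines().next() {
            return Some(line);
        }
        None
    }

    fn main() {
        assert_eq!(first_line("a\nb"), Some("a"));
        assert_eq!(first_line(""), None);
    }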

@@ -19,10 +19,7 @@ pub enum ElementState {
 impl ElementState {
     pub fn is_pressed(&self) -> bool {
-        match self {
-            ElementState::Pressed => true,
-            _ => false,
-        }
+        matches!(self, ElementState::Pressed)
     }
 }
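
Note (illustration only): the `matches!` rewrite, self-contained — a match that maps one pattern to true and everything else to false is exactly what the macro expresses.

    enum ElementState {
        Pressed,
        Released,
    }

    fn is_pressed(state: &ElementState) -> bool {
        matches!(state, ElementState::Pressed)
    }

    fn main() {
        assert!(is_pressed(&ElementState::Pressed));
        assert!(!is_pressed(&ElementState::Released));
    }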

@@ -3,6 +3,7 @@ use bevy_render::{color::Color, renderer::RenderResources, shader::ShaderDefs, t
 /// A material with "standard" properties used in PBR lighting
 #[derive(RenderResources, ShaderDefs)]
+#[allow(clippy::manual_non_exhaustive)]
 pub struct StandardMaterial {
     pub albedo: Color,
     #[shader_def]
@@ -10,6 +11,12 @@ pub struct StandardMaterial {
     #[render_resources(ignore)]
     #[shader_def]
     pub shaded: bool,
+    // this is a manual implementation of the non exhaustive pattern,
+    // especially made to allow ..Default::default()
+    #[render_resources(ignore)]
+    #[doc(hidden)]
+    pub __non_exhaustive: (),
 }
 impl Default for StandardMaterial {
@@ -18,6 +25,7 @@ impl Default for StandardMaterial {
             albedo: Color::rgb(1.0, 1.0, 1.0),
             albedo_texture: None,
             shaded: true,
+            __non_exhaustive: (),
         }
     }
 }
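
Note (illustration only): a sketch of the manual non-exhaustive pattern used above — a #[doc(hidden)] unit field discourages literal construction elsewhere while keeping `..Default::default()` usable, which is what the comment in the diff says the field is for.

    pub struct StandardMaterial {
        pub shaded: bool,
        #[doc(hidden)]
        pub __non_exhaustive: (),
    }

    impl Default for StandardMaterial {
        fn default() -> Self {
            Self {
                shaded: true,
                __non_exhaustive: (),
            }
        }
    }

    fn main() {
        // functional record update still works, unlike with #[non_exhaustive]
        // on a struct defined in another crate
        let material = StandardMaterial {
            shaded: false,
            ..Default::default()
        };
        assert!(!material.shaded);
    }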

@@ -48,14 +48,12 @@ pub fn derive_properties(input: TokenStream) -> TokenStream {
     f,
     f.attrs
         .iter()
-        .find(|a| {
-            a.path.get_ident().as_ref().unwrap().to_string() == PROP_ATTRIBUTE_NAME
-        })
+        .find(|a| *a.path.get_ident().as_ref().unwrap() == PROP_ATTRIBUTE_NAME)
         .map(|a| {
             syn::custom_keyword!(ignore);
             let mut attribute_args = PropAttributeArgs { ignore: None };
             a.parse_args_with(|input: ParseStream| {
-                if let Some(_) = input.parse::<Option<ignore>>()? {
+                if input.parse::<Option<ignore>>()?.is_some() {
                     attribute_args.ignore = Some(true);
                     return Ok(());
                 }

@@ -158,12 +158,16 @@ impl Property for DynamicProperties {
     PropertyType::Map => {
         for (i, prop) in properties.iter_props().enumerate() {
             let name = properties.prop_name(i).unwrap();
-            self.prop_mut(name).map(|p| p.apply(prop));
+            if let Some(p) = self.prop_mut(name) {
+                p.apply(prop);
+            }
         }
     }
     PropertyType::Seq => {
         for (i, prop) in properties.iter_props().enumerate() {
-            self.prop_with_index_mut(i).map(|p| p.apply(prop));
+            if let Some(p) = self.prop_with_index_mut(i) {
+                p.apply(prop);
+            }
         }
     }
     _ => panic!("DynamicProperties cannot be Value types"),
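
Note (illustration only): the clippy::option_map_unit_fn lint behind these rewrites — using `map` purely for a side effect hides the control flow; `if let Some(..)` is the idiomatic spelling.

    fn main() {
        let mut values = vec![1, 2, 3];
        // before: values.get_mut(0).map(|v| *v += 10);
        if let Some(v) = values.get_mut(0) {
            *v += 10;
        }
        assert_eq!(values, vec![11, 2, 3]);
    }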

@@ -67,7 +67,7 @@ where
     fn set(&mut self, value: &dyn Property) {
         if let Some(properties) = value.as_properties() {
             let len = properties.prop_len();
-            self.resize_with(len, || T::default());
+            self.resize_with(len, T::default);
             if properties.property_type() != self.property_type() {
                 panic!(
@@ -77,7 +77,9 @@ where
                 );
             }
             for (i, prop) in properties.iter_props().enumerate() {
-                self.prop_with_index_mut(i).map(|p| p.apply(prop));
+                if let Some(p) = self.prop_with_index_mut(i) {
+                    p.apply(prop)
+                }
             }
         } else {
             panic!("attempted to apply non-Properties type to Properties type");
@@ -171,7 +173,7 @@ impl Property for bool {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -211,7 +213,7 @@ impl Property for usize {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -269,7 +271,7 @@ impl Property for u64 {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -327,7 +329,7 @@ impl Property for u32 {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -385,7 +387,7 @@ impl Property for u16 {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -443,7 +445,7 @@ impl Property for u8 {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -501,7 +503,7 @@ impl Property for isize {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -559,7 +561,7 @@ impl Property for i64 {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -617,7 +619,7 @@ impl Property for i32 {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
    }
     #[inline]
@@ -675,7 +677,7 @@ impl Property for i16 {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -733,7 +735,7 @@ impl Property for i8 {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -791,7 +793,7 @@ impl Property for f32 {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]
@@ -833,7 +835,7 @@ impl Property for f64 {
     #[inline]
     fn clone_prop(&self) -> Box<dyn Property> {
-        Box::new(self.clone())
+        Box::new(*self)
     }
     #[inline]

@@ -17,6 +17,7 @@ pub enum Serializable<'a> {
 }
 impl<'a> Serializable<'a> {
+    #[allow(clippy::should_implement_trait)]
     pub fn borrow(&self) -> &dyn erased_serde::Serialize {
         match self {
             Serializable::Borrowed(serialize) => serialize,
@@ -88,11 +89,9 @@ impl<'a> Serialize for DynamicPropertiesSerializer<'a> {
             PropertyType::Seq => {
                 SeqSerializer::new(self.dynamic_properties, self.registry).serialize(serializer)
             }
-            _ => {
-                return Err(serde::ser::Error::custom(
+            _ => Err(serde::ser::Error::custom(
                 "DynamicProperties cannot be Value type",
-                ))
-            }
+            )),
         }
     }
 }
@@ -511,7 +510,7 @@ where
     .ok_or_else(|| de::Error::missing_field(TYPE_FIELD))?;
     let mut dynamic_properties =
         map.next_value_seed(MapPropertyDeserializer { registry })?;
-    dynamic_properties.type_name = type_name.to_string();
+    dynamic_properties.type_name = type_name;
     return Ok(DynamicPropertiesOrProperty::DynamicProperties(
         dynamic_properties,
     ));

@@ -91,7 +91,7 @@ impl PropertyTypeRegistration {
     }
     pub fn get_short_name(full_name: &str) -> String {
-        let mut split = full_name.splitn(2, "<");
+        let mut split = full_name.splitn(2, '<');
         // main type
         let mut short_name = split
@@ -104,7 +104,7 @@ impl PropertyTypeRegistration {
     // process generics if they exist
     if let Some(generics) = split.next() {
-        if !generics.ends_with(">") {
+        if !generics.ends_with('>') {
             panic!("should end with closing carrot")
         }

@@ -123,7 +123,7 @@ where
     .find(|(_i, is_index)| is_index(&key))
     .map(|(i, _)| i);
     if let Some(key_index) = key_index {
-        key_state.set(key_index, key.clone());
+        key_state.set(key_index, key);
         key_state.finish()
     } else {
         return false;
@@ -139,7 +139,7 @@ where
         batch.add(value);
     }
-    return true;
+    true
 }
 pub fn iter(&self) -> impl Iterator<Item = &Batch<TKey, TValue, TData>> {

@@ -1,5 +1,6 @@
 // mod asset_batcher;
 // mod asset_batcher2;
+#[allow(clippy::module_inception)]
 mod batch;
 mod batcher;

@@ -17,7 +17,7 @@ impl ActiveCameras {
     }
     pub fn get(&self, name: &str) -> Option<Entity> {
-        self.cameras.get(name).and_then(|e| e.clone())
+        self.cameras.get(name).and_then(|e| *e)
     }
 }
@@ -26,7 +26,7 @@ pub fn active_cameras_system(
     mut query: Query<(Entity, &Camera)>,
 ) {
     for (name, active_camera) in active_cameras.cameras.iter_mut() {
-        if let None = active_camera {
+        if active_camera.is_none() {
             for (camera_entity, camera) in &mut query.iter() {
                 if let Some(ref current_name) = camera.name {
                     if current_name == name {

@@ -1,4 +1,5 @@
 mod active_cameras;
+#[allow(clippy::module_inception)]
 mod camera;
 mod projection;
 mod visible_entities;

@@ -142,7 +142,7 @@ impl<'a> UnsafeClone for DrawContext<'a> {
         render_resource_context: self.render_resource_context.unsafe_clone(),
         vertex_buffer_descriptors: self.vertex_buffer_descriptors.unsafe_clone(),
         shared_buffers: self.shared_buffers.unsafe_clone(),
-        current_pipeline: self.current_pipeline.clone(),
+        current_pipeline: self.current_pipeline,
         }
     }
 }

@@ -34,6 +34,10 @@ impl VertexAttributeValues {
         }
     }
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
     // TODO: add vertex format as parameter here and perform type conversions
     pub fn get_bytes(&self) -> &[u8] {
         match self {
@@ -224,9 +228,9 @@ pub mod shape {
     let mut normals = Vec::new();
     let mut uvs = Vec::new();
     for (position, normal, uv) in vertices.iter() {
-        positions.push(position.clone());
-        normals.push(normal.clone());
-        uvs.push(uv.clone());
+        positions.push(*position);
+        normals.push(*normal);
+        uvs.push(*uv);
     }
     let indices = vec![
@@ -331,9 +335,9 @@ pub mod shape {
     let mut normals = Vec::new();
     let mut uvs = Vec::new();
     for (position, normal, uv) in vertices.iter() {
-        positions.push(position.clone());
-        normals.push(normal.clone());
-        uvs.push(uv.clone());
+        positions.push(*position);
+        normals.push(*normal);
+        uvs.push(*uv);
     }
     Mesh {
@@ -371,9 +375,9 @@ pub mod shape {
     let mut normals = Vec::new();
     let mut uvs = Vec::new();
     for (position, normal, uv) in vertices.iter() {
-        positions.push(position.clone());
-        normals.push(normal.clone());
-        uvs.push(uv.clone());
+        positions.push(*position);
+        normals.push(*normal);
+        uvs.push(*uv);
     }
     Mesh {

@@ -1,3 +1,4 @@
+#[allow(clippy::module_inception)]
 mod mesh;
 mod vertex;

@@ -1,4 +1,5 @@
 mod ops;
+#[allow(clippy::module_inception)]
 mod pass;
 mod render_pass;

@@ -4,7 +4,7 @@ use std::{
     hash::{Hash, Hasher},
 };
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, Eq)]
 pub struct BindGroupDescriptor {
     pub index: u32,
     pub bindings: Vec<BindingDescriptor>,
@@ -35,9 +35,17 @@ impl BindGroupDescriptor {
 impl Hash for BindGroupDescriptor {
     fn hash<H: Hasher>(&self, state: &mut H) {
-        // TODO: remove index from hash state (or at least id). index is not considered a part of a bind group on the gpu.
-        // bind groups are bound to indices in pipelines
+        // TODO: remove index from hash state (or at least id), and update the PartialEq implem.
+        // index is not considered a part of a bind group on the gpu.
+        // bind groups are bound to indices in pipelines.
         self.index.hash(state);
         self.bindings.hash(state);
     }
 }
+impl PartialEq for BindGroupDescriptor {
+    fn eq(&self, other: &Self) -> bool {
+        // This MUST be kept in sync with the hash implementation above
+        self.index == other.index && self.bindings == other.bindings
+    }
+}

@@ -1,5 +1,6 @@
 mod bind_group;
 mod binding;
+#[allow(clippy::module_inception)]
 mod pipeline;
 mod pipeline_compiler;
 mod pipeline_layout;

@@ -29,8 +29,7 @@ impl Default for PipelineSpecialization {
 impl PipelineSpecialization {
     pub fn empty() -> &'static PipelineSpecialization {
-        pub static EMPTY: Lazy<PipelineSpecialization> =
-            Lazy::new(|| PipelineSpecialization::default());
+        pub static EMPTY: Lazy<PipelineSpecialization> = Lazy::new(PipelineSpecialization::default);
         &EMPTY
     }
 }
@@ -72,7 +71,7 @@ impl PipelineCompiler {
     let specialized_shaders = self
         .specialized_shaders
         .entry(*shader_handle)
-        .or_insert_with(|| Vec::new());
+        .or_insert_with(Vec::new);
     let shader = shaders.get(shader_handle).unwrap();
@@ -172,7 +171,7 @@ impl PipelineCompiler {
     let specialized_pipelines = self
         .specialized_pipelines
         .entry(source_pipeline)
-        .or_insert_with(|| Vec::new());
+        .or_insert_with(Vec::new);
     specialized_pipelines.push(SpecializedPipeline {
         pipeline: specialized_pipeline_handle,
         specialization: pipeline_specialization.clone(),

@@ -113,10 +113,7 @@ impl UniformProperty {
     UniformProperty::Vec4 => 4 * 4,
     UniformProperty::Mat3 => 4 * 4 * 3,
     UniformProperty::Mat4 => 4 * 4 * 4,
-    UniformProperty::Struct(properties) => properties
-        .iter()
-        .map(|p| p.get_size())
-        .fold(0, |total, size| total + size),
+    UniformProperty::Struct(properties) => properties.iter().map(|p| p.get_size()).sum(),
     UniformProperty::Array(property, length) => property.get_size() * *length as u64,
     }
 }

@@ -7,7 +7,9 @@ use crate::{
 use bevy_asset::Handle;
 use bevy_ecs::{Query, Res, ResMut};
 use bevy_property::Properties;
 #[derive(Properties, Default, Clone)]
+#[non_exhaustive]
 pub struct RenderPipeline {
     pub pipeline: Handle<PipelineDescriptor>,
     pub specialization: PipelineSpecialization,
@@ -28,7 +30,6 @@ impl RenderPipeline {
         RenderPipeline {
             pipeline,
             specialization,
-            ..Default::default()
         }
     }
 }

@@ -54,6 +54,7 @@ impl CommandQueue {
         });
     }
+    #[allow(clippy::too_many_arguments)]
     pub fn copy_buffer_to_texture(
         &mut self,
         source_buffer: BufferId,

@@ -276,7 +276,7 @@ impl RenderGraph {
     }
     pub fn take_commands(&mut self) -> Commands {
-        std::mem::replace(&mut self.commands, Commands::default())
+        std::mem::take(&mut self.commands)
     }
 }

@@ -9,6 +9,7 @@ use uuid::Uuid;
 pub struct NodeId(Uuid);
 impl NodeId {
+    #[allow(clippy::new_without_default)]
     pub fn new() -> Self {
         NodeId(Uuid::new_v4())
     }
@@ -48,7 +49,7 @@ pub struct Edges {
 impl Edges {
     pub(crate) fn add_input_edge(&mut self, edge: Edge) -> Result<(), RenderGraphError> {
         if self.has_input_edge(&edge) {
-            return Err(RenderGraphError::EdgeAlreadyExists(edge.clone()));
+            return Err(RenderGraphError::EdgeAlreadyExists(edge));
         }
         self.input_edges.push(edge);
         Ok(())
@@ -56,7 +57,7 @@ impl Edges {
     pub(crate) fn add_output_edge(&mut self, edge: Edge) -> Result<(), RenderGraphError> {
         if self.has_output_edge(&edge) {
-            return Err(RenderGraphError::EdgeAlreadyExists(edge.clone()));
+            return Err(RenderGraphError::EdgeAlreadyExists(edge));
         }
         self.output_edges.push(edge);
         Ok(())

@@ -98,6 +98,10 @@ impl ResourceSlots {
     pub fn len(&self) -> usize {
         self.slots.len()
     }
+    pub fn is_empty(&self) -> bool {
+        self.slots.is_empty()
+    }
 }
 impl From<&ResourceSlotInfo> for ResourceSlot {

@@ -67,7 +67,7 @@ impl Node for TextureCopyNode {
     texture_resource.get_texture().unwrap(),
     [0, 0, 0],
     0,
-    texture_descriptor.size.clone(),
+    texture_descriptor.size,
     );
     render_context.resources().remove_buffer(texture_buffer);
 }

View file

@ -75,7 +75,7 @@ impl Stages {
node_borrows.push((node, indices)); node_borrows.push((node, indices));
} }
node_borrows.sort_by_key(|(_node, indices)| indices.clone()); node_borrows.sort_by_key(|(_node, indices)| <&NodeIndices>::clone(indices));
let mut last_stage = usize::MAX; let mut last_stage = usize::MAX;
let mut last_job = usize::MAX; let mut last_job = usize::MAX;
for (node, indices) in node_borrows.drain(..) { for (node, indices) in node_borrows.drain(..) {
@ -159,7 +159,7 @@ impl RenderGraphStager for DependentNodeStager {
// get all nodes without input. this intentionally includes nodes with no outputs // get all nodes without input. this intentionally includes nodes with no outputs
let output_only_nodes = render_graph let output_only_nodes = render_graph
.iter_nodes() .iter_nodes()
.filter(|node| node.input_slots.len() == 0); .filter(|node| node.input_slots.is_empty());
let mut stages = vec![Stage::default()]; let mut stages = vec![Stage::default()];
let mut node_stages = HashMap::new(); let mut node_stages = HashMap::new();
for output_only_node in output_only_nodes { for output_only_node in output_only_nodes {
@ -190,8 +190,7 @@ fn stage_node(
.edges .edges
.input_edges .input_edges
.iter() .iter()
.find(|e| !node_stages_and_jobs.contains_key(&e.get_output_node())) .any(|e| !node_stages_and_jobs.contains_key(&e.get_output_node()))
.is_some()
{ {
return; return;
} }
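
Two idioms change in this staging code: node.input_slots.len() == 0 becomes is_empty() (clippy's len_zero), and find(..).is_some() becomes any(..) (likely clippy's search_is_some). Both can be sketched on plain collections:

    use std::collections::HashSet;

    // instead of deps.iter().find(|d| !staged.contains(*d)).is_some()
    fn has_unstaged(deps: &[u32], staged: &HashSet<u32>) -> bool {
        deps.iter().any(|d| !staged.contains(d))
    }

    // instead of slots.len() == 0
    fn is_output_only(slots: &[u32]) -> bool {
        slots.is_empty()
    }
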


@ -16,6 +16,7 @@ pub trait RenderContext {
destination_offset: u64, destination_offset: u64,
size: u64, size: u64,
); );
#[allow(clippy::too_many_arguments)]
fn copy_buffer_to_texture( fn copy_buffer_to_texture(
&mut self, &mut self,
source_buffer: BufferId, source_buffer: BufferId,


@ -4,6 +4,7 @@ use uuid::Uuid;
pub struct BufferId(Uuid); pub struct BufferId(Uuid);
impl BufferId { impl BufferId {
#[allow(clippy::new_without_default)]
pub fn new() -> Self { pub fn new() -> Self {
BufferId(Uuid::new_v4()) BufferId(Uuid::new_v4())
} }


@ -1,5 +1,6 @@
mod bind_group; mod bind_group;
mod buffer; mod buffer;
#[allow(clippy::module_inception)]
mod render_resource; mod render_resource;
mod render_resource_bindings; mod render_resource_bindings;
mod shared_buffers; mod shared_buffers;


@ -11,7 +11,7 @@ use std::{
}; };
use uuid::Uuid; use uuid::Uuid;
#[derive(Clone, Eq, PartialEq, Debug)] #[derive(Clone, Eq, Debug)]
pub enum RenderResourceBinding { pub enum RenderResourceBinding {
Buffer { Buffer {
buffer: BufferId, buffer: BufferId,
@ -48,6 +48,34 @@ impl RenderResourceBinding {
} }
} }
impl PartialEq for RenderResourceBinding {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(
RenderResourceBinding::Buffer {
buffer: self_buffer,
range: self_range,
dynamic_index: _,
},
RenderResourceBinding::Buffer {
buffer: other_buffer,
range: other_range,
dynamic_index: _,
},
) => self_buffer == other_buffer && self_range == other_range,
(
RenderResourceBinding::Texture(self_texture),
RenderResourceBinding::Texture(other_texture),
) => RenderResourceId::from(*self_texture) == RenderResourceId::from(*other_texture),
(
RenderResourceBinding::Sampler(self_sampler),
RenderResourceBinding::Sampler(other_sampler),
) => RenderResourceId::from(*self_sampler) == RenderResourceId::from(*other_sampler),
_ => false,
}
}
}
impl Hash for RenderResourceBinding { impl Hash for RenderResourceBinding {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
match self { match self {
@ -117,7 +145,7 @@ impl RenderResourceBindings {
for (name, (vertex_buffer, index_buffer)) in render_resource_bindings.vertex_buffers.iter() for (name, (vertex_buffer, index_buffer)) in render_resource_bindings.vertex_buffers.iter()
{ {
self.set_vertex_buffer(name, *vertex_buffer, index_buffer.clone()); self.set_vertex_buffer(name, *vertex_buffer, *index_buffer);
} }
} }
@ -241,7 +269,7 @@ impl AssetRenderResourceBindings {
pub fn get_or_insert_mut<T>(&mut self, handle: Handle<T>) -> &mut RenderResourceBindings { pub fn get_or_insert_mut<T>(&mut self, handle: Handle<T>) -> &mut RenderResourceBindings {
self.bindings self.bindings
.entry(HandleUntyped::from(handle)) .entry(HandleUntyped::from(handle))
.or_insert_with(|| RenderResourceBindings::default()) .or_insert_with(RenderResourceBindings::default)
} }
pub fn get_mut<T>(&mut self, handle: Handle<T>) -> Option<&mut RenderResourceBindings> { pub fn get_mut<T>(&mut self, handle: Handle<T>) -> Option<&mut RenderResourceBindings> {
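
The derive list drops PartialEq in favour of a manual impl that ignores the dynamic_index field, keeping equality in line with the hand-written Hash impl below it: if two values compare equal, they should hash the same. A generic sketch of that pattern, on a made-up type:

    use std::hash::{Hash, Hasher};

    #[derive(Clone, Eq, Debug)]
    struct Binding {
        id: u64,
        dynamic_index: Option<u32>, // bookkeeping that should not affect identity
    }

    impl PartialEq for Binding {
        fn eq(&self, other: &Self) -> bool {
            // ignore dynamic_index, matching the Hash impl below
            self.id == other.id
        }
    }

    impl Hash for Binding {
        fn hash<H: Hasher>(&self, state: &mut H) {
            self.id.hash(state);
        }
    }
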


@ -85,7 +85,7 @@ impl SharedBuffers {
pub fn reset_command_queue(&self) -> CommandQueue { pub fn reset_command_queue(&self) -> CommandQueue {
let mut command_queue = self.command_queue.write().unwrap(); let mut command_queue = self.command_queue.write().unwrap();
std::mem::replace(&mut *command_queue, CommandQueue::default()) std::mem::take(&mut *command_queue)
} }
} }


@ -4,6 +4,7 @@ use uuid::Uuid;
pub struct TextureId(Uuid); pub struct TextureId(Uuid);
impl TextureId { impl TextureId {
#[allow(clippy::new_without_default)]
pub fn new() -> Self { pub fn new() -> Self {
TextureId(Uuid::new_v4()) TextureId(Uuid::new_v4())
} }
@ -13,6 +14,7 @@ impl TextureId {
pub struct SamplerId(Uuid); pub struct SamplerId(Uuid);
impl SamplerId { impl SamplerId {
#[allow(clippy::new_without_default)]
pub fn new() -> Self { pub fn new() -> Self {
SamplerId(Uuid::new_v4()) SamplerId(Uuid::new_v4())
} }


@ -1,3 +1,4 @@
#[allow(clippy::module_inception)]
mod shader; mod shader;
mod shader_defs; mod shader_defs;
mod shader_reflect; mod shader_reflect;


@ -61,7 +61,7 @@ impl ShaderLayout {
} else { } else {
let parts = vertex_attribute_descriptor let parts = vertex_attribute_descriptor
.name .name
.splitn(3, "_") .splitn(3, '_')
.collect::<Vec<&str>>(); .collect::<Vec<&str>>();
if parts.len() == 3 { if parts.len() == 3 {
if parts[0] == "I" { if parts[0] == "I" {
@ -82,15 +82,13 @@ impl ShaderLayout {
}; };
if let Some(current) = current_descriptor.as_mut() { if let Some(current) = current_descriptor.as_mut() {
if &current.name == &current_buffer_name { if current.name == current_buffer_name {
current.attributes.push(vertex_attribute_descriptor); current.attributes.push(vertex_attribute_descriptor);
continue; continue;
} else { } else if visited_buffer_descriptors.contains(&current_buffer_name) {
if visited_buffer_descriptors.contains(&current_buffer_name) {
panic!("Vertex attribute buffer names must be consecutive.") panic!("Vertex attribute buffer names must be consecutive.")
} }
} }
}
if let Some(current) = current_descriptor.take() { if let Some(current) = current_descriptor.take() {
visited_buffer_descriptors.insert(current.name.to_string()); visited_buffer_descriptors.insert(current.name.to_string());
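
Two fixes land here: splitn gets a char pattern instead of a one-character string (clippy's single_char_pattern), and the nested if inside the else block is folded into an else if (clippy's collapsible_if family, depending on the clippy version). The pattern change in isolation:

    // A char literal is cheaper and clearer than a one-character &str.
    fn split_attribute_name(name: &str) -> Vec<&str> {
        name.splitn(3, '_').collect()
    }
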


@ -13,7 +13,7 @@ impl AssetLoader<Texture> for HdrTextureLoader {
let format = TextureFormat::Rgba32Float; let format = TextureFormat::Rgba32Float;
debug_assert_eq!( debug_assert_eq!(
format.pixel_size(), format.pixel_size(),
4 * 4 * 1, 4 * 4,
"Format should have 32bit x 4 size" "Format should have 32bit x 4 size"
); );


@ -108,28 +108,29 @@ impl AssetLoader<Texture> for ImageTextureLoader {
data = raw_data.as_slice().as_bytes().to_owned(); data = raw_data.as_slice().as_bytes().to_owned();
} }
image::DynamicImage::ImageRgb16(i) => { image::DynamicImage::ImageRgb16(image) => {
width = i.width(); width = image.width();
height = i.height(); height = image.height();
format = TextureFormat::Rgba16Uint; format = TextureFormat::Rgba16Uint;
let mut d = let mut local_data =
Vec::with_capacity(width as usize * height as usize * format.pixel_size()); Vec::with_capacity(width as usize * height as usize * format.pixel_size());
for pixel in i.into_raw().chunks_exact(3) { for pixel in image.into_raw().chunks_exact(3) {
// TODO unsafe_get in release builds? // TODO unsafe_get in release builds?
let r = pixel[0]; let r = pixel[0];
let g = pixel[1]; let g = pixel[1];
let b = pixel[2]; let b = pixel[2];
let a = u16::max_value(); let a = u16::max_value();
d.extend_from_slice(&r.to_ne_bytes()); local_data.extend_from_slice(&r.to_ne_bytes());
d.extend_from_slice(&g.to_ne_bytes()); local_data.extend_from_slice(&g.to_ne_bytes());
d.extend_from_slice(&b.to_ne_bytes()); local_data.extend_from_slice(&b.to_ne_bytes());
d.extend_from_slice(&a.to_ne_bytes()); local_data.extend_from_slice(&a.to_ne_bytes());
} }
data = d; data = local_data;
} }
image::DynamicImage::ImageRgba16(i) => { image::DynamicImage::ImageRgba16(i) => {
width = i.width(); width = i.width();


@ -3,6 +3,7 @@ mod hdr_texture_loader;
#[cfg(feature = "png")] #[cfg(feature = "png")]
mod image_texture_loader; mod image_texture_loader;
mod sampler_descriptor; mod sampler_descriptor;
#[allow(clippy::module_inception)]
mod texture; mod texture;
mod texture_descriptor; mod texture_descriptor;
mod texture_dimension; mod texture_dimension;


@ -155,7 +155,7 @@ impl RenderResource for Option<Handle<Texture>> {
} }
fn texture(&self) -> Option<Handle<Texture>> { fn texture(&self) -> Option<Handle<Texture>> {
self.clone() *self
} }
} }
@ -171,6 +171,6 @@ impl RenderResource for Handle<Texture> {
} }
fn texture(&self) -> Option<Handle<Texture>> { fn texture(&self) -> Option<Handle<Texture>> {
Some(self.clone()) Some(*self)
} }
} }
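
Handle<T> is a Copy type, so clippy's clone_on_copy prefers a plain dereference over clone(). In general form:

    fn duplicate<T: Copy>(value: &T) -> T {
        // *value copies; value.clone() would do the same work more verbosely
        *value
    }
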


@ -14,6 +14,7 @@ pub struct Error {
pub type Result<T> = std::result::Result<T, Error>; pub type Result<T> = std::result::Result<T, Error>;
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
#[non_exhaustive]
pub enum ErrorCode { pub enum ErrorCode {
Io(String), Io(String),
Message(String), Message(String),
@ -54,9 +55,6 @@ pub enum ErrorCode {
Utf8Error(Utf8Error), Utf8Error(Utf8Error),
TrailingCharacters, TrailingCharacters,
#[doc(hidden)]
__Nonexhaustive,
} }
impl fmt::Display for Error { impl fmt::Display for Error {
@ -108,7 +106,6 @@ impl fmt::Display for ErrorCode {
ErrorCode::UnderscoreAtBeginning => f.write_str("Found underscore at the beginning"), ErrorCode::UnderscoreAtBeginning => f.write_str("Found underscore at the beginning"),
ErrorCode::UnexpectedByte(_) => f.write_str("Unexpected byte"), ErrorCode::UnexpectedByte(_) => f.write_str("Unexpected byte"),
ErrorCode::TrailingCharacters => f.write_str("Non-whitespace trailing characters"), ErrorCode::TrailingCharacters => f.write_str("Non-whitespace trailing characters"),
_ => f.write_str("Unknown ErrorCode"),
} }
} }
} }
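
The hidden __Nonexhaustive variant (and its catch-all Display arm) is replaced by the #[non_exhaustive] attribute, which has been the supported way to reserve room for new variants since Rust 1.40. Sketch:

    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum ErrorCode {
        Io(String),
        Message(String),
        // more variants can be added later without a semver break;
        // downstream matches must keep a wildcard arm
    }
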


@ -62,6 +62,7 @@ struct Pretty {
/// .indentor("\t".to_owned()); /// .indentor("\t".to_owned());
/// ``` /// ```
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct PrettyConfig { pub struct PrettyConfig {
/// Limit the pretty-ness up to the given depth. /// Limit the pretty-ness up to the given depth.
#[serde(default = "default_depth_limit")] #[serde(default = "default_depth_limit")]
@ -83,9 +84,6 @@ pub struct PrettyConfig {
pub decimal_floats: bool, pub decimal_floats: bool,
/// Enable extensions. Only configures 'implicit_some' for now. /// Enable extensions. Only configures 'implicit_some' for now.
pub extensions: Extensions, pub extensions: Extensions,
/// Private field to ensure adding a field is non-breaking.
#[serde(skip)]
_future_proof: (),
} }
impl PrettyConfig { impl PrettyConfig {
@ -206,7 +204,6 @@ impl Default for PrettyConfig {
enumerate_arrays: default_enumerate_arrays(), enumerate_arrays: default_enumerate_arrays(),
extensions: Extensions::default(), extensions: Extensions::default(),
decimal_floats: default_decimal_floats(), decimal_floats: default_decimal_floats(),
_future_proof: (),
} }
} }
} }
@ -391,7 +388,7 @@ impl<'a, W: io::Write> ser::Serializer for &'a mut Serializer<W> {
fn serialize_f32(self, v: f32) -> Result<()> { fn serialize_f32(self, v: f32) -> Result<()> {
write!(self.output, "{}", v)?; write!(self.output, "{}", v)?;
// TODO: use f32::EPSILON when minimum supported rust version is 1.43 // TODO: use f32::EPSILON when minimum supported rust version is 1.43
pub const EPSILON: f32 = 1.19209290e-07_f32; pub const EPSILON: f32 = 1.1920929e-7_f32;
if self.decimal_floats() && (v - v.floor()).abs() < EPSILON { if self.decimal_floats() && (v - v.floor()).abs() < EPSILON {
write!(self.output, ".0")?; write!(self.output, ".0")?;
} }
@ -401,7 +398,7 @@ impl<'a, W: io::Write> ser::Serializer for &'a mut Serializer<W> {
fn serialize_f64(self, v: f64) -> Result<()> { fn serialize_f64(self, v: f64) -> Result<()> {
write!(self.output, "{}", v)?; write!(self.output, "{}", v)?;
// TODO: use f64::EPSILON when minimum supported rust version is 1.43 // TODO: use f64::EPSILON when minimum supported rust version is 1.43
pub const EPSILON: f64 = 2.2204460492503131e-16_f64; pub const EPSILON: f64 = 2.220446049250313e-16_f64;
if self.decimal_floats() && (v - v.floor()).abs() < EPSILON { if self.decimal_floats() && (v - v.floor()).abs() < EPSILON {
write!(self.output, ".0")?; write!(self.output, ".0")?;
} }
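
The epsilon literals are trimmed to the precision an f32/f64 can actually hold (likely clippy's excessive_precision); as the TODO notes, they can become the std constants once the minimum supported Rust version reaches 1.43. Roughly:

    // Once the MSRV allows it, the local const goes away:
    fn needs_decimal_point(v: f32) -> bool {
        (v - v.floor()).abs() < f32::EPSILON
    }
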


@ -73,7 +73,7 @@ impl SceneSpawner {
let spawned = self let spawned = self
.spawned_scenes .spawned_scenes
.entry(scene_handle) .entry(scene_handle)
.or_insert_with(|| Vec::new()); .or_insert_with(Vec::new);
spawned.push(instance_id); spawned.push(instance_id);
Ok(()) Ok(())
} }
@ -98,7 +98,7 @@ impl SceneSpawner {
*instance_info *instance_info
.entity_map .entity_map
.entry(scene_entity.entity) .entry(scene_entity.entity)
.or_insert_with(|| bevy_ecs::Entity::new()) .or_insert_with(bevy_ecs::Entity::new)
} else { } else {
bevy_ecs::Entity::from_id(scene_entity.entity) bevy_ecs::Entity::from_id(scene_entity.entity)
}; };


@ -109,10 +109,10 @@ pub fn build_sprite_pipeline(shaders: &mut Assets<Shader>) -> PipelineDescriptor
} }
pub mod node { pub mod node {
pub const COLOR_MATERIAL: &'static str = "color_material"; pub const COLOR_MATERIAL: &str = "color_material";
pub const SPRITE: &'static str = "sprite"; pub const SPRITE: &str = "sprite";
pub const SPRITE_SHEET: &'static str = "sprite_sheet"; pub const SPRITE_SHEET: &str = "sprite_sheet";
pub const SPRITE_SHEET_SPRITE: &'static str = "sprite_sheet_sprite"; pub const SPRITE_SHEET_SPRITE: &str = "sprite_sheet_sprite";
} }
pub trait SpriteRenderGraphBuilder { pub trait SpriteRenderGraphBuilder {
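
Consts and statics already have the 'static lifetime, so clippy's redundant_static_lifetimes asks for &str rather than &'static str; the same change repeats in the UI stage and node modules below.

    pub const SPRITE: &str = "sprite"; // the 'static is implied for a const
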


@ -93,6 +93,10 @@ impl TextureAtlas {
self.textures.len() self.textures.len()
} }
pub fn is_empty(&self) -> bool {
self.textures.is_empty()
}
pub fn get_texture_index(&self, texture: Handle<Texture>) -> Option<usize> { pub fn get_texture_index(&self, texture: Handle<Texture>) -> Option<usize> {
self.texture_handles self.texture_handles
.as_ref() .as_ref()


@ -25,7 +25,7 @@ impl Default for Translation {
impl From<Vec3> for Translation { impl From<Vec3> for Translation {
fn from(translation: Vec3) -> Self { fn from(translation: Vec3) -> Self {
Self(Vec3::from(translation)) Self(translation)
} }
} }


@ -32,7 +32,7 @@ impl WorldWriter for InsertChildren {
// NOTE: ideally this is just an else statement, but currently that _incorrectly_ fails borrow-checking // NOTE: ideally this is just an else statement, but currently that _incorrectly_ fails borrow-checking
if !added { if !added {
world world
.insert_one(self.parent, Children(SmallVec::from(self.children))) .insert_one(self.parent, Children(self.children))
.unwrap(); .unwrap();
} }
} }
@ -73,7 +73,7 @@ impl WorldWriter for PushChildren {
// NOTE: ideally this is just an else statement, but currently that _incorrectly_ fails borrow-checking // NOTE: ideally this is just an else statement, but currently that _incorrectly_ fails borrow-checking
if !added { if !added {
world world
.insert_one(self.parent, Children(SmallVec::from(self.children))) .insert_one(self.parent, Children(self.children))
.unwrap(); .unwrap();
} }
} }


@ -14,13 +14,10 @@ where
{ {
// TODO: not a huge fan of this pattern. are there ways to do recursive updates in legion without allocations? // TODO: not a huge fan of this pattern. are there ways to do recursive updates in legion without allocations?
// TODO: the problem above might be resolvable with world splitting // TODO: the problem above might be resolvable with world splitting
let children = children_query.get::<Children>(entity).ok().map(|children| { let children = children_query
children .get::<Children>(entity)
.0 .ok()
.iter() .map(|children| children.0.iter().cloned().collect::<Vec<Entity>>());
.map(|entity| *entity)
.collect::<Vec<Entity>>()
});
let parent_result = run(state, entity, parent_result, previous_result); let parent_result = run(state, entity, parent_result, previous_result);
previous_result = None; previous_result = None;
@ -47,13 +44,11 @@ pub struct DespawnRecursive {
} }
fn despawn_with_children_recursive(world: &mut World, entity: Entity) { fn despawn_with_children_recursive(world: &mut World, entity: Entity) {
if let Some(children) = world.get::<Children>(entity).ok().map(|children| { if let Some(children) = world
children .get::<Children>(entity)
.0 .ok()
.iter() .map(|children| children.0.iter().cloned().collect::<Vec<Entity>>())
.map(|entity| *entity) {
.collect::<Vec<Entity>>()
}) {
for e in children { for e in children {
despawn_with_children_recursive(world, e); despawn_with_children_recursive(world, e);
} }
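
The closure-heavy child collection is rewritten with Iterator::cloned (clippy's map_clone family); copied() would work just as well for Copy element types. A reduced version:

    fn collect_ids(ids: &[u32]) -> Vec<u32> {
        // instead of ids.iter().map(|id| *id).collect()
        ids.iter().cloned().collect()
    }
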


@ -1,4 +1,5 @@
mod child_builder; mod child_builder;
#[allow(clippy::module_inception)]
mod hierarchy; mod hierarchy;
mod hierarchy_maintenance_system; mod hierarchy_maintenance_system;
mod world_child_builder; mod world_child_builder;


@ -59,12 +59,10 @@ impl ComponentRegistry {
let mut registration = self.get_with_short_name(type_name); let mut registration = self.get_with_short_name(type_name);
if registration.is_none() { if registration.is_none() {
registration = self.get_with_full_name(type_name); registration = self.get_with_full_name(type_name);
if registration.is_none() { if registration.is_none() && self.ambigous_names.contains(type_name) {
if self.ambigous_names.contains(type_name) {
panic!("Type name is ambiguous: {}", type_name); panic!("Type name is ambiguous: {}", type_name);
} }
} }
}
registration registration
} }
} }
@ -99,11 +97,7 @@ impl ComponentRegistration {
component_properties_fn: |archetype: &Archetype, index: usize| { component_properties_fn: |archetype: &Archetype, index: usize| {
// the type has been looked up by the caller, so this is safe // the type has been looked up by the caller, so this is safe
unsafe { unsafe {
let ptr = archetype let ptr = archetype.get::<T>().unwrap().as_ptr().add(index);
.get::<T>()
.unwrap()
.as_ptr()
.offset(index as isize);
ptr.as_ref().unwrap() ptr.as_ref().unwrap()
} }
}, },
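
Besides folding the nested if (clippy's collapsible_if), the raw-pointer walk switches from offset(index as isize) to add(index), which is the same operation for in-bounds offsets and drops the cast (likely clippy's ptr_offset_with_cast). Sketch of the pointer half:

    // Safety: the caller must guarantee `index` is in bounds of the allocation.
    unsafe fn element_ptr<T>(base: *const T, index: usize) -> *const T {
        base.add(index) // equivalent to base.offset(index as isize)
    }
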


@ -44,8 +44,8 @@ impl From<&Style> for stretch::style::Style {
margin: from_rect(value.margin), margin: from_rect(value.margin),
padding: from_rect(value.padding), padding: from_rect(value.padding),
border: from_rect(value.border), border: from_rect(value.border),
flex_grow: value.flex_grow.into(), flex_grow: value.flex_grow,
flex_shrink: value.flex_shrink.into(), flex_shrink: value.flex_shrink,
flex_basis: value.flex_basis.into(), flex_basis: value.flex_basis.into(),
size: from_size(value.size), size: from_size(value.size),
min_size: from_size(value.min_size), min_size: from_size(value.min_size),


@ -31,8 +31,7 @@ impl FlexSurface {
let stretch_style = style.into(); let stretch_style = style.into();
let stretch_node = self.entity_to_stretch.entry(entity).or_insert_with(|| { let stretch_node = self.entity_to_stretch.entry(entity).or_insert_with(|| {
added = true; added = true;
let stretch_node = stretch.new_node(stretch_style, Vec::new()).unwrap(); stretch.new_node(stretch_style, Vec::new()).unwrap()
stretch_node
}); });
if !added { if !added {


@ -33,7 +33,7 @@ use update::ui_z_system;
pub struct UiPlugin; pub struct UiPlugin;
pub mod stage { pub mod stage {
pub const UI: &'static str = "ui"; pub const UI: &str = "ui";
} }
impl Plugin for UiPlugin { impl Plugin for UiPlugin {


@ -66,13 +66,13 @@ pub fn build_ui_pipeline(shaders: &mut Assets<Shader>) -> PipelineDescriptor {
} }
pub mod node { pub mod node {
pub const UI_CAMERA: &'static str = "ui_camera"; pub const UI_CAMERA: &str = "ui_camera";
pub const NODE: &'static str = "node"; pub const NODE: &str = "node";
pub const UI_PASS: &'static str = "ui_pass"; pub const UI_PASS: &str = "ui_pass";
} }
pub mod camera { pub mod camera {
pub const UI_CAMERA: &'static str = "UiCamera"; pub const UI_CAMERA: &str = "UiCamera";
} }
pub trait UiRenderGraphBuilder { pub trait UiRenderGraphBuilder {


@ -51,5 +51,5 @@ fn update_node_entity(
position.set_z(z); position.set_z(z);
transform.set_w_axis(position); transform.set_w_axis(position);
return Some(global_z); Some(global_z)
} }


@ -21,15 +21,15 @@ pub fn image_node_system(
mut query: Query<(&Image, &mut CalculatedSize, &Handle<ColorMaterial>)>, mut query: Query<(&Image, &mut CalculatedSize, &Handle<ColorMaterial>)>,
) { ) {
for (_image, mut calculated_size, material_handle) in &mut query.iter() { for (_image, mut calculated_size, material_handle) in &mut query.iter() {
materials if let Some(texture) = materials
.get(material_handle) .get(material_handle)
.and_then(|material| material.texture) .and_then(|material| material.texture)
.and_then(|texture_handle| textures.get(&texture_handle)) .and_then(|texture_handle| textures.get(&texture_handle))
.map(|texture| { {
calculated_size.size = Size { calculated_size.size = Size {
width: texture.size.x(), width: texture.size.x(),
height: texture.size.y(), height: texture.size.y(),
}; };
}); }
} }
} }
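
Using Option::map purely for its side effect trips clippy's option_map_unit_fn; the if let form reads better and makes the unit return explicit. Reduced to the essentials:

    fn update_size(size: &mut (f32, f32), texture: Option<(f32, f32)>) {
        // instead of texture.map(|t| { *size = t; });
        if let Some(t) = texture {
            *size = t;
        }
    }
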


@ -49,6 +49,7 @@ pub fn text_system(
} }
} }
#[allow(clippy::too_many_arguments)]
pub fn draw_text_system( pub fn draw_text_system(
mut draw_context: DrawContext, mut draw_context: DrawContext,
fonts: Res<Assets<Font>>, fonts: Res<Assets<Font>>,


@ -59,6 +59,7 @@ impl WgpuRenderResourceContext {
); );
} }
#[allow(clippy::too_many_arguments)]
pub fn copy_buffer_to_texture( pub fn copy_buffer_to_texture(
&self, &self,
command_encoder: &mut wgpu::CommandEncoder, command_encoder: &mut wgpu::CommandEncoder,
@ -466,7 +467,7 @@ impl RenderResourceContext for WgpuRenderResourceContext {
RenderResourceBinding::Texture(resource) => { RenderResourceBinding::Texture(resource) => {
let texture_view = texture_views let texture_view = texture_views
.get(&resource) .get(&resource)
.expect(&format!("{:?}", resource)); .unwrap_or_else(|| panic!("{:?}", resource));
wgpu::BindingResource::TextureView(texture_view) wgpu::BindingResource::TextureView(texture_view)
} }
RenderResourceBinding::Sampler(resource) => { RenderResourceBinding::Sampler(resource) => {
@ -495,7 +496,7 @@ impl RenderResourceContext for WgpuRenderResourceContext {
let bind_group_info = bind_groups let bind_group_info = bind_groups
.entry(bind_group_descriptor_id) .entry(bind_group_descriptor_id)
.or_insert_with(|| WgpuBindGroupInfo::default()); .or_insert_with(WgpuBindGroupInfo::default);
bind_group_info bind_group_info
.bind_groups .bind_groups
.insert(bind_group.id, wgpu_bind_group); .insert(bind_group.id, wgpu_bind_group);
@ -540,7 +541,7 @@ impl RenderResourceContext for WgpuRenderResourceContext {
let buffer_slice = buffer.slice(..); let buffer_slice = buffer.slice(..);
let data = buffer_slice.map_async(wgpu::MapMode::Write); let data = buffer_slice.map_async(wgpu::MapMode::Write);
self.device.poll(wgpu::Maintain::Wait); self.device.poll(wgpu::Maintain::Wait);
if let Err(_) = pollster::block_on(data) { if pollster::block_on(data).is_err() {
panic!("failed to map buffer to host"); panic!("failed to map buffer to host");
} }
} }
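
Three small lints in this file: expect(&format!(..)) builds its message even on success, so clippy's expect_fun_call suggests unwrap_or_else with panic!; or_insert_with takes the constructor function directly (redundant_closure); and if let Err(_) collapses to is_err() (redundant_pattern_matching). The first and last, in isolation:

    use std::collections::HashMap;

    fn must_get(map: &HashMap<u32, String>, key: u32) -> &String {
        // the panic message is only formatted on the failure path
        map.get(&key).unwrap_or_else(|| panic!("missing key {:?}", key))
    }

    fn ensure_ok(result: Result<(), ()>) {
        // instead of `if let Err(_) = result { ... }`
        if result.is_err() {
            panic!("failed to map buffer to host");
        }
    }
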


@ -61,7 +61,7 @@ impl<'a> RenderPass for WgpuRenderPass<'a> {
.get(&bind_group_descriptor_id) .get(&bind_group_descriptor_id)
{ {
if let Some(wgpu_bind_group) = bind_group_info.bind_groups.get(&bind_group) { if let Some(wgpu_bind_group) = bind_group_info.bind_groups.get(&bind_group) {
const EMPTY: &'static [u32] = &[]; const EMPTY: &[u32] = &[];
let dynamic_uniform_indices = let dynamic_uniform_indices =
if let Some(dynamic_uniform_indices) = dynamic_uniform_indices { if let Some(dynamic_uniform_indices) = dynamic_uniform_indices {
dynamic_uniform_indices dynamic_uniform_indices


@ -182,16 +182,12 @@ impl WgpuFrom<&BindType> for wgpu::BindingType {
match bind_type { match bind_type {
BindType::Uniform { dynamic, .. } => wgpu::BindingType::UniformBuffer { BindType::Uniform { dynamic, .. } => wgpu::BindingType::UniformBuffer {
dynamic: *dynamic, dynamic: *dynamic,
min_binding_size: bind_type min_binding_size: bind_type.get_uniform_size().and_then(wgpu::BufferSize::new),
.get_uniform_size()
.and_then(|size| wgpu::BufferSize::new(size)),
}, },
BindType::StorageBuffer { dynamic, readonly } => wgpu::BindingType::StorageBuffer { BindType::StorageBuffer { dynamic, readonly } => wgpu::BindingType::StorageBuffer {
dynamic: *dynamic, dynamic: *dynamic,
readonly: *readonly, readonly: *readonly,
min_binding_size: bind_type min_binding_size: bind_type.get_uniform_size().and_then(wgpu::BufferSize::new),
.get_uniform_size()
.and_then(|size| wgpu::BufferSize::new(size)),
}, },
BindType::SampledTexture { BindType::SampledTexture {
dimension, dimension,
@ -529,7 +525,7 @@ impl WgpuFrom<SamplerDescriptor> for wgpu::SamplerDescriptor<'_> {
lod_min_clamp: sampler_descriptor.lod_min_clamp, lod_min_clamp: sampler_descriptor.lod_min_clamp,
lod_max_clamp: sampler_descriptor.lod_max_clamp, lod_max_clamp: sampler_descriptor.lod_max_clamp,
compare: sampler_descriptor.compare_function.map(|c| c.wgpu_into()), compare: sampler_descriptor.compare_function.map(|c| c.wgpu_into()),
anisotropy_clamp: sampler_descriptor.anisotropy_clamp.clone(), anisotropy_clamp: sampler_descriptor.anisotropy_clamp,
..Default::default() ..Default::default()
} }
} }


@ -44,11 +44,11 @@ impl Plugin for WindowPlugin {
let window_descriptor = resources let window_descriptor = resources
.get::<WindowDescriptor>() .get::<WindowDescriptor>()
.map(|descriptor| (*descriptor).clone()) .map(|descriptor| (*descriptor).clone())
.unwrap_or_else(|| WindowDescriptor::default()); .unwrap_or_else(WindowDescriptor::default);
let mut create_window_event = resources.get_mut::<Events<CreateWindow>>().unwrap(); let mut create_window_event = resources.get_mut::<Events<CreateWindow>>().unwrap();
create_window_event.send(CreateWindow { create_window_event.send(CreateWindow {
id: WindowId::primary(), id: WindowId::primary(),
descriptor: window_descriptor.clone(), descriptor: window_descriptor,
}); });
} }


@ -15,8 +15,12 @@ pub fn exit_on_window_close_system(
mut app_exit_events: ResMut<Events<AppExit>>, mut app_exit_events: ResMut<Events<AppExit>>,
window_close_requested_events: Res<Events<WindowCloseRequested>>, window_close_requested_events: Res<Events<WindowCloseRequested>>,
) { ) {
for _ in state.event_reader.iter(&window_close_requested_events) { if state
.event_reader
.iter(&window_close_requested_events)
.next()
.is_some()
{
app_exit_events.send(AppExit); app_exit_events.send(AppExit);
break;
} }
} }
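
A for loop whose body always breaks is flagged by clippy's never_loop; asking the event iterator for next().is_some() states the intent ("did at least one close request arrive?") directly. In general:

    fn any_arrived<T>(mut events: impl Iterator<Item = T>) -> bool {
        // instead of `for _ in events { return true; }`
        events.next().is_some()
    }
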


@ -15,9 +15,13 @@ impl WindowId {
pub fn is_primary(&self) -> bool { pub fn is_primary(&self) -> bool {
*self == WindowId::primary() *self == WindowId::primary()
} }
}
pub fn to_string(&self) -> String { use std::fmt;
self.0.to_simple().to_string()
impl fmt::Display for WindowId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.to_simple().fmt(f)
} }
} }
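
An inherent to_string method shadows the blanket ToString impl and trips clippy's inherent_to_string; implementing fmt::Display instead provides to_string for free and also works with format! and friends. A minimal version on a stand-in id type:

    use std::fmt;

    struct Id(u64);

    impl fmt::Display for Id {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "{}", self.0)
        }
    }

    // Id(7).to_string() now comes from the blanket impl<T: Display> ToString.
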
@ -65,6 +69,7 @@ impl Window {
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
#[allow(clippy::manual_non_exhaustive)]
pub struct WindowDescriptor { pub struct WindowDescriptor {
pub width: u32, pub width: u32,
pub height: u32, pub height: u32,
@ -72,6 +77,11 @@ pub struct WindowDescriptor {
pub vsync: bool, pub vsync: bool,
pub resizable: bool, pub resizable: bool,
pub mode: WindowMode, pub mode: WindowMode,
// this is a manual implementation of the non exhaustive pattern,
// especially made to allow ..Default::default()
#[doc(hidden)]
pub __non_exhaustive: (),
} }
impl Default for WindowDescriptor { impl Default for WindowDescriptor {
@ -83,6 +93,7 @@ impl Default for WindowDescriptor {
vsync: true, vsync: true,
resizable: true, resizable: true,
mode: WindowMode::Windowed, mode: WindowMode::Windowed,
__non_exhaustive: (),
} }
} }
} }
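
As the in-code comment explains, WindowDescriptor keeps a hidden public unit field rather than the #[non_exhaustive] attribute: on a struct, the attribute forbids downstream crates from using struct-literal syntax at all, including the ..Default::default() update form, so the manual pattern stays and the matching lint is allowed. Shape of the trade-off, with placeholder fields and values:

    #[allow(clippy::manual_non_exhaustive)]
    #[derive(Debug, Clone)]
    pub struct Descriptor {
        pub width: u32,
        pub height: u32,
        // reserves the right to add fields while keeping
        // `Descriptor { width: 800, ..Default::default() }` legal for users,
        // which #[non_exhaustive] would not allow.
        #[doc(hidden)]
        pub __non_exhaustive: (),
    }

    impl Default for Descriptor {
        fn default() -> Self {
            Descriptor { width: 1280, height: 720, __non_exhaustive: () } // placeholder values
        }
    }
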


@ -7,9 +7,7 @@ pub fn convert_keyboard_input(keyboard_input: &winit::event::KeyboardInput) -> K
KeyboardInput { KeyboardInput {
scan_code: keyboard_input.scancode, scan_code: keyboard_input.scancode,
state: convert_element_state(keyboard_input.state), state: convert_element_state(keyboard_input.state),
key_code: keyboard_input key_code: keyboard_input.virtual_keycode.map(convert_virtual_key_code),
.virtual_keycode
.map(|v| convert_virtual_key_code(v)),
} }
} }


@ -115,22 +115,21 @@ pub fn winit_runner(mut app: App) {
let mut mouse_button_input_events = let mut mouse_button_input_events =
app.resources.get_mut::<Events<MouseButtonInput>>().unwrap(); app.resources.get_mut::<Events<MouseButtonInput>>().unwrap();
mouse_button_input_events.send(MouseButtonInput { mouse_button_input_events.send(MouseButtonInput {
button: converters::convert_mouse_button(button.into()), button: converters::convert_mouse_button(button),
state: converters::convert_element_state(state), state: converters::convert_element_state(state),
}); });
} }
_ => {} _ => {}
}, },
event::Event::DeviceEvent { ref event, .. } => match event { event::Event::DeviceEvent { ref event, .. } => {
DeviceEvent::MouseMotion { delta } => { if let DeviceEvent::MouseMotion { delta } = event {
let mut mouse_motion_events = let mut mouse_motion_events =
app.resources.get_mut::<Events<MouseMotion>>().unwrap(); app.resources.get_mut::<Events<MouseMotion>>().unwrap();
mouse_motion_events.send(MouseMotion { mouse_motion_events.send(MouseMotion {
delta: Vec2::new(delta.0 as f32, delta.1 as f32), delta: Vec2::new(delta.0 as f32, delta.1 as f32),
}); });
} }
_ => {} }
},
event::Event::MainEventsCleared => { event::Event::MainEventsCleared => {
handle_create_window_events( handle_create_window_events(
&mut app.resources, &mut app.resources,
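
A match with one interesting arm and an empty catch-all is clippy's single_match; if let says the same thing with less nesting. On a hypothetical event enum:

    enum Event {
        MouseMotion { delta: (f64, f64) },
        Other,
    }

    fn handle(event: &Event) {
        // instead of match event { Event::MouseMotion { delta } => ..., _ => {} }
        if let Event::MouseMotion { delta } = event {
            println!("moved by {:?}", delta);
        }
    }
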


@ -17,7 +17,7 @@ struct Rotator;
/// rotates the parent, which will result in the child also rotating /// rotates the parent, which will result in the child also rotating
fn rotator_system(time: Res<Time>, mut query: Query<(&Rotator, &mut Rotation)>) { fn rotator_system(time: Res<Time>, mut query: Query<(&Rotator, &mut Rotation)>) {
for (_rotator, mut rotation) in &mut query.iter() { for (_rotator, mut rotation) in &mut query.iter() {
rotation.0 = rotation.0 * Quat::from_rotation_x(3.0 * time.delta_seconds); rotation.0 *= Quat::from_rotation_x(3.0 * time.delta_seconds);
} }
} }
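
Writing rotation.0 = rotation.0 * q triggers clippy's assign_op_pattern; the compound form works here because the quaternion type implements MulAssign. The same change appears in the next rotator snippet. Reduced to a scalar:

    fn spin(angle: &mut f32, step: f32) {
        // instead of *angle = *angle * step
        *angle *= step;
    }
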


@ -21,7 +21,7 @@ struct Rotator;
/// rotates the parent, which will result in the child also rotating /// rotates the parent, which will result in the child also rotating
fn rotator_system(time: Res<Time>, mut query: Query<(&Rotator, &mut Rotation)>) { fn rotator_system(time: Res<Time>, mut query: Query<(&Rotator, &mut Rotation)>) {
for (_rotator, mut rotation) in &mut query.iter() { for (_rotator, mut rotation) in &mut query.iter() {
rotation.0 = rotation.0 * Quat::from_rotation_x(3.0 * time.delta_seconds); rotation.0 *= Quat::from_rotation_x(3.0 * time.delta_seconds);
} }
} }


@ -74,7 +74,7 @@ fn setup(
font: asset_server.load("assets/fonts/FiraSans-Bold.ttf").unwrap(), font: asset_server.load("assets/fonts/FiraSans-Bold.ttf").unwrap(),
value: "Score:".to_string(), value: "Score:".to_string(),
style: TextStyle { style: TextStyle {
color: Color::rgb(0.2, 0.2, 0.8).into(), color: Color::rgb(0.2, 0.2, 0.8),
font_size: 40.0, font_size: 40.0,
}, },
}, },
@ -219,7 +219,7 @@ fn ball_collision_system(
let collision = collide(ball_translation.0, ball_size, translation.0, sprite.size); let collision = collide(ball_translation.0, ball_size, translation.0, sprite.size);
if let Some(collision) = collision { if let Some(collision) = collision {
// scorable colliders should be despawned and increment the scoreboard on collision // scorable colliders should be despawned and increment the scoreboard on collision
if let &Collider::Scorable = collider { if let Collider::Scorable = *collider {
scoreboard.score += 1; scoreboard.score += 1;
commands.despawn(collider_entity); commands.despawn(collider_entity);
} }


@ -38,7 +38,7 @@ fn atlas_render_system(
return; return;
} }
if let Some(set) = font_atlas_sets.get(&state.handle.as_handle::<FontAtlasSet>()) { if let Some(set) = font_atlas_sets.get(&state.handle.as_handle::<FontAtlasSet>()) {
for (_size, font_atlas) in set.iter() { if let Some((_size, font_atlas)) = set.iter().next() {
state.added = true; state.added = true;
let texture_atlas = texture_atlases.get(&font_atlas.texture_atlas).unwrap(); let texture_atlas = texture_atlases.get(&font_atlas.texture_atlas).unwrap();
commands.spawn(ImageComponents { commands.spawn(ImageComponents {
@ -54,7 +54,6 @@ fn atlas_render_system(
}, },
..Default::default() ..Default::default()
}); });
break;
} }
} }
} }