mirror of
https://github.com/bevyengine/bevy
synced 2024-11-10 07:04:33 +00:00
small and mostly pointless refactoring (#2934)
What it says on the tin. This has more to do with making `clippy` slightly *quieter* than with changing anything that might greatly impact readability or performance. That said, deriving `Default` for a couple of structs is a nice easy win.
This commit is contained in:
parent
803e8cdf80
commit
d8974e7c3d
99 changed files with 329 additions and 304 deletions
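The `Default` derives the message mentions are not among the hunks excerpted below, but the shape of that change is worth spelling out. Here is a minimal sketch with invented names (`Counters` is not a real Bevy type): a hand-written `Default` impl that only fills each field with that field's own default can be replaced by the derive, which expands to exactly the same impl.

```rust
// Before (hypothetical):
//
// pub struct Counters {
//     pub hits: u64,
//     pub misses: u64,
// }
//
// impl Default for Counters {
//     fn default() -> Self {
//         Counters { hits: 0, misses: 0 }
//     }
// }

// After: the derive produces the same field-by-field default.
#[derive(Default)]
pub struct Counters {
    pub hits: u64,
    pub misses: u64,
}

fn main() {
    let c = Counters::default();
    assert_eq!(c.hits + c.misses, 0);
}
```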
@@ -373,9 +373,10 @@ impl App {
         system: impl IntoSystemDescriptor<Params>,
     ) -> &mut Self {
         use std::any::TypeId;
-        if stage_label.type_id() == TypeId::of::<StartupStage>() {
-            panic!("add systems to a startup stage using App::add_startup_system_to_stage");
-        }
+        assert!(
+            stage_label.type_id() != TypeId::of::<StartupStage>(),
+            "add systems to a startup stage using App::add_startup_system_to_stage"
+        );
         self.schedule.add_system_to_stage(stage_label, system);
         self
     }
|
@@ -407,9 +408,10 @@ impl App {
         system_set: SystemSet,
     ) -> &mut Self {
         use std::any::TypeId;
-        if stage_label.type_id() == TypeId::of::<StartupStage>() {
-            panic!("add system sets to a startup stage using App::add_startup_system_set_to_stage");
-        }
+        assert!(
+            stage_label.type_id() != TypeId::of::<StartupStage>(),
+            "add system sets to a startup stage using App::add_startup_system_set_to_stage"
+        );
         self.schedule
             .add_system_set_to_stage(stage_label, system_set);
         self
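Both hunks above collapse an `if … { panic!(…) }` guard into a single `assert!` carrying the same message, which keeps the panic behavior while (presumably) quieting clippy's manual-assert style warning. A minimal sketch of the pattern, with invented function names:

```rust
// Before: a guard clause whose only job is to panic.
fn set_speed_old(speed: f32) -> f32 {
    if speed < 0.0 {
        panic!("speed must be non-negative");
    }
    speed
}

// After: the same check and message as a single `assert!`. Note the
// condition is inverted, since `assert!` states what must hold.
fn set_speed(speed: f32) -> f32 {
    assert!(speed >= 0.0, "speed must be non-negative");
    speed
}

fn main() {
    assert_eq!(set_speed_old(1.5), 1.5);
    assert_eq!(set_speed(1.5), 1.5);
}
```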
|
|
|
@@ -355,11 +355,11 @@ impl AssetServer {
         });

         // load asset dependencies and prepare asset type hashmap
-        for (label, loaded_asset) in load_context.labeled_assets.iter_mut() {
+        for (label, loaded_asset) in &mut load_context.labeled_assets {
             let label_id = LabelId::from(label.as_ref().map(|label| label.as_str()));
             let type_uuid = loaded_asset.value.as_ref().unwrap().type_uuid();
             source_info.asset_types.insert(label_id, type_uuid);
-            for dependency in loaded_asset.dependencies.iter() {
+            for dependency in &loaded_asset.dependencies {
                 self.load_untracked(dependency.clone(), false);
             }
         }
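The loop changes above swap an explicit `.iter_mut()` / `.iter()` call for iterating the reference directly; the two forms are equivalent because `&C` and `&mut C` implement `IntoIterator` by forwarding to those same methods. A small standard-library-only sketch:

```rust
fn main() {
    let mut scores = vec![1, 2, 3];

    // Explicit form: calls the inherent iterator methods.
    for s in scores.iter() {
        println!("{}", s);
    }
    for s in scores.iter_mut() {
        *s += 1;
    }

    // Equivalent form preferred in the hunks above: `&Vec<T>` and
    // `&mut Vec<T>` implement `IntoIterator`, yielding `&T` and `&mut T`.
    for s in &scores {
        println!("{}", s);
    }
    for s in &mut scores {
        *s += 1;
    }

    assert_eq!(scores, vec![3, 4, 5]);
}
```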
|
@ -484,7 +484,7 @@ impl AssetServer {
|
|||
|
||||
fn create_assets_in_load_context(&self, load_context: &mut LoadContext) {
|
||||
let asset_lifecycles = self.server.asset_lifecycles.read();
|
||||
for (label, asset) in load_context.labeled_assets.iter_mut() {
|
||||
for (label, asset) in &mut load_context.labeled_assets {
|
||||
let asset_value = asset
|
||||
.value
|
||||
.take()
|
||||
|
@ -674,7 +674,7 @@ mod test {
|
|||
extensions == vec!["v1.2.3.pong", "2.3.pong", "3.pong", "pong"],
|
||||
_ => false,
|
||||
}
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@@ -216,14 +216,14 @@ impl<T: Asset> Assets<T> {
     ///
     /// Keeps the allocated memory for reuse.
     pub fn clear(&mut self) {
-        self.assets.clear()
+        self.assets.clear();
     }

     /// Reserves capacity for at least additional more elements to be inserted into the assets.
     ///
     /// The collection may reserve more space to avoid frequent reallocations.
     pub fn reserve(&mut self, additional: usize) {
-        self.assets.reserve(additional)
+        self.assets.reserve(additional);
     }

     /// Shrinks the capacity of the asset map as much as possible.
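The only change in this hunk is the trailing semicolon on calls such as `self.assets.clear()`. The call already returns `()`, so either form compiles; the semicolon just turns the tail expression into an explicit statement (this matches clippy's pedantic semicolon-if-nothing-returned lint, or something close to it). A sketch with an invented `Cache` type:

```rust
struct Cache {
    entries: Vec<String>,
}

impl Cache {
    // Before: the call is the tail expression of the block. Because
    // `Vec::clear` returns `()`, this compiles either way.
    fn clear_old(&mut self) {
        self.entries.clear()
    }

    // After: an explicit statement; behavior is identical.
    fn clear(&mut self) {
        self.entries.clear();
    }
}

fn main() {
    let mut cache = Cache { entries: vec!["a".into()] };
    cache.clear_old();
    cache.clear();
    assert!(cache.entries.is_empty());
}
```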
|
@ -231,7 +231,7 @@ impl<T: Asset> Assets<T> {
|
|||
/// It will drop down as much as possible while maintaining the internal rules and possibly
|
||||
/// leaving some space in accordance with the resize policy.
|
||||
pub fn shrink_to_fit(&mut self) {
|
||||
self.assets.shrink_to_fit()
|
||||
self.assets.shrink_to_fit();
|
||||
}
|
||||
|
||||
pub fn asset_event_system(
|
||||
|
@ -241,7 +241,7 @@ impl<T: Asset> Assets<T> {
|
|||
// Check if the events are empty before calling `drain`.
|
||||
// As `drain` triggers change detection.
|
||||
if !assets.events.is_empty() {
|
||||
events.send_batch(assets.events.drain())
|
||||
events.send_batch(assets.events.drain());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -331,6 +331,6 @@ mod tests {
|
|||
let handle = assets_before.add(MyAsset);
|
||||
app.add_asset::<MyAsset>(); // Ensure this doesn't overwrite the Asset
|
||||
let assets_after = app.world.get_resource_mut::<Assets<MyAsset>>().unwrap();
|
||||
assert!(assets_after.get(handle).is_some())
|
||||
assert!(assets_after.get(handle).is_some());
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -179,8 +179,9 @@ impl<T: Asset> Handle<T> {
     }

     #[inline]
+    #[must_use]
     pub fn clone_weak(&self) -> Self {
-        Handle::weak(self.id)
+        Self::weak(self.id)
     }

     pub fn clone_untyped(&self) -> HandleUntyped {
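Two small things happen in this hunk: `clone_weak` gains `#[must_use]`, so callers that call it and discard the result get a warning, and `Handle::weak` becomes `Self::weak`, the usual use-Self style inside an impl block. A self-contained sketch with an invented `Token` type standing in for `Handle`:

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
struct Id(u64);

struct Token {
    id: Id,
}

impl Token {
    fn weak(id: Id) -> Self {
        Token { id }
    }

    // `#[must_use]` warns callers who drop the return value, which is
    // almost always a mistake for a method whose only effect is to
    // produce a new value.
    #[must_use]
    fn clone_weak(&self) -> Self {
        // Inside the impl, `Self::weak` and `Token::weak` are the same
        // function; `Self` just avoids repeating the type name.
        Self::weak(self.id)
    }
}

fn main() {
    let token = Token { id: Id(7) };
    let weak = token.clone_weak();
    assert_eq!(weak.id, Id(7));
}
```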
|
@ -327,8 +328,9 @@ impl HandleUntyped {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn clone_weak(&self) -> HandleUntyped {
|
||||
HandleUntyped::weak(self.id)
|
||||
#[must_use]
|
||||
pub fn clone_weak(&self) -> Self {
|
||||
Self::weak(self.id)
|
||||
}
|
||||
|
||||
pub fn is_weak(&self) -> bool {
|
||||
|
@ -344,9 +346,10 @@ impl HandleUntyped {
|
|||
/// The new handle will maintain the Strong or Weak status of the current handle.
|
||||
pub fn typed<T: Asset>(mut self) -> Handle<T> {
|
||||
if let HandleId::Id(type_uuid, _) = self.id {
|
||||
if T::TYPE_UUID != type_uuid {
|
||||
panic!("Attempted to convert handle to invalid type.");
|
||||
}
|
||||
assert!(
|
||||
T::TYPE_UUID == type_uuid,
|
||||
"Attempted to convert handle to invalid type."
|
||||
);
|
||||
}
|
||||
let handle_type = match &self.handle_type {
|
||||
HandleType::Strong(sender) => HandleType::Strong(sender.clone()),
|
||||
|
|
|
@ -155,7 +155,7 @@ pub fn filesystem_watcher_system(asset_server: Res<AssetServer>) {
|
|||
..
|
||||
} = event
|
||||
{
|
||||
for path in paths.iter() {
|
||||
for path in &paths {
|
||||
if !changed.contains(path) {
|
||||
let relative_path = path.strip_prefix(&asset_io.root_path).unwrap();
|
||||
let _ = asset_server.load_untracked(relative_path.into(), true);
|
||||
|
|
|
@ -47,11 +47,13 @@ impl<T: Asset> LoadedAsset<T> {
|
|||
self.dependencies.push(asset_path.to_owned());
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_dependency(mut self, asset_path: AssetPath) -> Self {
|
||||
self.add_dependency(asset_path);
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_dependencies(mut self, mut asset_paths: Vec<AssetPath<'static>>) -> Self {
|
||||
for asset_path in asset_paths.drain(..) {
|
||||
self.add_dependency(asset_path);
|
||||
|
@ -132,7 +134,7 @@ impl<'a> LoadContext<'a> {
|
|||
|
||||
pub fn get_asset_metas(&self) -> Vec<AssetMeta> {
|
||||
let mut asset_metas = Vec::new();
|
||||
for (label, asset) in self.labeled_assets.iter() {
|
||||
for (label, asset) in &self.labeled_assets {
|
||||
asset_metas.push(AssetMeta {
|
||||
dependencies: asset.dependencies.clone(),
|
||||
label: label.clone(),
|
||||
|
@ -182,7 +184,7 @@ impl<T: AssetDynamic> AssetLifecycle for AssetLifecycleChannel<T> {
|
|||
id,
|
||||
version,
|
||||
}))
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
} else {
|
||||
panic!(
|
||||
"Failed to downcast asset to {}.",
|
||||
|
|
|
@@ -41,10 +41,10 @@ impl Hash for FloatOrd {
     fn hash<H: Hasher>(&self, state: &mut H) {
         if self.0.is_nan() {
             // Ensure all NaN representations hash to the same value
-            state.write(bytemuck::bytes_of(&f32::NAN))
+            state.write(bytemuck::bytes_of(&f32::NAN));
         } else if self.0 == 0.0 {
             // Ensure both zeroes hash to the same value
-            state.write(bytemuck::bytes_of(&0.0f32))
+            state.write(bytemuck::bytes_of(&0.0f32));
         } else {
             state.write(bytemuck::bytes_of(&self.0));
         }
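This hunk only adds semicolons, but the code it touches is a useful reminder of why `FloatOrd` special-cases NaN and zero before hashing raw bytes: `f32` has many NaN bit patterns and two zeros, so hashing the bytes naively would let values that compare equal hash differently. A quick standalone illustration:

```rust
fn main() {
    // Two different NaN bit patterns: unequal to everything (including
    // each other), yet their raw bits differ.
    let nan1 = f32::NAN;
    let nan2 = f32::from_bits(f32::NAN.to_bits() | 1);
    assert!(nan1 != nan2);
    assert_ne!(nan1.to_bits(), nan2.to_bits());

    // Positive and negative zero compare equal but have different bits,
    // so hashing the bytes directly would break the Hash/Eq contract.
    assert_eq!(0.0_f32, -0.0_f32);
    assert_ne!(0.0_f32.to_bits(), (-0.0_f32).to_bits());
}
```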
|
|
|
@ -109,6 +109,7 @@ impl FixedTimestep {
|
|||
|
||||
/// Sets the label for the timestep. Setting a label allows a timestep
|
||||
/// to be observed by the global [`FixedTimesteps`] resource.
|
||||
#[must_use]
|
||||
pub fn with_label(mut self, label: &str) -> Self {
|
||||
self.state.label = Some(label.to_string());
|
||||
self
|
||||
|
@ -197,7 +198,7 @@ impl System for FixedTimestep {
|
|||
}
|
||||
|
||||
fn apply_buffers(&mut self, world: &mut World) {
|
||||
self.internal_system.apply_buffers(world)
|
||||
self.internal_system.apply_buffers(world);
|
||||
}
|
||||
|
||||
fn initialize(&mut self, world: &mut World) {
|
||||
|
|
|
@ -180,7 +180,7 @@ impl Timer {
|
|||
self.stopwatch.reset();
|
||||
self.finished = self.just_finished();
|
||||
}
|
||||
self.repeating = repeating
|
||||
self.repeating = repeating;
|
||||
}
|
||||
|
||||
/// Advance the timer by `delta` seconds.
|
||||
|
|
|
@ -334,7 +334,7 @@ pub fn extract_clear_color(clear_color: Res<ClearColor>, mut render_world: ResMu
|
|||
// If the clear color has changed
|
||||
if clear_color.is_changed() {
|
||||
// Update the clear color resource in the render world
|
||||
render_world.insert_resource(clear_color.clone())
|
||||
render_world.insert_resource(clear_color.clone());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -63,7 +63,7 @@ impl Node for MainPass2dNode {
|
|||
|
||||
let mut draw_functions = draw_functions.write();
|
||||
let mut tracked_pass = TrackedRenderPass::new(render_pass);
|
||||
for item in transparent_phase.items.iter() {
|
||||
for item in &transparent_phase.items {
|
||||
let draw_function = draw_functions.get_mut(item.draw_function).unwrap();
|
||||
draw_function.draw(world, &mut tracked_pass, view_entity, item);
|
||||
}
|
||||
|
|
|
@ -82,7 +82,7 @@ impl Node for MainPass3dNode {
|
|||
.begin_render_pass(&pass_descriptor);
|
||||
let mut draw_functions = draw_functions.write();
|
||||
let mut tracked_pass = TrackedRenderPass::new(render_pass);
|
||||
for item in opaque_phase.items.iter() {
|
||||
for item in &opaque_phase.items {
|
||||
let draw_function = draw_functions.get_mut(item.draw_function).unwrap();
|
||||
draw_function.draw(world, &mut tracked_pass, view_entity, item);
|
||||
}
|
||||
|
@ -116,7 +116,7 @@ impl Node for MainPass3dNode {
|
|||
.begin_render_pass(&pass_descriptor);
|
||||
let mut draw_functions = draw_functions.write();
|
||||
let mut tracked_pass = TrackedRenderPass::new(render_pass);
|
||||
for item in alpha_mask_phase.items.iter() {
|
||||
for item in &alpha_mask_phase.items {
|
||||
let draw_function = draw_functions.get_mut(item.draw_function).unwrap();
|
||||
draw_function.draw(world, &mut tracked_pass, view_entity, item);
|
||||
}
|
||||
|
@ -154,7 +154,7 @@ impl Node for MainPass3dNode {
|
|||
.begin_render_pass(&pass_descriptor);
|
||||
let mut draw_functions = draw_functions.write();
|
||||
let mut tracked_pass = TrackedRenderPass::new(render_pass);
|
||||
for item in transparent_phase.items.iter() {
|
||||
for item in &transparent_phase.items {
|
||||
let draw_function = draw_functions.get_mut(item.draw_function).unwrap();
|
||||
draw_function.draw(world, &mut tracked_pass, view_entity, item);
|
||||
}
|
||||
|
|
|
@ -4,9 +4,10 @@ use syn::{parse_macro_input, ItemFn};
|
|||
|
||||
pub fn bevy_main(_attr: TokenStream, item: TokenStream) -> TokenStream {
|
||||
let input = parse_macro_input!(item as ItemFn);
|
||||
if input.sig.ident != "main" {
|
||||
panic!("`bevy_main` can only be used on a function called 'main'.")
|
||||
}
|
||||
assert!(
|
||||
input.sig.ident == "main",
|
||||
"`bevy_main` can only be used on a function called 'main'.",
|
||||
);
|
||||
|
||||
TokenStream::from(quote! {
|
||||
#[no_mangle]
|
||||
|
|
|
@@ -30,5 +30,5 @@ pub fn derive_app_label(input: TokenStream) -> TokenStream {
     let input = syn::parse_macro_input!(input as syn::DeriveInput);
     let mut trait_path = BevyManifest::default().get_path("bevy_app");
     trait_path.segments.push(format_ident!("AppLabel").into());
-    derive_label(input, trait_path)
+    derive_label(input, &trait_path)
 }
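`derive_label` now borrows the `syn::Path` rather than taking it by value; the same signature change repeats for the other label derives and for `storage_path` further down. Passing a reference is the usual fix when a function only reads its argument (clippy's needless-pass-by-value territory), and it spares callers a clone if they still need the value afterwards. A tiny sketch with invented names:

```rust
// Before: taking `String` by value forces the caller to give it up or clone.
fn shout_owned(name: String) -> String {
    format!("{}!", name.to_uppercase())
}

// After: borrowing is enough, since the function only reads the value.
fn shout(name: &str) -> String {
    format!("{}!", name.to_uppercase())
}

fn main() {
    let name = String::from("bevy");
    let a = shout_owned(name.clone()); // clone needed to keep using `name`
    let b = shout(&name);              // no clone needed
    assert_eq!(a, b);
    assert_eq!(b, "BEVY!");
}
```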
|
|
|
@ -65,7 +65,7 @@ impl Diagnostic {
|
|||
"Diagnostic {:?} has name longer than {} characters, and so might overflow in the LogDiagnosticsPlugin\
|
||||
Consider using a shorter name.",
|
||||
name, MAX_DIAGNOSTIC_NAME_WIDTH
|
||||
)
|
||||
);
|
||||
}
|
||||
Diagnostic {
|
||||
id,
|
||||
|
@ -77,6 +77,7 @@ impl Diagnostic {
|
|||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_suffix(mut self, suffix: impl Into<Cow<'static, str>>) -> Self {
|
||||
self.suffix = suffix.into();
|
||||
self
|
||||
|
|
|
@ -13,7 +13,7 @@ pub fn derive_component(input: TokenStream) -> TokenStream {
|
|||
Err(e) => return e.into_compile_error().into(),
|
||||
};
|
||||
|
||||
let storage = storage_path(bevy_ecs_path.clone(), attrs.storage);
|
||||
let storage = storage_path(&bevy_ecs_path, attrs.storage);
|
||||
|
||||
ast.generics
|
||||
.make_where_clause()
|
||||
|
@ -96,7 +96,7 @@ fn parse_component_attr(ast: &DeriveInput) -> Result<Attrs> {
|
|||
Ok(attrs)
|
||||
}
|
||||
|
||||
fn storage_path(bevy_ecs_path: Path, ty: StorageTy) -> TokenStream2 {
|
||||
fn storage_path(bevy_ecs_path: &Path, ty: StorageTy) -> TokenStream2 {
|
||||
let typename = match ty {
|
||||
StorageTy::Table => Ident::new("TableStorage", Span::call_site()),
|
||||
StorageTy::SparseSet => Ident::new("SparseStorage", Span::call_site()),
|
||||
|
|
|
@ -433,7 +433,7 @@ pub fn derive_system_label(input: TokenStream) -> TokenStream {
|
|||
trait_path
|
||||
.segments
|
||||
.push(format_ident!("SystemLabel").into());
|
||||
derive_label(input, trait_path)
|
||||
derive_label(input, &trait_path)
|
||||
}
|
||||
|
||||
#[proc_macro_derive(StageLabel)]
|
||||
|
@ -442,7 +442,7 @@ pub fn derive_stage_label(input: TokenStream) -> TokenStream {
|
|||
let mut trait_path = bevy_ecs_path();
|
||||
trait_path.segments.push(format_ident!("schedule").into());
|
||||
trait_path.segments.push(format_ident!("StageLabel").into());
|
||||
derive_label(input, trait_path)
|
||||
derive_label(input, &trait_path)
|
||||
}
|
||||
|
||||
#[proc_macro_derive(AmbiguitySetLabel)]
|
||||
|
@ -453,7 +453,7 @@ pub fn derive_ambiguity_set_label(input: TokenStream) -> TokenStream {
|
|||
trait_path
|
||||
.segments
|
||||
.push(format_ident!("AmbiguitySetLabel").into());
|
||||
derive_label(input, trait_path)
|
||||
derive_label(input, &trait_path)
|
||||
}
|
||||
|
||||
#[proc_macro_derive(RunCriteriaLabel)]
|
||||
|
@ -464,7 +464,7 @@ pub fn derive_run_criteria_label(input: TokenStream) -> TokenStream {
|
|||
trait_path
|
||||
.segments
|
||||
.push(format_ident!("RunCriteriaLabel").into());
|
||||
derive_label(input, trait_path)
|
||||
derive_label(input, &trait_path)
|
||||
}
|
||||
|
||||
pub(crate) fn bevy_ecs_path() -> syn::Path {
|
||||
|
|
|
@ -520,7 +520,7 @@ impl Archetypes {
|
|||
}
|
||||
|
||||
pub fn clear_entities(&mut self) {
|
||||
for archetype in self.archetypes.iter_mut() {
|
||||
for archetype in &mut self.archetypes {
|
||||
archetype.clear_entities();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -489,7 +489,7 @@ impl<'a, 'b> BundleInserter<'a, 'b> {
|
|||
{
|
||||
&mut *self.archetype
|
||||
} else if new_archetype.id() == swapped_location.archetype_id {
|
||||
&mut *new_archetype
|
||||
new_archetype
|
||||
} else {
|
||||
// SAFE: the only two borrowed archetypes are above and we just did collision checks
|
||||
&mut *self
|
||||
|
@ -630,9 +630,11 @@ unsafe fn initialize_bundle(
|
|||
let mut deduped = component_ids.clone();
|
||||
deduped.sort();
|
||||
deduped.dedup();
|
||||
if deduped.len() != component_ids.len() {
|
||||
panic!("Bundle {} has duplicate components", bundle_type_name);
|
||||
}
|
||||
assert!(
|
||||
deduped.len() == component_ids.len(),
|
||||
"Bundle {} has duplicate components",
|
||||
bundle_type_name
|
||||
);
|
||||
|
||||
BundleInfo {
|
||||
id,
|
||||
|
|
|
@ -179,7 +179,7 @@ pub struct ComponentDescriptor {
|
|||
impl ComponentDescriptor {
|
||||
// SAFETY: The pointer points to a valid value of type `T` and it is safe to drop this value.
|
||||
unsafe fn drop_ptr<T>(x: *mut u8) {
|
||||
x.cast::<T>().drop_in_place()
|
||||
x.cast::<T>().drop_in_place();
|
||||
}
|
||||
|
||||
pub fn new<T: Component>() -> Self {
|
||||
|
|
|
@ -540,7 +540,7 @@ impl Entities {
|
|||
unsafe {
|
||||
self.flush(|_entity, location| {
|
||||
location.archetype_id = ArchetypeId::INVALID;
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -612,7 +612,7 @@ mod tests {
|
|||
unsafe {
|
||||
entities.flush(|_entity, _location| {
|
||||
// do nothing ... leaving entity location invalid
|
||||
})
|
||||
});
|
||||
};
|
||||
|
||||
assert!(entities.contains(e));
|
||||
|
|
|
@@ -250,8 +250,8 @@ fn internal_event_reader<'a, T>(
     } else {
         0
     };
-    let a = events.events_a.get(a_index..).unwrap_or_else(|| &[]);
-    let b = events.events_b.get(b_index..).unwrap_or_else(|| &[]);
+    let a = events.events_a.get(a_index..).unwrap_or_default();
+    let b = events.events_b.get(b_index..).unwrap_or_default();
     let unread_count = a.len() + b.len();
     *last_event_count = events.event_count - unread_count;
     let iterator = match events.state {
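`slice::get(range)` yields an `Option<&[T]>`, and `&[T]` implements `Default` as the empty slice, so `unwrap_or_else(|| &[])` and `unwrap_or_default()` are interchangeable here. A small demonstration:

```rust
fn main() {
    let events: Vec<i32> = vec![10, 20, 30];

    // In range: both forms return the same subslice.
    let tail = events.get(1..).unwrap_or_default();
    assert_eq!(tail, &[20, 30]);

    // Out of range: `get` returns `None`, and the default `&[i32]` is the
    // empty slice -- exactly what `unwrap_or_else(|| &[])` produced before.
    let empty: &[i32] = events.get(5..).unwrap_or_default();
    assert!(empty.is_empty());
}
```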
|
|
|
@ -382,7 +382,7 @@ mod tests {
|
|||
world
|
||||
.query::<(Entity, &A)>()
|
||||
.par_for_each(&world, &task_pool, 2, |(e, &A(i))| {
|
||||
results.lock().push((e, i))
|
||||
results.lock().push((e, i));
|
||||
});
|
||||
results.lock().sort();
|
||||
assert_eq!(
|
||||
|
|
|
@ -212,7 +212,7 @@ impl<T: SparseSetIndex> FilteredAccessSet<T> {
|
|||
// if combined unfiltered access is incompatible, check each filtered access for
|
||||
// compatibility
|
||||
if !filtered_access.access.is_compatible(&self.combined_access) {
|
||||
for current_filtered_access in self.filtered_accesses.iter() {
|
||||
for current_filtered_access in &self.filtered_accesses {
|
||||
if !current_filtered_access.is_compatible(filtered_access) {
|
||||
return current_filtered_access
|
||||
.access
|
||||
|
|
|
@ -250,11 +250,12 @@ unsafe impl<T: Component> FetchState for ReadState<T> {
|
|||
}
|
||||
|
||||
fn update_component_access(&self, access: &mut FilteredAccess<ComponentId>) {
|
||||
if access.access().has_write(self.component_id) {
|
||||
panic!("&{} conflicts with a previous access in this query. Shared access cannot coincide with exclusive access.",
|
||||
std::any::type_name::<T>());
|
||||
}
|
||||
access.add_read(self.component_id)
|
||||
assert!(
|
||||
!access.access().has_write(self.component_id),
|
||||
"&{} conflicts with a previous access in this query. Shared access cannot coincide with exclusive access.",
|
||||
std::any::type_name::<T>(),
|
||||
);
|
||||
access.add_read(self.component_id);
|
||||
}
|
||||
|
||||
fn update_archetype_component_access(
|
||||
|
@ -452,10 +453,11 @@ unsafe impl<T: Component> FetchState for WriteState<T> {
|
|||
}
|
||||
|
||||
fn update_component_access(&self, access: &mut FilteredAccess<ComponentId>) {
|
||||
if access.access().has_read(self.component_id) {
|
||||
panic!("&mut {} conflicts with a previous access in this query. Mutable component access must be unique.",
|
||||
std::any::type_name::<T>());
|
||||
}
|
||||
assert!(
|
||||
!access.access().has_read(self.component_id),
|
||||
"&mut {} conflicts with a previous access in this query. Mutable component access must be unique.",
|
||||
std::any::type_name::<T>(),
|
||||
);
|
||||
access.add_write(self.component_id);
|
||||
}
|
||||
|
||||
|
@@ -551,7 +553,7 @@ impl<'w, 's, T: Component> Fetch<'w, 's> for WriteFetch<T> {
         Mut {
             value: &mut *self.table_components.as_ptr().add(table_row),
             ticks: Ticks {
-                component_ticks: &mut *(&*self.table_ticks.add(table_row)).get(),
+                component_ticks: &mut *(*self.table_ticks.add(table_row)).get(),
                 change_tick: self.change_tick,
                 last_change_tick: self.last_change_tick,
             },
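`self.table_ticks.add(table_row)` is a raw pointer, and the old `(&*ptr).get()` reborrowed the pointee only for the method call to auto-borrow it again; `(*ptr).get()` lets the call borrow the place expression directly. A minimal `UnsafeCell` sketch of the same shape (nothing here is a Bevy type):

```rust
use std::cell::UnsafeCell;

fn main() {
    let cell = UnsafeCell::new(41_u32);
    let ptr: *const UnsafeCell<u32> = &cell;

    // SAFETY: `ptr` points at `cell`, which is live for this whole block,
    // and no other reference to the inner value exists while we write.
    unsafe {
        // Before: `(&*ptr).get()` -- create a `&UnsafeCell<u32>`, then call
        // the `&self` method on it.
        // After: `(*ptr).get()` -- the method call auto-borrows the place
        // expression, so the explicit reborrow is redundant.
        let inner: *mut u32 = (*ptr).get();
        *inner += 1;
    }

    assert_eq!(cell.into_inner(), 42);
}
```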
|
@ -578,7 +580,7 @@ impl<'w, 's, T: Component> Fetch<'w, 's> for WriteFetch<T> {
|
|||
Mut {
|
||||
value: &mut *self.table_components.as_ptr().add(table_row),
|
||||
ticks: Ticks {
|
||||
component_ticks: &mut *(&*self.table_ticks.add(table_row)).get(),
|
||||
component_ticks: &mut *(*self.table_ticks.add(table_row)).get(),
|
||||
change_tick: self.change_tick,
|
||||
last_change_tick: self.last_change_tick,
|
||||
},
|
||||
|
@ -705,7 +707,7 @@ unsafe impl<T: FetchState> FetchState for OptionState<T> {
|
|||
) {
|
||||
if self.state.matches_archetype(archetype) {
|
||||
self.state
|
||||
.update_archetype_component_access(archetype, access)
|
||||
.update_archetype_component_access(archetype, access);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -864,11 +866,12 @@ unsafe impl<T: Component> FetchState for ChangeTrackersState<T> {
|
|||
}
|
||||
|
||||
fn update_component_access(&self, access: &mut FilteredAccess<ComponentId>) {
|
||||
if access.access().has_write(self.component_id) {
|
||||
panic!("ChangeTrackers<{}> conflicts with a previous access in this query. Shared access cannot coincide with exclusive access.",
|
||||
std::any::type_name::<T>());
|
||||
}
|
||||
access.add_read(self.component_id)
|
||||
assert!(
|
||||
!access.access().has_write(self.component_id),
|
||||
"ChangeTrackers<{}> conflicts with a previous access in this query. Shared access cannot coincide with exclusive access.",
|
||||
std::any::type_name::<T>()
|
||||
);
|
||||
access.add_read(self.component_id);
|
||||
}
|
||||
|
||||
fn update_archetype_component_access(
|
||||
|
@ -989,7 +992,7 @@ impl<'w, 's, T: Component> Fetch<'w, 's> for ChangeTrackersFetch<T> {
|
|||
StorageType::Table => {
|
||||
let table_row = *self.entity_table_rows.add(archetype_index);
|
||||
ChangeTrackers {
|
||||
component_ticks: (&*self.table_ticks.add(table_row)).clone(),
|
||||
component_ticks: (*self.table_ticks.add(table_row)).clone(),
|
||||
marker: PhantomData,
|
||||
last_change_tick: self.last_change_tick,
|
||||
change_tick: self.change_tick,
|
||||
|
@ -998,7 +1001,7 @@ impl<'w, 's, T: Component> Fetch<'w, 's> for ChangeTrackersFetch<T> {
|
|||
StorageType::SparseSet => {
|
||||
let entity = *self.entities.add(archetype_index);
|
||||
ChangeTrackers {
|
||||
component_ticks: (&*self.sparse_set).get_ticks(entity).cloned().unwrap(),
|
||||
component_ticks: (*self.sparse_set).get_ticks(entity).cloned().unwrap(),
|
||||
marker: PhantomData,
|
||||
last_change_tick: self.last_change_tick,
|
||||
change_tick: self.change_tick,
|
||||
|
@ -1010,7 +1013,7 @@ impl<'w, 's, T: Component> Fetch<'w, 's> for ChangeTrackersFetch<T> {
|
|||
#[inline]
|
||||
unsafe fn table_fetch(&mut self, table_row: usize) -> Self::Item {
|
||||
ChangeTrackers {
|
||||
component_ticks: (&*self.table_ticks.add(table_row)).clone(),
|
||||
component_ticks: (*self.table_ticks.add(table_row)).clone(),
|
||||
marker: PhantomData,
|
||||
last_change_tick: self.last_change_tick,
|
||||
change_tick: self.change_tick,
|
||||
|
|
|
@ -101,10 +101,11 @@ where
|
|||
|
||||
#[inline]
|
||||
pub fn validate_world(&self, world: &World) {
|
||||
if world.id() != self.world_id {
|
||||
panic!("Attempted to use {} with a mismatched World. QueryStates can only be used with the World they were created from.",
|
||||
std::any::type_name::<Self>());
|
||||
}
|
||||
assert!(
|
||||
world.id() == self.world_id,
|
||||
"Attempted to use {} with a mismatched World. QueryStates can only be used with the World they were created from.",
|
||||
std::any::type_name::<Self>(),
|
||||
);
|
||||
}
|
||||
|
||||
/// Creates a new [`Archetype`].
|
||||
|
@ -604,7 +605,7 @@ where
|
|||
<F::Fetch as Fetch>::init(world, &self.filter_state, last_change_tick, change_tick);
|
||||
if Q::Fetch::IS_DENSE && F::Fetch::IS_DENSE {
|
||||
let tables = &world.storages().tables;
|
||||
for table_id in self.matched_table_ids.iter() {
|
||||
for table_id in &self.matched_table_ids {
|
||||
let table = &tables[*table_id];
|
||||
fetch.set_table(&self.fetch_state, table);
|
||||
filter.set_table(&self.filter_state, table);
|
||||
|
@ -620,7 +621,7 @@ where
|
|||
} else {
|
||||
let archetypes = &world.archetypes;
|
||||
let tables = &world.storages().tables;
|
||||
for archetype_id in self.matched_archetype_ids.iter() {
|
||||
for archetype_id in &self.matched_archetype_ids {
|
||||
let archetype = &archetypes[*archetype_id];
|
||||
fetch.set_archetype(&self.fetch_state, archetype, tables);
|
||||
filter.set_archetype(&self.filter_state, archetype, tables);
|
||||
|
@ -664,7 +665,7 @@ where
|
|||
task_pool.scope(|scope| {
|
||||
if QF::IS_DENSE && F::Fetch::IS_DENSE {
|
||||
let tables = &world.storages().tables;
|
||||
for table_id in self.matched_table_ids.iter() {
|
||||
for table_id in &self.matched_table_ids {
|
||||
let table = &tables[*table_id];
|
||||
let mut offset = 0;
|
||||
while offset < table.len() {
|
||||
|
@ -696,7 +697,7 @@ where
|
|||
}
|
||||
} else {
|
||||
let archetypes = &world.archetypes;
|
||||
for archetype_id in self.matched_archetype_ids.iter() {
|
||||
for archetype_id in &self.matched_archetype_ids {
|
||||
let mut offset = 0;
|
||||
let archetype = &archetypes[*archetype_id];
|
||||
while offset < archetype.len() {
|
||||
|
|
|
@ -373,7 +373,7 @@ mod tests {
|
|||
assert_eq!(
|
||||
receive_events(&world),
|
||||
vec![StartedSystems(3), StartedSystems(3),]
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -45,12 +45,14 @@ pub struct Schedule {
|
|||
|
||||
impl Schedule {
|
||||
/// Similar to [`add_stage`](Self::add_stage), but it also returns itself.
|
||||
#[must_use]
|
||||
pub fn with_stage<S: Stage>(mut self, label: impl StageLabel, stage: S) -> Self {
|
||||
self.add_stage(label, stage);
|
||||
self
|
||||
}
|
||||
|
||||
/// Similar to [`add_stage_after`](Self::add_stage_after), but it also returns itself.
|
||||
#[must_use]
|
||||
pub fn with_stage_after<S: Stage>(
|
||||
mut self,
|
||||
target: impl StageLabel,
|
||||
|
@ -62,6 +64,7 @@ impl Schedule {
|
|||
}
|
||||
|
||||
/// Similar to [`add_stage_before`](Self::add_stage_before), but it also returns itself.
|
||||
#[must_use]
|
||||
pub fn with_stage_before<S: Stage>(
|
||||
mut self,
|
||||
target: impl StageLabel,
|
||||
|
@ -72,12 +75,14 @@ impl Schedule {
|
|||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_run_criteria<S: System<In = (), Out = ShouldRun>>(mut self, system: S) -> Self {
|
||||
self.set_run_criteria(system);
|
||||
self
|
||||
}
|
||||
|
||||
/// Similar to [`add_system_to_stage`](Self::add_system_to_stage), but it also returns itself.
|
||||
#[must_use]
|
||||
pub fn with_system_in_stage<Params>(
|
||||
mut self,
|
||||
stage_label: impl StageLabel,
|
||||
|
@ -109,9 +114,7 @@ impl Schedule {
|
|||
let label: Box<dyn StageLabel> = Box::new(label);
|
||||
self.stage_order.push(label.clone());
|
||||
let prev = self.stages.insert(label.clone(), Box::new(stage));
|
||||
if prev.is_some() {
|
||||
panic!("Stage already exists: {:?}.", label);
|
||||
}
|
||||
assert!(prev.is_none(), "Stage already exists: {:?}.", label);
|
||||
self
|
||||
}
|
||||
|
||||
|
@ -144,9 +147,7 @@ impl Schedule {
|
|||
|
||||
self.stage_order.insert(target_index + 1, label.clone());
|
||||
let prev = self.stages.insert(label.clone(), Box::new(stage));
|
||||
if prev.is_some() {
|
||||
panic!("Stage already exists: {:?}.", label);
|
||||
}
|
||||
assert!(prev.is_none(), "Stage already exists: {:?}.", label);
|
||||
self
|
||||
}
|
||||
|
||||
|
@ -180,9 +181,7 @@ impl Schedule {
|
|||
|
||||
self.stage_order.insert(target_index, label.clone());
|
||||
let prev = self.stages.insert(label.clone(), Box::new(stage));
|
||||
if prev.is_some() {
|
||||
panic!("Stage already exists: {:?}.", label);
|
||||
}
|
||||
assert!(prev.is_none(), "Stage already exists: {:?}.", label);
|
||||
self
|
||||
}
|
||||
|
||||
|
@ -335,7 +334,7 @@ impl Schedule {
|
|||
|
||||
/// Executes each [`Stage`] contained in the schedule, one at a time.
|
||||
pub fn run_once(&mut self, world: &mut World) {
|
||||
for label in self.stage_order.iter() {
|
||||
for label in &self.stage_order {
|
||||
#[cfg(feature = "trace")]
|
||||
let stage_span = bevy_utils::tracing::info_span!("stage", name = ?label);
|
||||
#[cfg(feature = "trace")]
|
||||
|
|
|
@ -137,6 +137,7 @@ impl SystemStage {
|
|||
self.executor = executor;
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_system<Params>(mut self, system: impl IntoSystemDescriptor<Params>) -> Self {
|
||||
self.add_system(system);
|
||||
self
|
||||
|
@ -208,7 +209,7 @@ impl SystemStage {
|
|||
}
|
||||
|
||||
pub fn apply_buffers(&mut self, world: &mut World) {
|
||||
for container in self.parallel.iter_mut() {
|
||||
for container in &mut self.parallel {
|
||||
let system = container.system_mut();
|
||||
#[cfg(feature = "trace")]
|
||||
let span = bevy_utils::tracing::info_span!("system_commands", name = &*system.name());
|
||||
|
@ -251,6 +252,7 @@ impl SystemStage {
|
|||
&self.exclusive_before_commands
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_system_set(mut self, system_set: SystemSet) -> Self {
|
||||
self.add_system_set(system_set);
|
||||
self
|
||||
|
@ -261,7 +263,7 @@ impl SystemStage {
|
|||
let (run_criteria, mut systems) = system_set.bake();
|
||||
let set_run_criteria_index = run_criteria.and_then(|criteria| {
|
||||
// validate that no systems have criteria
|
||||
for system in systems.iter_mut() {
|
||||
for system in &mut systems {
|
||||
if let Some(name) = match system {
|
||||
SystemDescriptor::Exclusive(descriptor) => descriptor
|
||||
.run_criteria
|
||||
|
@ -285,15 +287,15 @@ impl SystemStage {
|
|||
Some(self.add_run_criteria_internal(descriptor))
|
||||
}
|
||||
RunCriteriaDescriptorOrLabel::Label(label) => {
|
||||
for system in systems.iter_mut() {
|
||||
for system in &mut systems {
|
||||
match system {
|
||||
SystemDescriptor::Exclusive(descriptor) => {
|
||||
descriptor.run_criteria =
|
||||
Some(RunCriteriaDescriptorOrLabel::Label(label.clone()))
|
||||
Some(RunCriteriaDescriptorOrLabel::Label(label.clone()));
|
||||
}
|
||||
SystemDescriptor::Parallel(descriptor) => {
|
||||
descriptor.run_criteria =
|
||||
Some(RunCriteriaDescriptorOrLabel::Label(label.clone()))
|
||||
Some(RunCriteriaDescriptorOrLabel::Label(label.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -308,6 +310,7 @@ impl SystemStage {
|
|||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_run_criteria<Param, S: IntoSystem<(), ShouldRun, Param>>(
|
||||
mut self,
|
||||
system: S,
|
||||
|
@ -325,6 +328,7 @@ impl SystemStage {
|
|||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_system_run_criteria(mut self, run_criteria: RunCriteriaDescriptor) -> Self {
|
||||
self.add_system_run_criteria(run_criteria);
|
||||
self
|
||||
|
@ -612,7 +616,7 @@ impl SystemStage {
|
|||
.map(|label| (label.clone(), order_inverted[index].0))
|
||||
})
|
||||
.collect();
|
||||
for criteria in self.run_criteria.iter_mut() {
|
||||
for criteria in &mut self.run_criteria {
|
||||
if let RunCriteriaInner::Piped { input: parent, .. } = &mut criteria.inner {
|
||||
let label = &criteria.after[0];
|
||||
*parent = *labels.get(label).unwrap_or_else(|| {
|
||||
|
@ -745,7 +749,7 @@ fn find_ambiguities(systems: &[impl SystemContainer]) -> Vec<(usize, usize, Vec<
|
|||
if let (Some(a), Some(b)) = (a_access, b_access) {
|
||||
let conflicts = a.get_conflicts(b);
|
||||
if !conflicts.is_empty() {
|
||||
ambiguities.push((index_a, index_b, conflicts))
|
||||
ambiguities.push((index_a, index_b, conflicts));
|
||||
}
|
||||
} else {
|
||||
ambiguities.push((index_a, index_b, Vec::new()));
|
||||
|
@ -909,7 +913,7 @@ impl Stage for SystemStage {
|
|||
ShouldRun::YesAndCheckAgain | ShouldRun::NoAndCheckAgain => {
|
||||
match &mut criteria.inner {
|
||||
RunCriteriaInner::Single(system) => {
|
||||
criteria.should_run = system.run((), world)
|
||||
criteria.should_run = system.run((), world);
|
||||
}
|
||||
RunCriteriaInner::Piped {
|
||||
input: parent,
|
||||
|
@ -917,7 +921,7 @@ impl Stage for SystemStage {
|
|||
..
|
||||
} => {
|
||||
criteria.should_run =
|
||||
system.run(run_criteria[*parent].should_run, world)
|
||||
system.run(run_criteria[*parent].should_run, world);
|
||||
}
|
||||
}
|
||||
match criteria.should_run {
|
||||
|
@ -1395,7 +1399,7 @@ mod tests {
|
|||
.with_system(make_parallel(4).label("4").after("3"))
|
||||
.with_system(make_parallel(3).label("3").after("2").before("4"));
|
||||
stage.run(&mut world);
|
||||
for container in stage.parallel.iter() {
|
||||
for container in &stage.parallel {
|
||||
assert!(container.dependencies().len() <= 1);
|
||||
}
|
||||
stage.set_executor(Box::new(SingleThreadedExecutor::default()));
|
||||
|
@ -2092,7 +2096,7 @@ mod tests {
|
|||
// just wrapped over
|
||||
(u32::MAX / 2, 0, vec![ids[0], ids[3], ids[4]]),
|
||||
];
|
||||
for (last_change_tick, change_tick, changed_entities) in test_cases.iter() {
|
||||
for (last_change_tick, change_tick, changed_entities) in &test_cases {
|
||||
*world.change_tick.get_mut() = *change_tick;
|
||||
world.last_change_tick = *last_change_tick;
|
||||
|
||||
|
|
|
@ -70,31 +70,37 @@ impl SystemSet {
|
|||
Self::new().with_run_criteria(State::<T>::on_resume(s))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn in_ambiguity_set(mut self, set: impl AmbiguitySetLabel) -> Self {
|
||||
self.ambiguity_sets.push(Box::new(set));
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_system<Params>(mut self, system: impl IntoSystemDescriptor<Params>) -> Self {
|
||||
self.systems.push(system.into_descriptor());
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_run_criteria<Marker>(mut self, run_criteria: impl IntoRunCriteria<Marker>) -> Self {
|
||||
self.run_criteria = Some(run_criteria.into());
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn label(mut self, label: impl SystemLabel) -> Self {
|
||||
self.labels.push(Box::new(label));
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn before(mut self, label: impl SystemLabel) -> Self {
|
||||
self.before.push(Box::new(label));
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn after(mut self, label: impl SystemLabel) -> Self {
|
||||
self.after.push(Box::new(label));
|
||||
self
|
||||
|
|
|
@ -180,7 +180,7 @@ impl BlobVec {
|
|||
pub unsafe fn swap_remove_and_drop_unchecked(&mut self, index: usize) {
|
||||
debug_assert!(index < self.len());
|
||||
let value = self.swap_remove_and_forget_unchecked(index);
|
||||
(self.drop)(value)
|
||||
(self.drop)(value);
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
|
@ -291,7 +291,7 @@ mod tests {
|
|||
|
||||
// SAFETY: The pointer points to a valid value of type `T` and it is safe to drop this value.
|
||||
unsafe fn drop_ptr<T>(x: *mut u8) {
|
||||
x.cast::<T>().drop_in_place()
|
||||
x.cast::<T>().drop_in_place();
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
|
|
|
@ -466,7 +466,7 @@ mod tests {
|
|||
|
||||
{
|
||||
let iter_results = set.values().collect::<Vec<_>>();
|
||||
assert_eq!(iter_results, vec![&Foo(1), &Foo(2), &Foo(3)])
|
||||
assert_eq!(iter_results, vec![&Foo(1), &Foo(2), &Foo(3)]);
|
||||
}
|
||||
|
||||
assert_eq!(set.remove(e2), Some(Foo(2)));
|
||||
|
|
|
@ -221,7 +221,7 @@ impl Table {
|
|||
self.columns.insert(
|
||||
component_info.id(),
|
||||
Column::with_capacity(component_info, self.entities.capacity()),
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// Removes the entity at the given row and returns the entity swapped in to replace it (if an
|
||||
|
@ -495,13 +495,13 @@ impl Tables {
|
|||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
for table in self.tables.iter_mut() {
|
||||
for table in &mut self.tables {
|
||||
table.clear();
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn check_change_ticks(&mut self, change_tick: u32) {
|
||||
for table in self.tables.iter_mut() {
|
||||
for table in &mut self.tables {
|
||||
table.check_change_ticks(change_tick);
|
||||
}
|
||||
}
|
||||
|
@ -541,7 +541,7 @@ mod tests {
|
|||
let mut table = Table::with_capacity(0, columns.len());
|
||||
table.add_column(components.get_info(component_id).unwrap());
|
||||
let entities = (0..200).map(Entity::from_raw).collect::<Vec<_>>();
|
||||
for entity in entities.iter() {
|
||||
for entity in &entities {
|
||||
// SAFE: we allocate and immediately set data afterwards
|
||||
unsafe {
|
||||
let row = table.allocate(*entity);
|
||||
|
|
|
@ -107,7 +107,7 @@ impl<'w, 's> Commands<'w, 's> {
|
|||
/// Spawns a [`Bundle`] without pre-allocating an [`Entity`]. The [`Entity`] will be allocated
|
||||
/// when this [`Command`] is applied.
|
||||
pub fn spawn_and_forget(&mut self, bundle: impl Bundle) {
|
||||
self.queue.push(Spawn { bundle })
|
||||
self.queue.push(Spawn { bundle });
|
||||
}
|
||||
|
||||
/// Creates a new entity with the components contained in `bundle`.
|
||||
|
@ -292,7 +292,7 @@ impl<'w, 's> Commands<'w, 's> {
|
|||
pub fn init_resource<R: Resource + FromWorld>(&mut self) {
|
||||
self.queue.push(InitResource::<R> {
|
||||
_phantom: PhantomData::<R>::default(),
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
/// Inserts a resource to the [`World`], overwriting any previous value of the same type.
|
||||
|
@ -320,7 +320,7 @@ impl<'w, 's> Commands<'w, 's> {
|
|||
/// # bevy_ecs::system::assert_is_system(system);
|
||||
/// ```
|
||||
pub fn insert_resource<R: Resource>(&mut self, resource: R) {
|
||||
self.queue.push(InsertResource { resource })
|
||||
self.queue.push(InsertResource { resource });
|
||||
}
|
||||
|
||||
/// Removes a resource from the [`World`].
|
||||
|
@ -571,7 +571,7 @@ impl<'w, 's, 'a> EntityCommands<'w, 's, 'a> {
|
|||
pub fn despawn(&mut self) {
|
||||
self.commands.add(Despawn {
|
||||
entity: self.entity,
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
/// Returns the underlying [`Commands`].
|
||||
|
@ -585,7 +585,7 @@ where
|
|||
F: FnOnce(&mut World) + Send + Sync + 'static,
|
||||
{
|
||||
fn write(self, world: &mut World) {
|
||||
self(world)
|
||||
self(world);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -353,6 +353,7 @@ impl<In, Out, Param: SystemParam, Marker, F> FunctionSystem<In, Out, Param, Mark
|
|||
/// system.initialize(world);
|
||||
/// system.run((), world);
|
||||
/// ```
|
||||
#[must_use]
|
||||
pub fn config(
|
||||
mut self,
|
||||
f: impl FnOnce(&mut <Param::Fetch as SystemParamState>::Config),
|
||||
|
|
|
@ -364,27 +364,27 @@ mod tests {
|
|||
#[should_panic]
|
||||
fn conflicting_system_resources() {
|
||||
fn sys(_: ResMut<BufferRes>, _: Res<BufferRes>) {}
|
||||
test_for_conflicting_resources(sys)
|
||||
test_for_conflicting_resources(sys);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn conflicting_system_resources_reverse_order() {
|
||||
fn sys(_: Res<BufferRes>, _: ResMut<BufferRes>) {}
|
||||
test_for_conflicting_resources(sys)
|
||||
test_for_conflicting_resources(sys);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn conflicting_system_resources_multiple_mutable() {
|
||||
fn sys(_: ResMut<BufferRes>, _: ResMut<BufferRes>) {}
|
||||
test_for_conflicting_resources(sys)
|
||||
test_for_conflicting_resources(sys);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nonconflicting_system_resources() {
|
||||
fn sys(_: Local<BufferRes>, _: ResMut<BufferRes>, _: Local<A>, _: ResMut<A>) {}
|
||||
test_for_conflicting_resources(sys)
|
||||
test_for_conflicting_resources(sys);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -564,7 +564,7 @@ mod tests {
|
|||
let bundle_info = bundles.get(bundle_id).unwrap();
|
||||
let mut bundle_components = bundle_info.components().to_vec();
|
||||
bundle_components.sort();
|
||||
for component_id in bundle_components.iter() {
|
||||
for component_id in &bundle_components {
|
||||
assert!(
|
||||
components.get_info(*component_id).is_some(),
|
||||
"every bundle component exists in Components"
|
||||
|
|
|
@ -464,7 +464,7 @@ where
|
|||
f,
|
||||
self.last_change_tick,
|
||||
self.change_tick,
|
||||
)
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -499,7 +499,7 @@ where
|
|||
f,
|
||||
self.last_change_tick,
|
||||
self.change_tick,
|
||||
)
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -525,7 +525,7 @@ where
|
|||
f,
|
||||
self.last_change_tick,
|
||||
self.change_tick,
|
||||
)
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -547,7 +547,7 @@ where
|
|||
f,
|
||||
self.last_change_tick,
|
||||
self.change_tick,
|
||||
)
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -299,11 +299,12 @@ unsafe impl<T: Resource> SystemParamState for ResState<T> {
|
|||
fn init(world: &mut World, system_meta: &mut SystemMeta, _config: Self::Config) -> Self {
|
||||
let component_id = world.initialize_resource::<T>();
|
||||
let combined_access = system_meta.component_access_set.combined_access_mut();
|
||||
if combined_access.has_write(component_id) {
|
||||
panic!(
|
||||
"error[B0002]: Res<{}> in system {} conflicts with a previous ResMut<{0}> access. Consider removing the duplicate access.",
|
||||
std::any::type_name::<T>(), system_meta.name);
|
||||
}
|
||||
assert!(
|
||||
!combined_access.has_write(component_id),
|
||||
"error[B0002]: Res<{}> in system {} conflicts with a previous ResMut<{0}> access. Consider removing the duplicate access.",
|
||||
std::any::type_name::<T>(),
|
||||
system_meta.name,
|
||||
);
|
||||
combined_access.add_read(component_id);
|
||||
|
||||
let resource_archetype = world.archetypes.resource();
|
||||
|
@ -863,11 +864,12 @@ unsafe impl<T: 'static> SystemParamState for NonSendState<T> {
|
|||
|
||||
let component_id = world.initialize_non_send_resource::<T>();
|
||||
let combined_access = system_meta.component_access_set.combined_access_mut();
|
||||
if combined_access.has_write(component_id) {
|
||||
panic!(
|
||||
"error[B0002]: NonSend<{}> in system {} conflicts with a previous mutable resource access ({0}). Consider removing the duplicate access.",
|
||||
std::any::type_name::<T>(), system_meta.name);
|
||||
}
|
||||
assert!(
|
||||
!combined_access.has_write(component_id),
|
||||
"error[B0002]: NonSend<{}> in system {} conflicts with a previous mutable resource access ({0}). Consider removing the duplicate access.",
|
||||
std::any::type_name::<T>(),
|
||||
system_meta.name,
|
||||
);
|
||||
combined_access.add_read(component_id);
|
||||
|
||||
let resource_archetype = world.archetypes.resource();
|
||||
|
|
|
@ -391,7 +391,7 @@ impl<'w> EntityMut<'w> {
|
|||
archetypes,
|
||||
storages,
|
||||
new_archetype_id,
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -722,7 +722,7 @@ fn sorted_remove<T: Eq + Ord + Copy>(source: &mut Vec<T>, remove: &[T]) {
|
|||
} else {
|
||||
true
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -897,24 +897,21 @@ impl World {
|
|||
};
|
||||
}
|
||||
AllocAtWithoutReplacement::DidNotExist => {
|
||||
match spawn_or_insert {
|
||||
SpawnOrInsert::Spawn(ref mut spawner) => {
|
||||
// SAFE: `entity` is allocated (but non existent), bundle matches inserter
|
||||
unsafe { spawner.spawn_non_existent(entity, bundle) };
|
||||
}
|
||||
_ => {
|
||||
let mut spawner = bundle_info.get_bundle_spawner(
|
||||
&mut self.entities,
|
||||
&mut self.archetypes,
|
||||
&mut self.components,
|
||||
&mut self.storages,
|
||||
change_tick,
|
||||
);
|
||||
// SAFE: `entity` is valid, `location` matches entity, bundle matches inserter
|
||||
unsafe { spawner.spawn_non_existent(entity, bundle) };
|
||||
spawn_or_insert = SpawnOrInsert::Spawn(spawner);
|
||||
}
|
||||
};
|
||||
if let SpawnOrInsert::Spawn(ref mut spawner) = spawn_or_insert {
|
||||
// SAFE: `entity` is allocated (but non existent), bundle matches inserter
|
||||
unsafe { spawner.spawn_non_existent(entity, bundle) };
|
||||
} else {
|
||||
let mut spawner = bundle_info.get_bundle_spawner(
|
||||
&mut self.entities,
|
||||
&mut self.archetypes,
|
||||
&mut self.components,
|
||||
&mut self.storages,
|
||||
change_tick,
|
||||
);
|
||||
// SAFE: `entity` is valid, `location` matches entity, bundle matches inserter
|
||||
unsafe { spawner.spawn_non_existent(entity, bundle) };
|
||||
spawn_or_insert = SpawnOrInsert::Spawn(spawner);
|
||||
}
|
||||
}
|
||||
AllocAtWithoutReplacement::ExistsWithWrongGeneration => {
|
||||
invalid_entities.push(entity);
|
||||
|
@ -959,9 +956,11 @@ impl World {
|
|||
let column = unique_components.get_mut(component_id).unwrap_or_else(|| {
|
||||
panic!("resource does not exist: {}", std::any::type_name::<R>())
|
||||
});
|
||||
if column.is_empty() {
|
||||
panic!("resource does not exist: {}", std::any::type_name::<R>());
|
||||
}
|
||||
assert!(
|
||||
!column.is_empty(),
|
||||
"resource does not exist: {}",
|
||||
std::any::type_name::<R>()
|
||||
);
|
||||
// SAFE: if a resource column exists, row 0 exists as well. caller takes ownership of
|
||||
// the ptr value / drop is called when R is dropped
|
||||
unsafe { column.swap_remove_and_forget_unchecked(0) }
|
||||
|
@ -1126,12 +1125,11 @@ impl World {
|
|||
}
|
||||
|
||||
pub(crate) fn validate_non_send_access<T: 'static>(&self) {
|
||||
if !self.main_thread_validator.is_main_thread() {
|
||||
panic!(
|
||||
"attempted to access NonSend resource {} off of the main thread",
|
||||
std::any::type_name::<T>()
|
||||
);
|
||||
}
|
||||
assert!(
|
||||
self.main_thread_validator.is_main_thread(),
|
||||
"attempted to access NonSend resource {} off of the main thread",
|
||||
std::any::type_name::<T>(),
|
||||
);
|
||||
}
|
||||
|
||||
/// Empties queued entities and adds them to the empty [Archetype](crate::archetype::Archetype).
|
||||
|
|
|
@ -90,12 +90,11 @@ impl<'w, T> WorldBorrow<'w, T> {
|
|||
archetype_component_id: ArchetypeComponentId,
|
||||
access: Rc<RefCell<ArchetypeComponentAccess>>,
|
||||
) -> Self {
|
||||
if !access.borrow_mut().read(archetype_component_id) {
|
||||
panic!(
|
||||
"Attempted to immutably access {}, but it is already mutably borrowed",
|
||||
std::any::type_name::<T>()
|
||||
)
|
||||
}
|
||||
assert!(
|
||||
access.borrow_mut().read(archetype_component_id),
|
||||
"Attempted to immutably access {}, but it is already mutably borrowed",
|
||||
std::any::type_name::<T>(),
|
||||
);
|
||||
Self {
|
||||
value,
|
||||
archetype_component_id,
|
||||
|
@ -132,12 +131,11 @@ impl<'w, T> WorldBorrowMut<'w, T> {
|
|||
archetype_component_id: ArchetypeComponentId,
|
||||
access: Rc<RefCell<ArchetypeComponentAccess>>,
|
||||
) -> Self {
|
||||
if !access.borrow_mut().write(archetype_component_id) {
|
||||
panic!(
|
||||
"Attempted to mutably access {}, but it is already mutably borrowed",
|
||||
std::any::type_name::<T>()
|
||||
)
|
||||
}
|
||||
assert!(
|
||||
access.borrow_mut().write(archetype_component_id),
|
||||
"Attempted to mutably access {}, but it is already mutably borrowed",
|
||||
std::any::type_name::<T>(),
|
||||
);
|
||||
Self {
|
||||
value,
|
||||
archetype_component_id,
|
||||
|
|
|
@ -349,18 +349,17 @@ async fn load_texture<'a>(
|
|||
.decode_utf8()
|
||||
.unwrap();
|
||||
let uri = uri.as_ref();
|
||||
let (bytes, image_type) = match DataUri::parse(uri) {
|
||||
Ok(data_uri) => (data_uri.decode()?, ImageType::MimeType(data_uri.mime_type)),
|
||||
Err(()) => {
|
||||
let parent = load_context.path().parent().unwrap();
|
||||
let image_path = parent.join(uri);
|
||||
let bytes = load_context.read_asset_bytes(image_path.clone()).await?;
|
||||
let (bytes, image_type) = if let Ok(data_uri) = DataUri::parse(uri) {
|
||||
(data_uri.decode()?, ImageType::MimeType(data_uri.mime_type))
|
||||
} else {
|
||||
let parent = load_context.path().parent().unwrap();
|
||||
let image_path = parent.join(uri);
|
||||
let bytes = load_context.read_asset_bytes(image_path.clone()).await?;
|
||||
|
||||
let extension = Path::new(uri).extension().unwrap().to_str().unwrap();
|
||||
let image_type = ImageType::Extension(extension);
|
||||
let extension = Path::new(uri).extension().unwrap().to_str().unwrap();
|
||||
let image_type = ImageType::Extension(extension);
|
||||
|
||||
(bytes, image_type)
|
||||
}
|
||||
(bytes, image_type)
|
||||
};
|
||||
|
||||
Image::from_buffer(
|
||||
|
@ -776,7 +775,7 @@ fn resolve_node_hierarchy(
|
|||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(i, (label, node, children))| {
|
||||
for child in children.iter() {
|
||||
for child in &children {
|
||||
if let Some(parent) = parents.get_mut(*child) {
|
||||
*parent = Some(i);
|
||||
} else if !has_errored {
|
||||
|
@ -802,7 +801,7 @@ fn resolve_node_hierarchy(
|
|||
|
||||
assert!(parent_children.remove(&index));
|
||||
if let Some((_, child_node)) = nodes.get(&index) {
|
||||
parent_node.children.push(child_node.clone())
|
||||
parent_node.children.push(child_node.clone());
|
||||
}
|
||||
if parent_children.is_empty() {
|
||||
empty_children.push_back(parent_index);
|
||||
|
|
|
@ -279,23 +279,23 @@ pub fn gamepad_event_system(
|
|||
match event {
|
||||
GamepadEventType::Connected => {
|
||||
events.send(GamepadEvent(gamepad, event.clone()));
|
||||
for button_type in ALL_BUTTON_TYPES.iter() {
|
||||
for button_type in &ALL_BUTTON_TYPES {
|
||||
let gamepad_button = GamepadButton(gamepad, *button_type);
|
||||
button_input.reset(gamepad_button);
|
||||
button_axis.set(gamepad_button, 0.0);
|
||||
}
|
||||
for axis_type in ALL_AXIS_TYPES.iter() {
|
||||
for axis_type in &ALL_AXIS_TYPES {
|
||||
axis.set(GamepadAxis(gamepad, *axis_type), 0.0);
|
||||
}
|
||||
}
|
||||
GamepadEventType::Disconnected => {
|
||||
events.send(GamepadEvent(gamepad, event.clone()));
|
||||
for button_type in ALL_BUTTON_TYPES.iter() {
|
||||
for button_type in &ALL_BUTTON_TYPES {
|
||||
let gamepad_button = GamepadButton(gamepad, *button_type);
|
||||
button_input.reset(gamepad_button);
|
||||
button_axis.remove(gamepad_button);
|
||||
}
|
||||
for axis_type in ALL_AXIS_TYPES.iter() {
|
||||
for axis_type in &ALL_AXIS_TYPES {
|
||||
axis.remove(GamepadAxis(gamepad, *axis_type));
|
||||
}
|
||||
}
|
||||
|
@ -309,7 +309,7 @@ pub fn gamepad_event_system(
|
|||
events.send(GamepadEvent(
|
||||
gamepad,
|
||||
GamepadEventType::AxisChanged(*axis_type, filtered_value),
|
||||
))
|
||||
));
|
||||
}
|
||||
}
|
||||
GamepadEventType::ButtonChanged(button_type, value) => {
|
||||
|
@ -322,7 +322,7 @@ pub fn gamepad_event_system(
|
|||
events.send(GamepadEvent(
|
||||
gamepad,
|
||||
GamepadEventType::ButtonChanged(*button_type, filtered_value),
|
||||
))
|
||||
));
|
||||
}
|
||||
|
||||
let button_property = settings.get_button_settings(gamepad_button);
|
||||
|
|
|
@ -76,7 +76,7 @@ impl BevyManifest {
|
|||
///
|
||||
/// - `input`: The [`syn::DeriveInput`] for struct that is deriving the label trait
|
||||
/// - `trait_path`: The path [`syn::Path`] to the label trait
|
||||
pub fn derive_label(input: syn::DeriveInput, trait_path: syn::Path) -> TokenStream {
|
||||
pub fn derive_label(input: syn::DeriveInput, trait_path: &syn::Path) -> TokenStream {
|
||||
let ident = input.ident;
|
||||
|
||||
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
|
||||
|
|
|
@ -604,7 +604,7 @@ pub fn assign_lights_to_clusters(
|
|||
}
|
||||
}
|
||||
|
||||
for cluster_lights in clusters_lights.iter_mut() {
|
||||
for cluster_lights in &mut clusters_lights {
|
||||
cluster_lights.entities.shrink_to_fit();
|
||||
}
|
||||
|
||||
|
@ -821,7 +821,7 @@ pub fn check_light_mesh_visibility(
|
|||
} else {
|
||||
computed_visibility.is_visible = true;
|
||||
for visible_entities in cubemap_visible_entities.iter_mut() {
|
||||
visible_entities.entities.push(entity)
|
||||
visible_entities.entities.push(entity);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -324,7 +324,7 @@ pub fn queue_material_meshes<M: SpecializedMaterial>(
|
|||
}
|
||||
let alpha_mode = M::alpha_mode(material);
|
||||
if let AlphaMode::Blend = alpha_mode {
|
||||
mesh_key |= MeshPipelineKey::TRANSPARENT_MAIN_PASS
|
||||
mesh_key |= MeshPipelineKey::TRANSPARENT_MAIN_PASS;
|
||||
}
|
||||
|
||||
let specialized_key = M::key(material);
|
||||
|
|
|
@ -241,7 +241,7 @@ impl RenderAsset for StandardMaterial {
|
|||
AlphaMode::Opaque => flags |= StandardMaterialFlags::ALPHA_MODE_OPAQUE,
|
||||
AlphaMode::Mask(c) => {
|
||||
alpha_cutoff = c;
|
||||
flags |= StandardMaterialFlags::ALPHA_MODE_MASK
|
||||
flags |= StandardMaterialFlags::ALPHA_MODE_MASK;
|
||||
}
|
||||
AlphaMode::Blend => flags |= StandardMaterialFlags::ALPHA_MODE_BLEND,
|
||||
};
|
||||
|
|
|
@ -1237,7 +1237,7 @@ impl Node for ShadowPassNode {
|
|||
.begin_render_pass(&pass_descriptor);
|
||||
let mut draw_functions = draw_functions.write();
|
||||
let mut tracked_pass = TrackedRenderPass::new(render_pass);
|
||||
for item in shadow_phase.items.iter() {
|
||||
for item in &shadow_phase.items {
|
||||
let draw_function = draw_functions.get_mut(item.draw_function).unwrap();
|
||||
draw_function.draw(world, &mut tracked_pass, view_light_entity, item);
|
||||
}
|
||||
|
|
|
@ -138,7 +138,7 @@ pub fn derive_reflect(input: TokenStream) -> TokenStream {
|
|||
DeriveType::Struct | DeriveType::UnitStruct => impl_struct(
|
||||
type_name,
|
||||
&ast.generics,
|
||||
get_type_registration_impl,
|
||||
&get_type_registration_impl,
|
||||
&bevy_reflect_path,
|
||||
&reflect_attrs,
|
||||
&active_fields,
|
||||
|
@ -164,7 +164,7 @@ pub fn derive_reflect(input: TokenStream) -> TokenStream {
|
|||
fn impl_struct(
|
||||
struct_name: &Ident,
|
||||
generics: &Generics,
|
||||
get_type_registration_impl: proc_macro2::TokenStream,
|
||||
get_type_registration_impl: &proc_macro2::TokenStream,
|
||||
bevy_reflect_path: &Path,
|
||||
reflect_attrs: &ReflectAttrs,
|
||||
active_fields: &[(&Field, usize)],
|
||||
|
@ -619,15 +619,15 @@ impl ReflectAttrs {
|
|||
match ident.as_str() {
|
||||
"PartialEq" => {
|
||||
attrs.reflect_partial_eq =
|
||||
TraitImpl::Custom(segment.ident.clone())
|
||||
TraitImpl::Custom(segment.ident.clone());
|
||||
}
|
||||
"Hash" => {
|
||||
attrs.reflect_hash =
|
||||
TraitImpl::Custom(segment.ident.clone())
|
||||
TraitImpl::Custom(segment.ident.clone());
|
||||
}
|
||||
"Serialize" => {
|
||||
attrs.serialize =
|
||||
TraitImpl::Custom(segment.ident.clone())
|
||||
TraitImpl::Custom(segment.ident.clone());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@ -739,7 +739,7 @@ pub fn external_type_uuid(tokens: proc_macro::TokenStream) -> proc_macro::TokenS
|
|||
|
||||
#[proc_macro_attribute]
|
||||
pub fn reflect_trait(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
reflect_trait::reflect_trait(args, input)
|
||||
reflect_trait::reflect_trait(&args, input)
|
||||
}
|
||||
|
||||
#[proc_macro_derive(FromReflect)]
|
||||
|
|
|
@ -22,7 +22,7 @@ impl Parse for TraitInfo {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn reflect_trait(_args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
pub fn reflect_trait(_args: &TokenStream, input: TokenStream) -> TokenStream {
|
||||
let trait_info = parse_macro_input!(input as TraitInfo);
|
||||
let item_trait = &trait_info.item_trait;
|
||||
let trait_ident = &item_trait.ident;
|
||||
|
|
|
@ -15,9 +15,10 @@ pub fn type_uuid_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStre
|
|||
let name = &ast.ident;
|
||||
|
||||
let (impl_generics, type_generics, _) = &ast.generics.split_for_impl();
|
||||
if !impl_generics.to_token_stream().is_empty() || !type_generics.to_token_stream().is_empty() {
|
||||
panic!("#[derive(TypeUuid)] is not supported for generics.");
|
||||
}
|
||||
assert!(
|
||||
impl_generics.to_token_stream().is_empty() && type_generics.to_token_stream().is_empty(),
|
||||
"#[derive(TypeUuid)] is not supported for generics.",
|
||||
);
|
||||
|
||||
let mut uuid = None;
|
||||
for attribute in ast.attrs.iter().filter_map(|attr| attr.parse_meta().ok()) {
|
||||
|
|
|
@@ -186,7 +186,7 @@ impl<K: Reflect + Eq + Hash, V: Reflect> Map for HashMap<K, V> {
     }

     fn len(&self) -> usize {
-        HashMap::len(self)
+        Self::len(self)
     }

     fn iter(&self) -> MapIter {

@@ -199,7 +199,7 @@ impl<K: Reflect + Eq + Hash, V: Reflect> Map for HashMap<K, V> {
     fn clone_dynamic(&self) -> DynamicMap {
         let mut dynamic_map = DynamicMap::default();
         dynamic_map.set_name(self.type_name().to_string());
-        for (k, v) in HashMap::iter(self) {
+        for (k, v) in self {
             dynamic_map.insert_boxed(k.clone_value(), v.clone_value());
         }
         dynamic_map

@@ -268,8 +268,8 @@ where
     V: Reflect + Clone + for<'de> Deserialize<'de>,
 {
     fn get_type_registration() -> TypeRegistration {
-        let mut registration = TypeRegistration::of::<HashMap<K, V>>();
-        registration.insert::<ReflectDeserialize>(FromType::<HashMap<K, V>>::from_type());
+        let mut registration = TypeRegistration::of::<Self>();
+        registration.insert::<ReflectDeserialize>(FromType::<Self>::from_type());
         registration
     }
 }

@@ -371,6 +371,6 @@ mod tests {

     #[test]
     fn can_serialize_duration() {
-        assert!(std::time::Duration::ZERO.serializable().is_some())
+        assert!(std::time::Duration::ZERO.serializable().is_some());
     }
 }

@@ -151,7 +151,7 @@ unsafe impl Reflect for DynamicMap {
         if let ReflectRef::Map(map_value) = value.reflect_ref() {
             for (key, value) in map_value.iter() {
                 if let Some(v) = self.get_mut(key) {
-                    v.apply(value)
+                    v.apply(value);
                 }
             }
         } else {

@@ -289,7 +289,7 @@ unsafe impl Reflect for DynamicStruct {
             for (i, value) in struct_value.iter_fields().enumerate() {
                 let name = struct_value.name_at(i).unwrap();
                 if let Some(v) = self.field_mut(name) {
-                    v.apply(value)
+                    v.apply(value);
                 }
             }
         } else {

@@ -269,7 +269,7 @@ pub fn tuple_apply<T: Tuple>(a: &mut T, b: &dyn Reflect) {
     if let ReflectRef::Tuple(tuple) = b.reflect_ref() {
         for (i, value) in tuple.iter_fields().enumerate() {
             if let Some(v) = a.field_mut(i) {
-                v.apply(value)
+                v.apply(value);
             }
         }
     } else {

@@ -228,7 +228,7 @@ unsafe impl Reflect for DynamicTupleStruct {
         if let ReflectRef::TupleStruct(tuple_struct) = value.reflect_ref() {
             for (i, value) in tuple_struct.iter_fields().enumerate() {
                 if let Some(v) = self.field_mut(i) {
-                    v.apply(value)
+                    v.apply(value);
                 }
             }
         } else {

@@ -276,7 +276,7 @@ impl TypeRegistration {
 impl Clone for TypeRegistration {
     fn clone(&self) -> Self {
         let mut data = HashMap::default();
-        for (id, type_data) in self.data.iter() {
+        for (id, type_data) in &self.data {
             data.insert(*id, (*type_data).clone_type_data());
         }

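Many hunks in this commit swap `for x in collection.iter()` or `.iter_mut()` for borrowing the collection directly, which is the form clippy's `explicit_iter_loop` lint suggests. A small sketch of the same change on a plain `HashMap` (the names here are illustrative, not Bevy types):

use std::collections::HashMap;

fn main() {
    let mut scores: HashMap<u32, u32> = HashMap::new();
    scores.insert(1, 10);
    scores.insert(2, 20);

    // Equivalent to `for (id, score) in scores.iter() { … }`.
    for (id, score) in &scores {
        println!("{}: {}", id, score);
    }

    // Equivalent to `scores.iter_mut()`.
    for (_id, score) in &mut scores {
        *score += 1;
    }
}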
@@ -52,7 +52,7 @@ pub fn active_cameras_system(
     mut active_cameras: ResMut<ActiveCameras>,
     query: Query<(Entity, &Camera)>,
 ) {
-    for (name, active_camera) in active_cameras.cameras.iter_mut() {
+    for (name, active_camera) in &mut active_cameras.cameras {
         if active_camera
             .entity
             .map_or(false, |entity| query.get(entity).is_err())

@@ -105,5 +105,5 @@ fn extract_cameras(
         }
     }

-    commands.insert_resource(ExtractedCameraNames { entities })
+    commands.insert_resource(ExtractedCameraNames { entities });
 }

@@ -592,7 +592,7 @@ impl Add<Color> for Color {
 impl AddAssign<Vec4> for Color {
     fn add_assign(&mut self, rhs: Vec4) {
         let rhs: Color = rhs.into();
-        *self += rhs
+        *self += rhs;
     }
 }

@@ -162,7 +162,7 @@ impl Mesh {
     /// Panics if the attributes have different vertex counts.
     pub fn count_vertices(&self) -> usize {
         let mut vertex_count: Option<usize> = None;
-        for (attribute_name, attribute_data) in self.attributes.iter() {
+        for (attribute_name, attribute_data) in &self.attributes {
             let attribute_len = attribute_data.len();
             if let Some(previous_vertex_count) = vertex_count {
                 assert_eq!(previous_vertex_count, attribute_len,

@@ -230,7 +230,7 @@ impl Mesh {
             Some(indices) => indices,
             None => return,
         };
-        for (_, attributes) in self.attributes.iter_mut() {
+        for attributes in self.attributes.values_mut() {
             let indices = indices.iter();
             match attributes {
                 VertexAttributeValues::Float32(vec) => *vec = duplicate(vec, indices),

@@ -271,9 +271,7 @@ impl Mesh {
     /// Panics if [`Indices`] are set or [`Mesh::ATTRIBUTE_POSITION`] is not of type `float3`.
     /// Consider calling [`Mesh::duplicate_vertices`] or export your mesh with normal attributes.
     pub fn compute_flat_normals(&mut self) {
-        if self.indices().is_some() {
-            panic!("`compute_flat_normals` can't work on indexed geometry. Consider calling `Mesh::duplicate_vertices`.");
-        }
+        assert!(self.indices().is_none(), "`compute_flat_normals` can't work on indexed geometry. Consider calling `Mesh::duplicate_vertices`.");

         let positions = self
             .attribute(Mesh::ATTRIBUTE_POSITION)

@@ -163,7 +163,7 @@ impl From<Quad> for Mesh {
         let mut positions = Vec::<[f32; 3]>::new();
         let mut normals = Vec::<[f32; 3]>::new();
         let mut uvs = Vec::<[f32; 2]>::new();
-        for (position, normal, uv) in vertices.iter() {
+        for (position, normal, uv) in &vertices {
             positions.push(*position);
             normals.push(*normal);
             uvs.push(*uv);

@@ -207,7 +207,7 @@ impl From<Plane> for Mesh {
         let mut positions = Vec::new();
         let mut normals = Vec::new();
         let mut uvs = Vec::new();
-        for (position, normal, uv) in vertices.iter() {
+        for (position, normal, uv) in &vertices {
             positions.push(*position);
             normals.push(*normal);
             uvs.push(*uv);
@@ -119,7 +119,7 @@ fn extract_render_asset<A: RenderAsset>(
     commands.insert_resource(ExtractedAssets {
         extracted: extracted_assets,
         removed,
-    })
+    });
 }

 /// Specifies all ECS data required by [`PrepareAssetSystem`].

@@ -71,9 +71,7 @@ impl RenderGraph {

     /// Creates an [`GraphInputNode`] with the specified slots if not already present.
     pub fn set_input(&mut self, inputs: Vec<SlotInfo>) -> NodeId {
-        if self.input_node.is_some() {
-            panic!("Graph already has an input node");
-        }
+        assert!(self.input_node.is_none(), "Graph already has an input node");

         let id = self.add_node("GraphInputNode", GraphInputNode { inputs });
         self.input_node = Some(id);

@@ -257,7 +257,7 @@ impl<'a> TrackedRenderPass<'a> {
             offset,
             data.len()
         );
-        self.pass.set_push_constants(stages, offset, data)
+        self.pass.set_push_constants(stages, offset, data);
     }

     /// Set the rendering viewport.

@@ -282,7 +282,7 @@ impl<'a> TrackedRenderPass<'a> {
             max_depth
         );
         self.pass
-            .set_viewport(x, y, width, height, min_depth, max_depth)
+            .set_viewport(x, y, width, height, min_depth, max_depth);
     }

     /// Insert a single debug marker.

@@ -290,7 +290,7 @@ impl<'a> TrackedRenderPass<'a> {
     /// This is a GPU debugging feature. This has no effect on the rendering itself.
     pub fn insert_debug_marker(&mut self, label: &str) {
         trace!("insert debug marker: {}", label);
-        self.pass.insert_debug_marker(label)
+        self.pass.insert_debug_marker(label);
     }

     /// Start a new debug group.

@@ -315,7 +315,7 @@ impl<'a> TrackedRenderPass<'a> {
     /// [`pop_debug_group`]: TrackedRenderPass::pop_debug_group
     pub fn push_debug_group(&mut self, label: &str) {
         trace!("push_debug_group marker: {}", label);
-        self.pass.push_debug_group(label)
+        self.pass.push_debug_group(label);
     }

     /// End the current debug group.

@@ -332,11 +332,11 @@ impl<'a> TrackedRenderPass<'a> {
     /// [`pop_debug_group`]: TrackedRenderPass::pop_debug_group
     pub fn pop_debug_group(&mut self) {
         trace!("pop_debug_group");
-        self.pass.pop_debug_group()
+        self.pass.pop_debug_group();
     }

     pub fn set_blend_constant(&mut self, color: Color) {
         trace!("set blend constant: {:?}", color);
-        self.pass.set_blend_constant(wgpu::Color::from(color))
+        self.pass.set_blend_constant(wgpu::Color::from(color));
     }
 }
@@ -34,7 +34,7 @@ impl Buffer {

     #[inline]
     pub fn unmap(&self) {
-        self.value.unmap()
+        self.value.unmap();
     }
 }

@@ -73,6 +73,7 @@ impl Shader {
         self.import_path = Some(ShaderImport::Custom(import_path.into()));
     }

+    #[must_use]
     pub fn with_import_path<P: Into<String>>(mut self, import_path: P) -> Self {
         self.set_import_path(import_path);
         self

@@ -46,7 +46,7 @@ impl RenderDevice {
     /// no-op on the web, device is automatically polled.
     #[inline]
     pub fn poll(&self, maintain: wgpu::Maintain) {
-        self.device.poll(maintain)
+        self.device.poll(maintain);
     }

     /// Creates an empty [`CommandEncoder`](wgpu::CommandEncoder).

@@ -144,7 +144,7 @@ impl RenderDevice {
     /// - A old [`SurfaceTexture`](wgpu::SurfaceTexture) is still alive referencing an old surface.
     /// - Texture format requested is unsupported on the surface.
     pub fn configure_surface(&self, surface: &wgpu::Surface, config: &wgpu::SurfaceConfiguration) {
-        surface.configure(&self.device, config)
+        surface.configure(&self.device, config);
     }

     /// Returns the wgpu [`Device`](wgpu::Device).

@@ -155,9 +155,10 @@ impl RenderDevice {
     pub fn map_buffer(&self, buffer: &wgpu::BufferSlice, map_mode: wgpu::MapMode) {
         let data = buffer.map_async(map_mode);
         self.poll(wgpu::Maintain::Wait);
-        if future::block_on(data).is_err() {
-            panic!("Failed to map buffer to host.");
-        }
+        assert!(
+            future::block_on(data).is_ok(),
+            "Failed to map buffer to host."
+        );
     }

     pub fn align_copy_bytes_per_row(row_bytes: usize) -> usize {
@@ -74,7 +74,7 @@ mod tests {
     #[test]
     fn test_supported_file_extensions() {
         for ext in FILE_EXTENSIONS {
-            assert!(image::ImageFormat::from_extension(ext).is_some())
+            assert!(image::ImageFormat::from_extension(ext).is_some());
         }
     }
 }

@@ -75,6 +75,7 @@ impl RenderLayers {
     ///
     /// # Panics
     /// Panics when called with a layer greater than `TOTAL_LAYERS - 1`.
+    #[must_use]
     pub fn with(mut self, layer: Layer) -> Self {
         assert!(usize::from(layer) < Self::TOTAL_LAYERS);
         self.0 |= 1 << layer;

@@ -85,6 +86,7 @@ impl RenderLayers {
     ///
     /// # Panics
     /// Panics when called with a layer greater than `TOTAL_LAYERS - 1`.
+    #[must_use]
     pub fn without(mut self, layer: Layer) -> Self {
         assert!(usize::from(layer) < Self::TOTAL_LAYERS);
         self.0 &= !(1 << layer);

@@ -173,6 +175,6 @@ mod rendering_mask_tests {
             RenderLayers::from_layers(&[0, 1, 2]),
             <RenderLayers as std::iter::FromIterator<Layer>>::from_iter(vec![0, 1, 2]),
             "from_layers and from_iter are equivalent"
-        )
+        );
     }
 }
@@ -82,7 +82,7 @@ impl DynamicScene {
         let registry = world.get_resource::<TypeRegistryArc>().unwrap().clone();
         let type_registry = registry.read();

-        for scene_entity in self.entities.iter() {
+        for scene_entity in &self.entities {
             // Fetch the entity with the given entity id from the `entity_map`
             // or spawn a new entity with a transiently unique id if there is
             // no corresponding entry.

@@ -91,7 +91,7 @@ impl DynamicScene {
                 .or_insert_with(|| world.spawn().id());

             // Apply/ add each component to the given entity.
-            for component in scene_entity.components.iter() {
+            for component in &scene_entity.components {
                 let registration = type_registry
                     .get_with_name(component.type_name())
                     .ok_or_else(|| SceneSpawnError::UnregisteredType {

@@ -15,7 +15,7 @@ impl FromWorld for SceneLoader {
     fn from_world(world: &mut World) -> Self {
         let type_registry = world.get_resource::<TypeRegistryArc>().unwrap();
         SceneLoader {
-            type_registry: (&*type_registry).clone(),
+            type_registry: (*type_registry).clone(),
         }
     }
 }

@@ -242,7 +242,7 @@ impl SceneSpawner {
             match self.spawn_dynamic_sync(world, &scene_handle) {
                 Ok(_) => {}
                 Err(SceneSpawnError::NonExistentScene { .. }) => {
-                    self.dynamic_scenes_to_spawn.push(scene_handle)
+                    self.dynamic_scenes_to_spawn.push(scene_handle);
                 }
                 Err(err) => return Err(err),
             }

@@ -254,7 +254,7 @@ impl SceneSpawner {
             match self.spawn_sync_internal(world, scene_handle, instance_id) {
                 Ok(_) => {}
                 Err(SceneSpawnError::NonExistentRealScene { handle }) => {
-                    self.scenes_to_spawn.push((handle, instance_id))
+                    self.scenes_to_spawn.push((handle, instance_id));
                 }
                 Err(err) => return Err(err),
             }
@@ -27,7 +27,7 @@ impl<'a> Serialize for SceneSerializer<'a> {
         S: serde::Serializer,
     {
         let mut state = serializer.serialize_seq(Some(self.scene.entities.len()))?;
-        for entity in self.scene.entities.iter() {
+        for entity in &self.scene.entities {
             state.serialize_element(&EntitySerializer {
                 entity,
                 registry: self.registry,

@@ -107,7 +107,7 @@ impl TextureAtlas {
                 sprites.push(Rect {
                     min: rect_min,
                     max: Vec2::new(rect_min.x + tile_size.x, rect_min.y + tile_size.y),
-                })
+                });
             }
         }

@@ -23,6 +23,7 @@ pub enum TextureAtlasBuilderError {
 }

 #[derive(Debug)]
+#[must_use]
 /// A builder which is used to create a texture atlas from many individual
 /// sprites.
 pub struct TextureAtlasBuilder {

@@ -88,7 +89,7 @@ impl TextureAtlasBuilder {
                 texture.texture_descriptor.size.height,
                 1,
             ),
-        )
+        );
     }

     fn copy_texture_to_atlas(
@@ -24,7 +24,7 @@ fn main() {
                     std::thread::current().id(),
                     i
                 );
-            })
+            });
         }
     });

@@ -23,7 +23,7 @@ fn main() {
                     std::thread::current().id(),
                     i
                 );
-            })
+            });
         }
     });

@@ -100,7 +100,7 @@ mod tests {
         countdown_event.decrement();
        let countdown_event_clone = countdown_event.clone();
         let handle = std::thread::spawn(move || {
-            futures_lite::future::block_on(countdown_event_clone.listen())
+            futures_lite::future::block_on(countdown_event_clone.listen());
         });

         // Pause to give the new thread time to start blocking (ugly hack)
@@ -129,13 +129,10 @@ where
 {
     fn next_batch(&mut self) -> Option<B> {
         match &mut self.iter {
-            Some(iter) => match iter.next_batch() {
-                b @ Some(_) => b,
-                None => {
-                    self.iter = None;
-                    None
-                }
-            },
+            Some(iter) => iter.next_batch().or_else(|| {
+                self.iter = None;
+                None
+            }),
             None => None,
         }
     }

@@ -202,12 +199,9 @@ where
     P: ParallelIterator<B> + Clone,
 {
     fn next_batch(&mut self) -> Option<B> {
-        match self.curr.as_mut().and_then(|c| c.next_batch()) {
-            batch @ Some(_) => batch,
-            None => {
-                self.curr = Some(self.iter.clone());
-                self.next_batch()
-            }
-        }
+        self.curr.as_mut().and_then(|c| c.next_batch()).or_else(|| {
+            self.curr = Some(self.iter.clone());
+            self.next_batch()
+        })
     }
 }
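Both `next_batch` rewrites above replace a `match` whose `Some` arm just forwards the value with `Option::or_else`, which only runs the fallback when the value is `None`. A reduced sketch of the same shape on a plain `Option` (the names here are made up for illustration):

fn next_cached(cache: &mut Option<i32>, fallback: i32) -> Option<i32> {
    // Before:
    // match cache.take() {
    //     v @ Some(_) => v,
    //     None => Some(fallback),
    // }
    // After: `or_else` evaluates the fallback closure only when the value is `None`.
    cache.take().or_else(|| Some(fallback))
}

fn main() {
    let mut cache = Some(7);
    assert_eq!(next_cached(&mut cache, 0), Some(7));
    assert_eq!(next_cached(&mut cache, 0), Some(0));
}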
@@ -36,7 +36,7 @@ where
     fn count(mut self, pool: &TaskPool) -> usize {
         pool.scope(|s| {
             while let Some(batch) = self.next_batch() {
-                s.spawn(async move { batch.count() })
+                s.spawn(async move { batch.count() });
             }
         })
         .iter()

@@ -219,7 +219,7 @@ where
         pool.scope(|s| {
             while let Some(batch) = self.next_batch() {
                 let newf = f.clone();
-                s.spawn(async move { batch.partition::<Vec<_>, F>(newf) })
+                s.spawn(async move { batch.partition::<Vec<_>, F>(newf) });
             }
         })
         .into_iter()

@@ -12,6 +12,7 @@ use crate::Task;

 /// Used to create a [`TaskPool`]
 #[derive(Debug, Default, Clone)]
+#[must_use]
 pub struct TaskPoolBuilder {
     /// If set, we'll set up the thread pool to use at most n threads. Otherwise use
     /// the logical core count of the system
@@ -37,8 +37,7 @@ impl Font {
             TextureDimension::D2,
             alpha
                 .iter()
-                .map(|a| vec![255, 255, 255, (*a * 255.0) as u8])
-                .flatten()
+                .flat_map(|a| vec![255, 255, 255, (*a * 255.0) as u8])
                 .collect::<Vec<u8>>(),
             TextureFormat::Rgba8UnormSrgb,
         )
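The font hunk above folds `.map(…).flatten()` into a single `.flat_map(…)`, the form clippy's `map_flatten` lint asks for. A small sketch of the same change on plain data (the values here are made up):

fn main() {
    let alpha = [0.0f32, 0.5, 1.0];

    // Before: map each coverage value to a Vec, then flatten the nested iterator.
    // let bytes: Vec<u8> = alpha
    //     .iter()
    //     .map(|a| vec![255, (*a * 255.0) as u8])
    //     .flatten()
    //     .collect();

    // After: `flat_map` maps and flattens in one step.
    let bytes: Vec<u8> = alpha
        .iter()
        .flat_map(|a| vec![255, (*a * 255.0) as u8])
        .collect();

    assert_eq!(bytes.len(), 6);
}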
@@ -73,7 +73,7 @@ impl GlyphBrush {

         let mut max_y = std::f32::MIN;
         let mut min_x = std::f32::MAX;
-        for sg in glyphs.iter() {
+        for sg in &glyphs {
             let glyph = &sg.glyph;
             let scaled_font = sections_data[sg.section_index].3;
             max_y = max_y.max(glyph.position.y - scaled_font.descent());

@@ -103,7 +103,7 @@ impl<ID: Hash + Eq> TextPipeline<ID> {
         let mut max_x: f32 = std::f32::MIN;
         let mut max_y: f32 = std::f32::MIN;

-        for sg in section_glyphs.iter() {
+        for sg in &section_glyphs {
             let scaled_font = scaled_fonts[sg.section_index];
             let glyph = &sg.glyph;
             min_x = min_x.min(glyph.position.x);

@@ -94,6 +94,7 @@ impl GlobalTransform {

     #[doc(hidden)]
     #[inline]
+    #[must_use]
     pub fn looking_at(mut self, target: Vec3, up: Vec3) -> Self {
         self.look_at(target, up);
         self

@@ -197,11 +198,12 @@ impl GlobalTransform {
     /// Multiplies `self` with `transform` component by component, returning the
     /// resulting [`GlobalTransform`]
     #[inline]
-    pub fn mul_transform(&self, transform: Transform) -> GlobalTransform {
+    #[must_use]
+    pub fn mul_transform(&self, transform: Transform) -> Self {
         let translation = self.mul_vec3(transform.translation);
         let rotation = self.rotation * transform.rotation;
         let scale = self.scale * transform.scale;
-        GlobalTransform {
+        Self {
             translation,
             rotation,
             scale,
@@ -103,6 +103,7 @@ impl Transform {
     /// local z direction is toward `target` and its unit vector in the local y direction
     /// is toward `up`.
     #[inline]
+    #[must_use]
     pub fn looking_at(mut self, target: Vec3, up: Vec3) -> Self {
         self.look_at(target, up);
         self

@@ -207,6 +208,7 @@ impl Transform {
     /// Multiplies `self` with `transform` component by component, returning the
     /// resulting [`Transform`]
     #[inline]
+    #[must_use]
     pub fn mul_transform(&self, transform: Transform) -> Self {
         let translation = self.mul_vec3(transform.translation);
         let rotation = self.rotation * transform.rotation;
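The `#[must_use]` attributes added to these builder-style methods make the compiler warn when the returned value is dropped, since the methods return a new value instead of mutating in place. A minimal sketch with a hypothetical `Settings` type:

#[derive(Debug, Default)]
struct Settings {
    scale: f32,
}

impl Settings {
    #[must_use]
    fn with_scale(mut self, scale: f32) -> Self {
        self.scale = scale;
        self
    }
}

fn main() {
    // Keeping the returned value is fine.
    let settings = Settings::default().with_scale(2.0);
    println!("{:?}", settings);

    // `Settings::default().with_scale(2.0);` on its own would now trigger an
    // `unused_must_use` warning, because the modified copy is discarded.
}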
@@ -292,7 +292,7 @@ pub fn prepare_uinodes(
     let mut end = 0;
     let mut current_batch_handle = Default::default();
     let mut last_z = 0.0;
-    for extracted_uinode in extracted_uinodes.uinodes.iter() {
+    for extracted_uinode in &extracted_uinodes.uinodes {
         if current_batch_handle != extracted_uinode.image {
             if start != end {
                 commands.spawn_bundle((UiBatch {

@@ -92,7 +92,7 @@ impl bevy_render::render_graph::Node for UiPassNode {

         let mut draw_functions = draw_functions.write();
         let mut tracked_pass = TrackedRenderPass::new(render_pass);
-        for item in transparent_phase.items.iter() {
+        for item in &transparent_phase.items {
             let draw_function = draw_functions.get_mut(item.draw_function).unwrap();
             draw_function.draw(world, &mut tracked_pass, view_entity, item);
         }

@@ -76,7 +76,7 @@ pub fn update_clipping_system(
             &mut node_query,
             root_node,
             None,
-        )
+        );
     }
 }

@@ -95,7 +95,8 @@ impl Default for WindowResizeConstraints {
 }

 impl WindowResizeConstraints {
-    pub fn check_constraints(&self) -> WindowResizeConstraints {
+    #[must_use]
+    pub fn check_constraints(&self) -> Self {
         let WindowResizeConstraints {
             mut min_width,
             mut min_height,

@@ -365,7 +366,7 @@ impl Window {
     #[inline]
     pub fn set_position(&mut self, position: IVec2) {
         self.command_queue
-            .push(WindowCommand::SetPosition { position })
+            .push(WindowCommand::SetPosition { position });
     }

     /// Modifies the minimum and maximum window bounds for resizing in logical pixels.
@@ -57,12 +57,13 @@ fn change_window(world: &mut World) {
                     let window = winit_windows.get_window(id).unwrap();
                     match mode {
                         bevy_window::WindowMode::BorderlessFullscreen => {
-                            window.set_fullscreen(Some(winit::window::Fullscreen::Borderless(None)))
+                            window
+                                .set_fullscreen(Some(winit::window::Fullscreen::Borderless(None)));
                         }
                         bevy_window::WindowMode::Fullscreen => {
                             window.set_fullscreen(Some(winit::window::Fullscreen::Exclusive(
                                 get_best_videomode(&window.current_monitor().unwrap()),
-                            )))
+                            )));
                         }
                         bevy_window::WindowMode::SizedFullscreen => window.set_fullscreen(Some(
                             winit::window::Fullscreen::Exclusive(get_fitting_videomode(

@@ -129,11 +130,11 @@ fn change_window(world: &mut World) {
                 }
                 bevy_window::WindowCommand::SetMaximized { maximized } => {
                     let window = winit_windows.get_window(id).unwrap();
-                    window.set_maximized(maximized)
+                    window.set_maximized(maximized);
                 }
                 bevy_window::WindowCommand::SetMinimized { minimized } => {
                     let window = winit_windows.get_window(id).unwrap();
-                    window.set_minimized(minimized)
+                    window.set_minimized(minimized);
                 }
                 bevy_window::WindowCommand::SetPosition { position } => {
                     let window = winit_windows.get_window(id).unwrap();

@@ -385,7 +386,7 @@ pub fn winit_runner_with(mut app: App) {
                     char_input_events.send(ReceivedCharacter {
                         id: window_id,
                         char: c,
-                    })
+                    });
                 }
                 WindowEvent::ScaleFactorChanged {
                     scale_factor,
@@ -19,7 +19,7 @@ fn main() {
         .add_startup_system(setup)
         .add_system(print_sprite_count.label("Tick"))
         .add_system(move_camera.after("Tick"))
-        .run()
+        .run();
 }

 fn setup(mut commands: Commands, assets: Res<AssetServer>) {

@@ -48,7 +48,7 @@ fn setup(
     mut textures: ResMut<Assets<Image>>,
 ) {
     let mut texture_atlas_builder = TextureAtlasBuilder::default();
-    for handle in rpg_sprite_handles.handles.iter() {
+    for handle in &rpg_sprite_handles.handles {
         let texture = textures.get(handle).unwrap();
         texture_atlas_builder.add_texture(handle.clone_weak().typed::<Image>(), texture);
     }

@@ -44,7 +44,7 @@ fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
 // This system reads from the receiver and sends events to Bevy
 fn read_stream(receiver: ResMut<StreamReceiver>, mut events: EventWriter<StreamEvent>) {
     for from_stream in receiver.0.try_iter() {
-        events.send(StreamEvent(from_stream))
+        events.send(StreamEvent(from_stream));
     }
 }

@@ -45,7 +45,7 @@ fn setup(mut commands: Commands) {
 fn print_when_completed(time: Res<Time>, mut query: Query<&mut PrintOnCompletionTimer>) {
     for mut timer in query.iter_mut() {
         if timer.0.tick(time.delta()).just_finished() {
-            info!("Entity timer just finished")
+            info!("Entity timer just finished");
         }
     }
 }

@@ -68,7 +68,7 @@ fn countdown(time: Res<Time>, mut countdown: ResMut<Countdown>) {
         } else {
             // The timer has finished so we pause the percent output timer
             countdown.percent_trigger.pause();
-            info!("Paused percent trigger timer")
+            info!("Paused percent trigger timer");
         }
     }
 }

@@ -793,18 +793,18 @@ mod menu {
                     MenuButtonAction::Quit => app_exit_events.send(AppExit),
                     MenuButtonAction::Play => {
                         game_state.set(GameState::Game).unwrap();
-                        menu_state.set(MenuState::Disabled).unwrap()
+                        menu_state.set(MenuState::Disabled).unwrap();
                     }
                     MenuButtonAction::Settings => menu_state.set(MenuState::Settings).unwrap(),
                     MenuButtonAction::SettingsDisplay => {
-                        menu_state.set(MenuState::SettingsDisplay).unwrap()
+                        menu_state.set(MenuState::SettingsDisplay).unwrap();
                     }
                     MenuButtonAction::SettingsSound => {
-                        menu_state.set(MenuState::SettingsSound).unwrap()
+                        menu_state.set(MenuState::SettingsSound).unwrap();
                     }
                     MenuButtonAction::BackToMainMenu => menu_state.set(MenuState::Main).unwrap(),
                     MenuButtonAction::BackToSettings => {
-                        menu_state.set(MenuState::Settings).unwrap()
+                        menu_state.set(MenuState::Settings).unwrap();
                     }
                 }
             }

@@ -69,7 +69,7 @@ fn setup() {
     info!(
         "This is a 'struct' type with an 'x' value of {}",
         value.get_field::<usize>("x").unwrap()
-    )
+    );
 }
 // `TupleStruct` is a trait automatically implemented for tuple structs that derive Reflect.
 // This trait allows you to interact with fields via their indices

@@ -53,7 +53,7 @@ fn setup(mut commands: Commands, mut images: ResMut<Assets<Image>>) {
     });
     commands.spawn_bundle(OrthographicCameraBundle::new_2d());

-    commands.insert_resource(GameOfLifeImage(image))
+    commands.insert_resource(GameOfLifeImage(image));
 }

 pub struct GameOfLifeComputePlugin;