Apply unused_qualifications lint (#14828)

# Objective

Fixes #14782

## Solution

Enable the lint and fix all upcoming hints (`--fix`). Also tried to
figure out the false-positive (see review comment). Maybe split this PR
up into multiple parts where only the last one enables the lint, so some
can already be merged, resulting in fewer files touched / less
potential for merge conflicts?

Currently, there are some cases where it might be easier to read the
code with the qualifier, so perhaps remove the import of it and adapt
its cases? At the current stage it's just a plain adoption of the
suggestions in order to have a base to discuss.

## Testing

`cargo clippy` and `cargo run -p ci` are happy.
This commit is contained in:
EdJoPaTo 2024-08-21 14:29:33 +02:00 committed by GitHub
parent 7499b74bbf
commit 938d810766
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
71 changed files with 171 additions and 176 deletions

View file

@ -31,14 +31,14 @@ members = [
] ]
[workspace.lints.clippy] [workspace.lints.clippy]
type_complexity = "allow"
doc_markdown = "warn" doc_markdown = "warn"
manual_let_else = "warn" manual_let_else = "warn"
undocumented_unsafe_blocks = "warn"
redundant_else = "warn"
match_same_arms = "warn" match_same_arms = "warn"
semicolon_if_nothing_returned = "warn"
redundant_closure_for_method_calls = "warn" redundant_closure_for_method_calls = "warn"
redundant_else = "warn"
semicolon_if_nothing_returned = "warn"
type_complexity = "allow"
undocumented_unsafe_blocks = "warn"
unwrap_or_default = "warn" unwrap_or_default = "warn"
ptr_as_ptr = "warn" ptr_as_ptr = "warn"
@ -46,10 +46,11 @@ ptr_cast_constness = "warn"
ref_as_ptr = "warn" ref_as_ptr = "warn"
[workspace.lints.rust] [workspace.lints.rust]
unsafe_op_in_unsafe_fn = "warn"
missing_docs = "warn" missing_docs = "warn"
unsafe_code = "deny"
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(docsrs_dep)'] } unexpected_cfgs = { level = "warn", check-cfg = ['cfg(docsrs_dep)'] }
unsafe_code = "deny"
unsafe_op_in_unsafe_fn = "warn"
unused_qualifications = "warn"
[lints] [lints]
workspace = true workspace = true

View file

@ -1,3 +1,4 @@
use std::mem::size_of;
use bevy_ecs::{ use bevy_ecs::{
component::Component, component::Component,
entity::Entity, entity::Entity,
@ -184,8 +185,7 @@ impl Default for LargeStruct {
} }
pub fn sized_commands_impl<T: Default + Command>(criterion: &mut Criterion) { pub fn sized_commands_impl<T: Default + Command>(criterion: &mut Criterion) {
let mut group = let mut group = criterion.benchmark_group(format!("sized_commands_{}_bytes", size_of::<T>()));
criterion.benchmark_group(format!("sized_commands_{}_bytes", std::mem::size_of::<T>()));
group.warm_up_time(std::time::Duration::from_millis(500)); group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4)); group.measurement_time(std::time::Duration::from_secs(4));

View file

@ -225,7 +225,7 @@ impl AnimationGraph {
) -> impl Iterator<Item = AnimationNodeIndex> + 'a ) -> impl Iterator<Item = AnimationNodeIndex> + 'a
where where
I: IntoIterator<Item = Handle<AnimationClip>>, I: IntoIterator<Item = Handle<AnimationClip>>,
<I as std::iter::IntoIterator>::IntoIter: 'a, <I as IntoIterator>::IntoIter: 'a,
{ {
clips clips
.into_iter() .into_iter()

View file

@ -1074,7 +1074,7 @@ impl From<u8> for AppExit {
} }
impl Termination for AppExit { impl Termination for AppExit {
fn report(self) -> std::process::ExitCode { fn report(self) -> ExitCode {
match self { match self {
AppExit::Success => ExitCode::SUCCESS, AppExit::Success => ExitCode::SUCCESS,
// We leave logging an error to our users // We leave logging an error to our users
@ -1085,7 +1085,7 @@ impl Termination for AppExit {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::{iter, marker::PhantomData, mem, sync::Mutex}; use std::{iter, marker::PhantomData, mem::size_of, sync::Mutex};
use bevy_ecs::{ use bevy_ecs::{
change_detection::{DetectChanges, ResMut}, change_detection::{DetectChanges, ResMut},
@ -1411,7 +1411,7 @@ mod tests {
fn app_exit_size() { fn app_exit_size() {
// There wont be many of them so the size isn't a issue but // There wont be many of them so the size isn't a issue but
// it's nice they're so small let's keep it that way. // it's nice they're so small let's keep it that way.
assert_eq!(mem::size_of::<AppExit>(), mem::size_of::<u8>()); assert_eq!(size_of::<AppExit>(), size_of::<u8>());
} }
#[test] #[test]

View file

@ -24,13 +24,13 @@ pub use source::*;
use bevy_utils::{BoxedFuture, ConditionalSendFuture}; use bevy_utils::{BoxedFuture, ConditionalSendFuture};
use futures_io::{AsyncRead, AsyncSeek, AsyncWrite}; use futures_io::{AsyncRead, AsyncSeek, AsyncWrite};
use futures_lite::{ready, Stream}; use futures_lite::{ready, Stream};
use std::io::SeekFrom;
use std::task::Context;
use std::{ use std::{
io::SeekFrom,
mem::size_of,
path::{Path, PathBuf}, path::{Path, PathBuf},
pin::Pin, pin::Pin,
sync::Arc, sync::Arc,
task::Poll, task::{Context, Poll},
}; };
use thiserror::Error; use thiserror::Error;
@ -77,7 +77,7 @@ impl From<std::io::Error> for AssetReaderError {
// Ideally this would be even smaller (ReadToEndFuture only needs space for two references based on its definition), // Ideally this would be even smaller (ReadToEndFuture only needs space for two references based on its definition),
// but compiler optimizations can apparently inflate the stack size of futures due to inlining, which makes // but compiler optimizations can apparently inflate the stack size of futures due to inlining, which makes
// a higher maximum necessary. // a higher maximum necessary.
pub const STACK_FUTURE_SIZE: usize = 10 * std::mem::size_of::<&()>(); pub const STACK_FUTURE_SIZE: usize = 10 * size_of::<&()>();
pub use stackfuture::StackFuture; pub use stackfuture::StackFuture;
@ -520,7 +520,7 @@ impl VecReader {
impl AsyncRead for VecReader { impl AsyncRead for VecReader {
fn poll_read( fn poll_read(
mut self: Pin<&mut Self>, mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>, cx: &mut Context<'_>,
buf: &mut [u8], buf: &mut [u8],
) -> Poll<futures_io::Result<usize>> { ) -> Poll<futures_io::Result<usize>> {
if self.bytes_read >= self.bytes.len() { if self.bytes_read >= self.bytes.len() {

View file

@ -137,7 +137,7 @@ impl AsyncRead for TransactionLockedReader<'_> {
mut self: Pin<&mut Self>, mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>, cx: &mut std::task::Context<'_>,
buf: &mut [u8], buf: &mut [u8],
) -> std::task::Poll<futures_io::Result<usize>> { ) -> Poll<futures_io::Result<usize>> {
Pin::new(&mut self.reader).poll_read(cx, buf) Pin::new(&mut self.reader).poll_read(cx, buf)
} }
} }

View file

@ -583,13 +583,10 @@ mod tests {
async fn read_meta<'a>( async fn read_meta<'a>(
&'a self, &'a self,
path: &'a Path, path: &'a Path,
) -> Result<impl bevy_asset::io::Reader + 'a, AssetReaderError> { ) -> Result<impl Reader + 'a, AssetReaderError> {
self.memory_reader.read_meta(path).await self.memory_reader.read_meta(path).await
} }
async fn read<'a>( async fn read<'a>(&'a self, path: &'a Path) -> Result<impl Reader + 'a, AssetReaderError> {
&'a self,
path: &'a Path,
) -> Result<impl bevy_asset::io::Reader + 'a, bevy_asset::io::AssetReaderError> {
let attempt_number = { let attempt_number = {
let mut attempt_counters = self.attempt_counters.lock().unwrap(); let mut attempt_counters = self.attempt_counters.lock().unwrap();
if let Some(existing) = attempt_counters.get_mut(path) { if let Some(existing) = attempt_counters.get_mut(path) {

View file

@ -21,7 +21,7 @@ use thiserror::Error;
/// should be loaded. /// should be loaded.
pub trait AssetLoader: Send + Sync + 'static { pub trait AssetLoader: Send + Sync + 'static {
/// The top level [`Asset`] loaded by this [`AssetLoader`]. /// The top level [`Asset`] loaded by this [`AssetLoader`].
type Asset: crate::Asset; type Asset: Asset;
/// The settings type used by this [`AssetLoader`]. /// The settings type used by this [`AssetLoader`].
type Settings: Settings + Default + Serialize + for<'a> Deserialize<'a>; type Settings: Settings + Default + Serialize + for<'a> Deserialize<'a>;
/// The type of [error](`std::error::Error`) which could be encountered by this loader. /// The type of [error](`std::error::Error`) which could be encountered by this loader.

View file

@ -725,7 +725,7 @@ impl AssetServer {
.data .data
.infos .infos
.write() .write()
.create_loading_handle_untyped(std::any::TypeId::of::<A>(), std::any::type_name::<A>()); .create_loading_handle_untyped(TypeId::of::<A>(), std::any::type_name::<A>());
let id = handle.id(); let id = handle.id();
let event_sender = self.data.asset_event_sender.clone(); let event_sender = self.data.asset_event_sender.clone();

View file

@ -183,7 +183,7 @@ mod tests {
#[test] #[test]
fn test_gray() { fn test_gray() {
verify_gray::<crate::Hsla>(); verify_gray::<Hsla>();
verify_gray::<crate::Hsva>(); verify_gray::<crate::Hsva>();
verify_gray::<crate::Hwba>(); verify_gray::<crate::Hwba>();
verify_gray::<crate::Laba>(); verify_gray::<crate::Laba>();

View file

@ -13,7 +13,7 @@ use bevy_time::{Real, Time};
pub struct FrameTimeDiagnosticsPlugin; pub struct FrameTimeDiagnosticsPlugin;
impl Plugin for FrameTimeDiagnosticsPlugin { impl Plugin for FrameTimeDiagnosticsPlugin {
fn build(&self, app: &mut bevy_app::App) { fn build(&self, app: &mut App) {
app.register_diagnostic(Diagnostic::new(Self::FRAME_TIME).with_suffix("ms")) app.register_diagnostic(Diagnostic::new(Self::FRAME_TIME).with_suffix("ms"))
.register_diagnostic(Diagnostic::new(Self::FPS)) .register_diagnostic(Diagnostic::new(Self::FPS))
.register_diagnostic(Diagnostic::new(Self::FRAME_COUNT).with_smoothing_factor(0.0)) .register_diagnostic(Diagnostic::new(Self::FRAME_COUNT).with_smoothing_factor(0.0))

View file

@ -37,7 +37,7 @@ impl Plugin for DiagnosticsPlugin {
app.init_resource::<DiagnosticsStore>(); app.init_resource::<DiagnosticsStore>();
#[cfg(feature = "sysinfo_plugin")] #[cfg(feature = "sysinfo_plugin")]
app.init_resource::<system_information_diagnostics_plugin::SystemInfo>(); app.init_resource::<SystemInfo>();
} }
} }

View file

@ -101,7 +101,7 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
}); });
} }
None => { None => {
let index = syn::Index::from(i); let index = Index::from(i);
field_get_components.push(quote! { field_get_components.push(quote! {
self.#index.get_components(&mut *func); self.#index.get_components(&mut *func);
}); });

View file

@ -19,7 +19,7 @@ use crate::{
struct QueryDataAttributes { struct QueryDataAttributes {
pub is_mutable: bool, pub is_mutable: bool,
pub derive_args: Punctuated<Meta, syn::token::Comma>, pub derive_args: Punctuated<Meta, Comma>,
} }
static MUTABLE_ATTRIBUTE_NAME: &str = "mutable"; static MUTABLE_ATTRIBUTE_NAME: &str = "mutable";
@ -48,7 +48,7 @@ pub fn derive_query_data_impl(input: TokenStream) -> TokenStream {
} }
attr.parse_args_with(|input: ParseStream| { attr.parse_args_with(|input: ParseStream| {
let meta = input.parse_terminated(syn::Meta::parse, Comma)?; let meta = input.parse_terminated(Meta::parse, Comma)?;
for meta in meta { for meta in meta {
let ident = meta.path().get_ident().unwrap_or_else(|| { let ident = meta.path().get_ident().unwrap_or_else(|| {
panic!( panic!(

View file

@ -706,7 +706,7 @@ impl<'w> BundleInserter<'w> {
location: EntityLocation, location: EntityLocation,
bundle: T, bundle: T,
insert_mode: InsertMode, insert_mode: InsertMode,
#[cfg(feature = "track_change_detection")] caller: &'static core::panic::Location<'static>, #[cfg(feature = "track_change_detection")] caller: &'static Location<'static>,
) -> EntityLocation { ) -> EntityLocation {
let bundle_info = self.bundle_info.as_ref(); let bundle_info = self.bundle_info.as_ref();
let add_bundle = self.add_bundle.as_ref(); let add_bundle = self.add_bundle.as_ref();

View file

@ -940,7 +940,7 @@ pub struct MutUntyped<'w> {
pub(crate) value: PtrMut<'w>, pub(crate) value: PtrMut<'w>,
pub(crate) ticks: TicksMut<'w>, pub(crate) ticks: TicksMut<'w>,
#[cfg(feature = "track_change_detection")] #[cfg(feature = "track_change_detection")]
pub(crate) changed_by: &'w mut &'static core::panic::Location<'static>, pub(crate) changed_by: &'w mut &'static Location<'static>,
} }
impl<'w> MutUntyped<'w> { impl<'w> MutUntyped<'w> {

View file

@ -385,7 +385,7 @@ impl<'de> Deserialize<'de> for Entity {
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
{ {
use serde::de::Error; use serde::de::Error;
let id: u64 = serde::de::Deserialize::deserialize(deserializer)?; let id: u64 = Deserialize::deserialize(deserializer)?;
Entity::try_from_bits(id).map_err(D::Error::custom) Entity::try_from_bits(id).map_err(D::Error::custom)
} }
} }
@ -1004,13 +1004,11 @@ impl EntityLocation {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use std::mem::size_of;
#[test] #[test]
fn entity_niche_optimization() { fn entity_niche_optimization() {
assert_eq!( assert_eq!(size_of::<Entity>(), size_of::<Option<Entity>>());
std::mem::size_of::<Entity>(),
std::mem::size_of::<Option<Entity>>()
);
} }
#[test] #[test]

View file

@ -35,7 +35,7 @@ impl<'a, T: SparseSetIndex> FormattedBitSet<'a, T> {
} }
} }
impl<'a, T: SparseSetIndex + fmt::Debug> fmt::Debug for FormattedBitSet<'a, T> { impl<'a, T: SparseSetIndex + Debug> Debug for FormattedBitSet<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list() f.debug_list()
.entries(self.bit_set.ones().map(T::get_sparse_set_index)) .entries(self.bit_set.ones().map(T::get_sparse_set_index))
@ -106,7 +106,7 @@ impl<T: SparseSetIndex> Clone for Access<T> {
} }
} }
impl<T: SparseSetIndex + fmt::Debug> fmt::Debug for Access<T> { impl<T: SparseSetIndex + Debug> Debug for Access<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Access") f.debug_struct("Access")
.field( .field(
@ -907,7 +907,7 @@ impl<T: SparseSetIndex> Clone for AccessFilters<T> {
} }
} }
impl<T: SparseSetIndex + fmt::Debug> fmt::Debug for AccessFilters<T> { impl<T: SparseSetIndex + Debug> Debug for AccessFilters<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("AccessFilters") f.debug_struct("AccessFilters")
.field("with", &FormattedBitSet::<T>::new(&self.with)) .field("with", &FormattedBitSet::<T>::new(&self.with))

View file

@ -373,7 +373,7 @@ impl<D: QueryData, F: QueryFilter> QueryState<D, F> {
.map(|index| index.keys()) .map(|index| index.keys())
}) })
// select the component with the fewest archetypes // select the component with the fewest archetypes
.min_by_key(std::iter::ExactSizeIterator::len); .min_by_key(ExactSizeIterator::len);
if let Some(archetypes) = potential_archetypes { if let Some(archetypes) = potential_archetypes {
for archetype_id in archetypes { for archetype_id in archetypes {
// exclude archetypes that have already been processed // exclude archetypes that have already been processed

View file

@ -521,7 +521,7 @@ mod tests {
use crate::{component::Component, ptr::OwningPtr, world::World}; use crate::{component::Component, ptr::OwningPtr, world::World};
use super::BlobVec; use super::BlobVec;
use std::{alloc::Layout, cell::RefCell, mem, rc::Rc}; use std::{alloc::Layout, cell::RefCell, mem::align_of, rc::Rc};
unsafe fn drop_ptr<T>(x: OwningPtr<'_>) { unsafe fn drop_ptr<T>(x: OwningPtr<'_>) {
// SAFETY: The pointer points to a valid value of type `T` and it is safe to drop this value. // SAFETY: The pointer points to a valid value of type `T` and it is safe to drop this value.
@ -722,7 +722,7 @@ mod tests {
for zst in q.iter(&world) { for zst in q.iter(&world) {
// Ensure that the references returned are properly aligned. // Ensure that the references returned are properly aligned.
assert_eq!( assert_eq!(
std::ptr::from_ref::<Zst>(zst) as usize % mem::align_of::<Zst>(), std::ptr::from_ref::<Zst>(zst) as usize % align_of::<Zst>(),
0 0
); );
count += 1; count += 1;

View file

@ -165,7 +165,7 @@ impl<const SEND: bool> ResourceData<SEND> {
&mut self, &mut self,
value: OwningPtr<'_>, value: OwningPtr<'_>,
change_tick: Tick, change_tick: Tick,
#[cfg(feature = "track_change_detection")] caller: &'static core::panic::Location, #[cfg(feature = "track_change_detection")] caller: &'static Location,
) { ) {
if self.is_present() { if self.is_present() {
self.validate_access(); self.validate_access();
@ -203,7 +203,7 @@ impl<const SEND: bool> ResourceData<SEND> {
&mut self, &mut self,
value: OwningPtr<'_>, value: OwningPtr<'_>,
change_ticks: ComponentTicks, change_ticks: ComponentTicks,
#[cfg(feature = "track_change_detection")] caller: &'static core::panic::Location, #[cfg(feature = "track_change_detection")] caller: &'static Location,
) { ) {
if self.is_present() { if self.is_present() {
self.validate_access(); self.validate_access();

View file

@ -304,7 +304,7 @@ impl ComponentSparseSet {
pub fn get_changed_by( pub fn get_changed_by(
&self, &self,
entity: Entity, entity: Entity,
) -> Option<&UnsafeCell<&'static core::panic::Location<'static>>> { ) -> Option<&UnsafeCell<&'static Location<'static>>> {
let dense_index = *self.sparse.get(entity.index())?; let dense_index = *self.sparse.get(entity.index())?;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
assert_eq!(entity, self.entities[dense_index.as_usize()]); assert_eq!(entity, self.entities[dense_index.as_usize()]);

View file

@ -93,7 +93,7 @@ const _: () = {
type State = FetchState; type State = FetchState;
type Item<'w, 's> = Commands<'w, 's>; type Item<'w, 's> = Commands<'w, 's>;
fn init_state( fn init_state(
world: &mut bevy_ecs::world::World, world: &mut World,
system_meta: &mut bevy_ecs::system::SystemMeta, system_meta: &mut bevy_ecs::system::SystemMeta,
) -> Self::State { ) -> Self::State {
FetchState { FetchState {
@ -120,7 +120,7 @@ const _: () = {
fn apply( fn apply(
state: &mut Self::State, state: &mut Self::State,
system_meta: &bevy_ecs::system::SystemMeta, system_meta: &bevy_ecs::system::SystemMeta,
world: &mut bevy_ecs::world::World, world: &mut World,
) { ) {
<__StructFieldsAlias<'_, '_> as bevy_ecs::system::SystemParam>::apply( <__StructFieldsAlias<'_, '_> as bevy_ecs::system::SystemParam>::apply(
&mut state.state, &mut state.state,
@ -1325,7 +1325,7 @@ where
B: Bundle, B: Bundle,
{ {
#[cfg(feature = "track_change_detection")] #[cfg(feature = "track_change_detection")]
let caller = core::panic::Location::caller(); let caller = Location::caller();
move |world: &mut World| { move |world: &mut World| {
if let Err(invalid_entities) = world.insert_or_spawn_batch_with_caller( if let Err(invalid_entities) = world.insert_or_spawn_batch_with_caller(
bundles_iter, bundles_iter,
@ -1359,7 +1359,7 @@ fn despawn() -> impl EntityCommand {
/// An [`EntityCommand`] that adds the components in a [`Bundle`] to an entity. /// An [`EntityCommand`] that adds the components in a [`Bundle`] to an entity.
#[track_caller] #[track_caller]
fn insert<T: Bundle>(bundle: T, mode: InsertMode) -> impl EntityCommand { fn insert<T: Bundle>(bundle: T, mode: InsertMode) -> impl EntityCommand {
let caller = core::panic::Location::caller(); let caller = Location::caller();
move |entity: Entity, world: &mut World| { move |entity: Entity, world: &mut World| {
if let Some(mut entity) = world.get_entity_mut(entity) { if let Some(mut entity) = world.get_entity_mut(entity) {
entity.insert_with_caller( entity.insert_with_caller(
@ -1379,7 +1379,7 @@ fn insert<T: Bundle>(bundle: T, mode: InsertMode) -> impl EntityCommand {
#[track_caller] #[track_caller]
fn try_insert(bundle: impl Bundle, mode: InsertMode) -> impl EntityCommand { fn try_insert(bundle: impl Bundle, mode: InsertMode) -> impl EntityCommand {
#[cfg(feature = "track_change_detection")] #[cfg(feature = "track_change_detection")]
let caller = core::panic::Location::caller(); let caller = Location::caller();
move |entity, world: &mut World| { move |entity, world: &mut World| {
if let Some(mut entity) = world.get_entity_mut(entity) { if let Some(mut entity) = world.get_entity_mut(entity) {
entity.insert_with_caller( entity.insert_with_caller(

View file

@ -2,7 +2,7 @@ use crate::system::{SystemBuffer, SystemMeta};
use std::{ use std::{
fmt::Debug, fmt::Debug,
mem::MaybeUninit, mem::{size_of, MaybeUninit},
panic::{self, AssertUnwindSafe}, panic::{self, AssertUnwindSafe},
ptr::{addr_of_mut, NonNull}, ptr::{addr_of_mut, NonNull},
}; };
@ -169,7 +169,7 @@ impl RawCommandQueue {
let meta = CommandMeta { let meta = CommandMeta {
consume_command_and_get_size: |command, world, cursor| { consume_command_and_get_size: |command, world, cursor| {
*cursor += std::mem::size_of::<C>(); *cursor += size_of::<C>();
// SAFETY: According to the invariants of `CommandMeta.consume_command_and_get_size`, // SAFETY: According to the invariants of `CommandMeta.consume_command_and_get_size`,
// `command` must point to a value of type `C`. // `command` must point to a value of type `C`.
@ -197,7 +197,7 @@ impl RawCommandQueue {
let old_len = bytes.len(); let old_len = bytes.len();
// Reserve enough bytes for both the metadata and the command itself. // Reserve enough bytes for both the metadata and the command itself.
bytes.reserve(std::mem::size_of::<Packed<C>>()); bytes.reserve(size_of::<Packed<C>>());
// Pointer to the bytes at the end of the buffer. // Pointer to the bytes at the end of the buffer.
// SAFETY: We know it is within bounds of the allocation, due to the call to `.reserve()`. // SAFETY: We know it is within bounds of the allocation, due to the call to `.reserve()`.
@ -217,7 +217,7 @@ impl RawCommandQueue {
// SAFETY: The new length is guaranteed to fit in the vector's capacity, // SAFETY: The new length is guaranteed to fit in the vector's capacity,
// due to the call to `.reserve()` above. // due to the call to `.reserve()` above.
unsafe { unsafe {
bytes.set_len(old_len + std::mem::size_of::<Packed<C>>()); bytes.set_len(old_len + size_of::<Packed<C>>());
} }
} }
@ -252,13 +252,13 @@ impl RawCommandQueue {
}; };
// Advance to the bytes just after `meta`, which represent a type-erased command. // Advance to the bytes just after `meta`, which represent a type-erased command.
local_cursor += std::mem::size_of::<CommandMeta>(); local_cursor += size_of::<CommandMeta>();
// Construct an owned pointer to the command. // Construct an owned pointer to the command.
// SAFETY: It is safe to transfer ownership out of `self.bytes`, since the increment of `cursor` above // SAFETY: It is safe to transfer ownership out of `self.bytes`, since the increment of `cursor` above
// guarantees that nothing stored in the buffer will get observed after this function ends. // guarantees that nothing stored in the buffer will get observed after this function ends.
// `cmd` points to a valid address of a stored command, so it must be non-null. // `cmd` points to a valid address of a stored command, so it must be non-null.
let cmd = unsafe { let cmd = unsafe {
OwningPtr::<Unaligned>::new(std::ptr::NonNull::new_unchecked( OwningPtr::<Unaligned>::new(NonNull::new_unchecked(
self.bytes.as_mut().as_mut_ptr().add(local_cursor).cast(), self.bytes.as_mut().as_mut_ptr().add(local_cursor).cast(),
)) ))
}; };
@ -445,7 +445,7 @@ mod test {
#[test] #[test]
fn test_command_queue_inner_panic_safe() { fn test_command_queue_inner_panic_safe() {
std::panic::set_hook(Box::new(|_| {})); panic::set_hook(Box::new(|_| {}));
let mut queue = CommandQueue::default(); let mut queue = CommandQueue::default();
@ -454,7 +454,7 @@ mod test {
let mut world = World::new(); let mut world = World::new();
let _ = std::panic::catch_unwind(AssertUnwindSafe(|| { let _ = panic::catch_unwind(AssertUnwindSafe(|| {
queue.apply(&mut world); queue.apply(&mut world);
})); }));
@ -468,7 +468,7 @@ mod test {
#[test] #[test]
fn test_command_queue_inner_nested_panic_safe() { fn test_command_queue_inner_nested_panic_safe() {
std::panic::set_hook(Box::new(|_| {})); panic::set_hook(Box::new(|_| {}));
#[derive(Resource, Default)] #[derive(Resource, Default)]
struct Order(Vec<usize>); struct Order(Vec<usize>);
@ -488,7 +488,7 @@ mod test {
}); });
world.commands().add(add_index(4)); world.commands().add(add_index(4));
let _ = std::panic::catch_unwind(AssertUnwindSafe(|| { let _ = panic::catch_unwind(AssertUnwindSafe(|| {
world.flush_commands(); world.flush_commands();
})); }));

View file

@ -54,11 +54,11 @@ unsafe impl SystemParam for WorldId {
type Item<'world, 'state> = WorldId; type Item<'world, 'state> = WorldId;
fn init_state(_: &mut World, _: &mut crate::system::SystemMeta) -> Self::State {} fn init_state(_: &mut World, _: &mut SystemMeta) -> Self::State {}
unsafe fn get_param<'world, 'state>( unsafe fn get_param<'world, 'state>(
_: &'state mut Self::State, _: &'state mut Self::State,
_: &crate::system::SystemMeta, _: &SystemMeta,
world: UnsafeWorldCell<'world>, world: UnsafeWorldCell<'world>,
_: Tick, _: Tick,
) -> Self::Item<'world, 'state> { ) -> Self::Item<'world, 'state> {

View file

@ -2348,7 +2348,7 @@ impl World {
// By setting the change tick in the drop impl, we ensure that // By setting the change tick in the drop impl, we ensure that
// the change tick gets reset even if a panic occurs during the scope. // the change tick gets reset even if a panic occurs during the scope.
impl std::ops::Drop for LastTickGuard<'_> { impl Drop for LastTickGuard<'_> {
fn drop(&mut self) { fn drop(&mut self) {
self.world.last_change_tick = self.last_tick; self.world.last_change_tick = self.last_tick;
} }
@ -3197,7 +3197,7 @@ mod tests {
}; };
assert!(iter.next().is_none()); assert!(iter.next().is_none());
std::mem::drop(iter); drop(iter);
assert_eq!(world.resource::<TestResource>().0, 43); assert_eq!(world.resource::<TestResource>().0, 43);
assert_eq!( assert_eq!(

View file

@ -120,7 +120,7 @@ const LINE_JOINT_SHADER_HANDLE: Handle<Shader> = Handle::weak_from_u128(11627807
pub struct GizmoPlugin; pub struct GizmoPlugin;
impl Plugin for GizmoPlugin { impl Plugin for GizmoPlugin {
fn build(&self, app: &mut bevy_app::App) { fn build(&self, app: &mut App) {
#[cfg(feature = "bevy_render")] #[cfg(feature = "bevy_render")]
{ {
use bevy_asset::load_internal_asset; use bevy_asset::load_internal_asset;
@ -175,7 +175,7 @@ impl Plugin for GizmoPlugin {
} }
#[cfg(feature = "bevy_render")] #[cfg(feature = "bevy_render")]
fn finish(&self, app: &mut bevy_app::App) { fn finish(&self, app: &mut App) {
let Some(render_app) = app.get_sub_app_mut(RenderApp) else { let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
return; return;
}; };

View file

@ -108,7 +108,7 @@ pub enum GltfError {
CircularChildren(String), CircularChildren(String),
/// Failed to load a file. /// Failed to load a file.
#[error("failed to load file: {0}")] #[error("failed to load file: {0}")]
Io(#[from] std::io::Error), Io(#[from] Error),
} }
/// Loads glTF files with all of their data as their corresponding bevy representations. /// Loads glTF files with all of their data as their corresponding bevy representations.
@ -777,7 +777,7 @@ async fn load_gltf<'a, 'b, 'c>(
}) })
} }
fn get_gltf_extras(extras: &gltf::json::Extras) -> Option<GltfExtras> { fn get_gltf_extras(extras: &json::Extras) -> Option<GltfExtras> {
extras.as_ref().map(|extras| GltfExtras { extras.as_ref().map(|extras| GltfExtras {
value: extras.get().to_string(), value: extras.get().to_string(),
}) })
@ -799,9 +799,9 @@ fn node_transform(node: &Node) -> Transform {
rotation, rotation,
scale, scale,
} => Transform { } => Transform {
translation: bevy_math::Vec3::from(translation), translation: Vec3::from(translation),
rotation: bevy_math::Quat::from_array(rotation), rotation: bevy_math::Quat::from_array(rotation),
scale: bevy_math::Vec3::from(scale), scale: Vec3::from(scale),
}, },
} }
} }
@ -849,7 +849,7 @@ async fn load_image<'a, 'b>(
.name() .name()
.map_or("Unknown GLTF Texture".to_string(), ToString::to_string); .map_or("Unknown GLTF Texture".to_string(), ToString::to_string);
match gltf_texture.source().source() { match gltf_texture.source().source() {
gltf::image::Source::View { view, mime_type } => { Source::View { view, mime_type } => {
let start = view.offset(); let start = view.offset();
let end = view.offset() + view.length(); let end = view.offset() + view.length();
let buffer = &buffer_data[view.buffer().index()][start..end]; let buffer = &buffer_data[view.buffer().index()][start..end];
@ -868,7 +868,7 @@ async fn load_image<'a, 'b>(
label: GltfAssetLabel::Texture(gltf_texture.index()), label: GltfAssetLabel::Texture(gltf_texture.index()),
}) })
} }
gltf::image::Source::Uri { uri, mime_type } => { Source::Uri { uri, mime_type } => {
let uri = percent_encoding::percent_decode_str(uri) let uri = percent_encoding::percent_decode_str(uri)
.decode_utf8() .decode_utf8()
.unwrap(); .unwrap();
@ -1697,7 +1697,7 @@ async fn load_buffers(
/// It resolves a Gltf tree and allows for a safe Gltf nodes iteration, /// It resolves a Gltf tree and allows for a safe Gltf nodes iteration,
/// putting dependant nodes before dependencies. /// putting dependant nodes before dependencies.
struct GltfTreeIterator<'a> { struct GltfTreeIterator<'a> {
nodes: Vec<gltf::Node<'a>>, nodes: Vec<Node<'a>>,
} }
impl<'a> GltfTreeIterator<'a> { impl<'a> GltfTreeIterator<'a> {
@ -1784,7 +1784,7 @@ impl<'a> GltfTreeIterator<'a> {
} }
impl<'a> Iterator for GltfTreeIterator<'a> { impl<'a> Iterator for GltfTreeIterator<'a> {
type Item = gltf::Node<'a>; type Item = Node<'a>;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
self.nodes.pop() self.nodes.pop()

View file

@ -763,7 +763,7 @@ mod tests {
fn rhombus() { fn rhombus() {
let rhombus = Rhombus::new(2.0, 1.0); let rhombus = Rhombus::new(2.0, 1.0);
let translation = Vec2::new(2.0, 1.0); let translation = Vec2::new(2.0, 1.0);
let rotation = Rot2::radians(std::f32::consts::FRAC_PI_4); let rotation = Rot2::radians(FRAC_PI_4);
let isometry = Isometry2d::new(translation, rotation); let isometry = Isometry2d::new(translation, rotation);
let aabb = rhombus.aabb_2d(isometry); let aabb = rhombus.aabb_2d(isometry);
@ -912,10 +912,7 @@ mod tests {
let rectangle = Rectangle::new(2.0, 1.0); let rectangle = Rectangle::new(2.0, 1.0);
let translation = Vec2::new(2.0, 1.0); let translation = Vec2::new(2.0, 1.0);
let aabb = rectangle.aabb_2d(Isometry2d::new( let aabb = rectangle.aabb_2d(Isometry2d::new(translation, Rot2::radians(FRAC_PI_4)));
translation,
Rot2::radians(std::f32::consts::FRAC_PI_4),
));
let expected_half_size = Vec2::splat(1.0606601); let expected_half_size = Vec2::splat(1.0606601);
assert_eq!(aabb.min, translation - expected_half_size); assert_eq!(aabb.min, translation - expected_half_size);
assert_eq!(aabb.max, translation + expected_half_size); assert_eq!(aabb.max, translation + expected_half_size);

View file

@ -310,7 +310,7 @@ mod tests {
fn rectangle() { fn rectangle() {
let extrusion = Extrusion::new(Rectangle::new(2.0, 1.0), 4.0); let extrusion = Extrusion::new(Rectangle::new(2.0, 1.0), 4.0);
let translation = Vec3::new(3., 4., 5.); let translation = Vec3::new(3., 4., 5.);
let rotation = Quat::from_rotation_z(std::f32::consts::FRAC_PI_4); let rotation = Quat::from_rotation_z(FRAC_PI_4);
let isometry = Isometry3d::new(translation, rotation); let isometry = Isometry3d::new(translation, rotation);
let aabb = extrusion.aabb_3d(isometry); let aabb = extrusion.aabb_3d(isometry);

View file

@ -1748,7 +1748,7 @@ impl RegularPolygon {
/// With a rotation of 0, a vertex will be placed at the top `(0.0, circumradius)`. /// With a rotation of 0, a vertex will be placed at the top `(0.0, circumradius)`.
pub fn vertices(self, rotation: f32) -> impl IntoIterator<Item = Vec2> { pub fn vertices(self, rotation: f32) -> impl IntoIterator<Item = Vec2> {
// Add pi/2 so that the polygon has a vertex at the top (sin is 1.0 and cos is 0.0) // Add pi/2 so that the polygon has a vertex at the top (sin is 1.0 and cos is 0.0)
let start_angle = rotation + std::f32::consts::FRAC_PI_2; let start_angle = rotation + FRAC_PI_2;
let step = std::f32::consts::TAU / self.sides as f32; let step = std::f32::consts::TAU / self.sides as f32;
(0..self.sides).map(move |i| { (0..self.sides).map(move |i| {
@ -1919,7 +1919,7 @@ mod tests {
assert_abs_diff_eq!(rhombus.half_diagonals, Vec2::new(1.0, 1.0)); assert_abs_diff_eq!(rhombus.half_diagonals, Vec2::new(1.0, 1.0));
assert_abs_diff_eq!( assert_abs_diff_eq!(
rhombus.half_diagonals, rhombus.half_diagonals,
Rhombus::from_inradius(std::f32::consts::FRAC_1_SQRT_2).half_diagonals Rhombus::from_inradius(FRAC_1_SQRT_2).half_diagonals
); );
} }
@ -2072,7 +2072,7 @@ mod tests {
let mut rotated_vertices = polygon.vertices(std::f32::consts::FRAC_PI_4).into_iter(); let mut rotated_vertices = polygon.vertices(std::f32::consts::FRAC_PI_4).into_iter();
// Distance from the origin to the middle of a side, derived using Pythagorean theorem // Distance from the origin to the middle of a side, derived using Pythagorean theorem
let side_sistance = std::f32::consts::FRAC_1_SQRT_2; let side_sistance = FRAC_1_SQRT_2;
assert!( assert!(
(rotated_vertices.next().unwrap() - Vec2::new(-side_sistance, side_sistance)).length() (rotated_vertices.next().unwrap() - Vec2::new(-side_sistance, side_sistance)).length()
< 1e-7, < 1e-7,

View file

@ -344,7 +344,7 @@ impl Plugin for PbrPlugin {
PostUpdate, PostUpdate,
( (
add_clusters.in_set(SimulationLightSystems::AddClusters), add_clusters.in_set(SimulationLightSystems::AddClusters),
crate::assign_objects_to_clusters assign_objects_to_clusters
.in_set(SimulationLightSystems::AssignLightsToClusters) .in_set(SimulationLightSystems::AssignLightsToClusters)
.after(TransformSystem::TransformPropagate) .after(TransformSystem::TransformPropagate)
.after(VisibilitySystems::CheckVisibility) .after(VisibilitySystems::CheckVisibility)

View file

@ -637,7 +637,7 @@ pub fn prepare_lights(
// point light shadows and `spot_light_shadow_maps_count` spot light shadow maps, // point light shadows and `spot_light_shadow_maps_count` spot light shadow maps,
// - then by entity as a stable key to ensure that a consistent set of lights are chosen if the light count limit is exceeded. // - then by entity as a stable key to ensure that a consistent set of lights are chosen if the light count limit is exceeded.
point_lights.sort_by(|(entity_1, light_1, _), (entity_2, light_2, _)| { point_lights.sort_by(|(entity_1, light_1, _), (entity_2, light_2, _)| {
crate::cluster::clusterable_object_order( clusterable_object_order(
( (
entity_1, entity_1,
&light_1.shadows_enabled, &light_1.shadows_enabled,

View file

@ -1,4 +1,4 @@
use std::mem; use std::mem::{self, size_of};
use allocator::MeshAllocator; use allocator::MeshAllocator;
use bevy_asset::{load_internal_asset, AssetId}; use bevy_asset::{load_internal_asset, AssetId};
@ -1027,8 +1027,7 @@ pub fn extract_meshes_for_gpu_building(
no_automatic_batching, no_automatic_batching,
); );
let lightmap_uv_rect = let lightmap_uv_rect = pack_lightmap_uv_rect(lightmap.map(|lightmap| lightmap.uv_rect));
lightmap::pack_lightmap_uv_rect(lightmap.map(|lightmap| lightmap.uv_rect));
let gpu_mesh_culling_data = any_gpu_culling.then(|| MeshCullingData::new(aabb)); let gpu_mesh_culling_data = any_gpu_culling.then(|| MeshCullingData::new(aabb));
@ -2376,7 +2375,7 @@ impl<P: PhaseItem> RenderCommand<P> for DrawMesh {
return RenderCommandResult::Skip; return RenderCommandResult::Skip;
} }
Some(buffer) => Some(( Some(buffer) => Some((
index as u64 * mem::size_of::<IndirectParameters>() as u64, index as u64 * size_of::<IndirectParameters>() as u64,
buffer, buffer,
)), )),
}, },

View file

@ -4,13 +4,14 @@ use bevy_math::Mat4;
use bevy_render::{ use bevy_render::{
mesh::morph::MAX_MORPH_WEIGHTS, render_resource::*, renderer::RenderDevice, texture::GpuImage, mesh::morph::MAX_MORPH_WEIGHTS, render_resource::*, renderer::RenderDevice, texture::GpuImage,
}; };
use std::mem::size_of;
use crate::render::skin::MAX_JOINTS; use crate::render::skin::MAX_JOINTS;
const MORPH_WEIGHT_SIZE: usize = std::mem::size_of::<f32>(); const MORPH_WEIGHT_SIZE: usize = size_of::<f32>();
pub const MORPH_BUFFER_SIZE: usize = MAX_MORPH_WEIGHTS * MORPH_WEIGHT_SIZE; pub const MORPH_BUFFER_SIZE: usize = MAX_MORPH_WEIGHTS * MORPH_WEIGHT_SIZE;
const JOINT_SIZE: usize = std::mem::size_of::<Mat4>(); const JOINT_SIZE: usize = size_of::<Mat4>();
pub(crate) const JOINT_BUFFER_SIZE: usize = MAX_JOINTS * JOINT_SIZE; pub(crate) const JOINT_BUFFER_SIZE: usize = MAX_JOINTS * JOINT_SIZE;
/// Individual layout entries. /// Individual layout entries.

View file

@ -1,4 +1,4 @@
use std::{iter, mem}; use std::{iter, mem, mem::size_of};
use bevy_ecs::entity::EntityHashMap; use bevy_ecs::entity::EntityHashMap;
use bevy_ecs::prelude::*; use bevy_ecs::prelude::*;
@ -83,7 +83,7 @@ const WGPU_MIN_ALIGN: usize = 256;
/// Align a [`RawBufferVec`] to `N` bytes by padding the end with `T::default()` values. /// Align a [`RawBufferVec`] to `N` bytes by padding the end with `T::default()` values.
fn add_to_alignment<T: NoUninit + Default>(buffer: &mut RawBufferVec<T>) { fn add_to_alignment<T: NoUninit + Default>(buffer: &mut RawBufferVec<T>) {
let n = WGPU_MIN_ALIGN; let n = WGPU_MIN_ALIGN;
let t_size = mem::size_of::<T>(); let t_size = size_of::<T>();
if !can_align(n, t_size) { if !can_align(n, t_size) {
// This panic is stripped at compile time, due to n, t_size and can_align being const // This panic is stripped at compile time, due to n, t_size and can_align being const
panic!( panic!(
@ -131,7 +131,7 @@ pub fn extract_morphs(
uniform.current_buffer.extend(legal_weights); uniform.current_buffer.extend(legal_weights);
add_to_alignment::<f32>(&mut uniform.current_buffer); add_to_alignment::<f32>(&mut uniform.current_buffer);
let index = (start * mem::size_of::<f32>()) as u32; let index = (start * size_of::<f32>()) as u32;
morph_indices.current.insert(entity, MorphIndex { index }); morph_indices.current.insert(entity, MorphIndex { index });
} }
} }

View file

@ -1,4 +1,4 @@
use std::mem; use std::mem::{self, size_of};
use bevy_asset::Assets; use bevy_asset::Assets;
use bevy_ecs::entity::EntityHashMap; use bevy_ecs::entity::EntityHashMap;
@ -26,7 +26,7 @@ impl SkinIndex {
/// Index to be in address space based on the size of a skin uniform. /// Index to be in address space based on the size of a skin uniform.
const fn new(start: usize) -> Self { const fn new(start: usize) -> Self {
SkinIndex { SkinIndex {
index: (start * std::mem::size_of::<Mat4>()) as u32, index: (start * size_of::<Mat4>()) as u32,
} }
} }
} }

View file

@ -7,9 +7,13 @@
html_favicon_url = "https://bevyengine.org/assets/icon.png" html_favicon_url = "https://bevyengine.org/assets/icon.png"
)] )]
use core::fmt::{self, Formatter, Pointer};
use core::{ use core::{
cell::UnsafeCell, marker::PhantomData, mem::ManuallyDrop, num::NonZeroUsize, ptr::NonNull, cell::UnsafeCell,
fmt::{self, Formatter, Pointer},
marker::PhantomData,
mem::{align_of, ManuallyDrop},
num::NonZeroUsize,
ptr::NonNull,
}; };
/// Used as a type argument to [`Ptr`], [`PtrMut`] and [`OwningPtr`] to specify that the pointer is aligned. /// Used as a type argument to [`Ptr`], [`PtrMut`] and [`OwningPtr`] to specify that the pointer is aligned.
@ -599,7 +603,7 @@ trait DebugEnsureAligned {
impl<T: Sized> DebugEnsureAligned for *mut T { impl<T: Sized> DebugEnsureAligned for *mut T {
#[track_caller] #[track_caller]
fn debug_ensure_aligned(self) -> Self { fn debug_ensure_aligned(self) -> Self {
let align = core::mem::align_of::<T>(); let align = align_of::<T>();
// Implementation shamelessly borrowed from the currently unstable // Implementation shamelessly borrowed from the currently unstable
// ptr.is_aligned_to. // ptr.is_aligned_to.
// //

View file

@ -150,10 +150,7 @@ impl<'a, 'b> WhereClauseOptions<'a, 'b> {
/// // Custom bounds /// // Custom bounds
/// T: MyTrait, /// T: MyTrait,
/// ``` /// ```
pub fn extend_where_clause( pub fn extend_where_clause(&self, where_clause: Option<&WhereClause>) -> TokenStream {
&self,
where_clause: Option<&WhereClause>,
) -> proc_macro2::TokenStream {
// We would normally just use `Self`, but that won't work for generating things like assertion functions // We would normally just use `Self`, but that won't work for generating things like assertion functions
// and trait impls for a type's reference (e.g. `impl FromArg for &MyType`) // and trait impls for a type's reference (e.g. `impl FromArg for &MyType`)
let this = self.meta.type_path().true_type(); let this = self.meta.type_path().true_type();
@ -259,7 +256,7 @@ impl<'a, 'b> WhereClauseOptions<'a, 'b> {
} }
/// The minimum required bounds for a type to be reflected. /// The minimum required bounds for a type to be reflected.
fn required_bounds(&self) -> proc_macro2::TokenStream { fn required_bounds(&self) -> TokenStream {
quote!(#FQAny + #FQSend + #FQSync) quote!(#FQAny + #FQSend + #FQSync)
} }
} }
@ -305,7 +302,7 @@ impl<T> ResultSifter<T> {
} }
/// Turns an `Option<TokenStream>` into a `TokenStream` for an `Option`. /// Turns an `Option<TokenStream>` into a `TokenStream` for an `Option`.
pub(crate) fn wrap_in_option(tokens: Option<proc_macro2::TokenStream>) -> proc_macro2::TokenStream { pub(crate) fn wrap_in_option(tokens: Option<TokenStream>) -> TokenStream {
match tokens { match tokens {
Some(tokens) => quote! { Some(tokens) => quote! {
#FQOption::Some(#tokens) #FQOption::Some(#tokens)
@ -324,11 +321,11 @@ pub(crate) enum StringExpr {
/// This is either a string literal like `"mystring"`, /// This is either a string literal like `"mystring"`,
/// or a string created by a macro like [`module_path`] /// or a string created by a macro like [`module_path`]
/// or [`concat`]. /// or [`concat`].
Const(proc_macro2::TokenStream), Const(TokenStream),
/// A [string slice](str) that is borrowed for a `'static` lifetime. /// A [string slice](str) that is borrowed for a `'static` lifetime.
Borrowed(proc_macro2::TokenStream), Borrowed(TokenStream),
/// An [owned string](String). /// An [owned string](String).
Owned(proc_macro2::TokenStream), Owned(TokenStream),
} }
impl<T: ToString + Spanned> From<T> for StringExpr { impl<T: ToString + Spanned> From<T> for StringExpr {
@ -357,7 +354,7 @@ impl StringExpr {
/// The returned expression will allocate unless the [`StringExpr`] is [already owned]. /// The returned expression will allocate unless the [`StringExpr`] is [already owned].
/// ///
/// [already owned]: StringExpr::Owned /// [already owned]: StringExpr::Owned
pub fn into_owned(self) -> proc_macro2::TokenStream { pub fn into_owned(self) -> TokenStream {
match self { match self {
Self::Const(tokens) | Self::Borrowed(tokens) => quote! { Self::Const(tokens) | Self::Borrowed(tokens) => quote! {
::std::string::ToString::to_string(#tokens) ::std::string::ToString::to_string(#tokens)
@ -367,7 +364,7 @@ impl StringExpr {
} }
/// Returns tokens for a statically borrowed [string slice](str). /// Returns tokens for a statically borrowed [string slice](str).
pub fn into_borrowed(self) -> proc_macro2::TokenStream { pub fn into_borrowed(self) -> TokenStream {
match self { match self {
Self::Const(tokens) | Self::Borrowed(tokens) => tokens, Self::Const(tokens) | Self::Borrowed(tokens) => tokens,
Self::Owned(owned) => quote! { Self::Owned(owned) => quote! {

View file

@ -536,7 +536,7 @@ pub fn array_partial_eq<A: Array + ?Sized>(
/// // ] /// // ]
/// ``` /// ```
#[inline] #[inline]
pub fn array_debug(dyn_array: &dyn Array, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { pub fn array_debug(dyn_array: &dyn Array, f: &mut Formatter<'_>) -> std::fmt::Result {
let mut debug = f.debug_list(); let mut debug = f.debug_list();
for item in dyn_array.iter() { for item in dyn_array.iter() {
debug.entry(&item as &dyn Debug); debug.entry(&item as &dyn Debug);

View file

@ -1,3 +1,6 @@
// Temporary workaround for impl_reflect!(Option/Result false-positive
#![allow(unused_qualifications)]
use crate::std_traits::ReflectDefault; use crate::std_traits::ReflectDefault;
use crate::utility::{ use crate::utility::{
reflect_hasher, GenericTypeInfoCell, GenericTypePathCell, NonGenericTypeInfoCell, reflect_hasher, GenericTypeInfoCell, GenericTypePathCell, NonGenericTypeInfoCell,
@ -1557,14 +1560,14 @@ impl PartialReflect for Cow<'static, str> {
fn reflect_hash(&self) -> Option<u64> { fn reflect_hash(&self) -> Option<u64> {
let mut hasher = reflect_hasher(); let mut hasher = reflect_hasher();
Hash::hash(&std::any::Any::type_id(self), &mut hasher); Hash::hash(&Any::type_id(self), &mut hasher);
Hash::hash(self, &mut hasher); Hash::hash(self, &mut hasher);
Some(hasher.finish()) Some(hasher.finish())
} }
fn reflect_partial_eq(&self, value: &dyn PartialReflect) -> Option<bool> { fn reflect_partial_eq(&self, value: &dyn PartialReflect) -> Option<bool> {
if let Some(value) = value.try_downcast_ref::<Self>() { if let Some(value) = value.try_downcast_ref::<Self>() {
Some(std::cmp::PartialEq::eq(self, value)) Some(PartialEq::eq(self, value))
} else { } else {
Some(false) Some(false)
} }
@ -1608,7 +1611,7 @@ impl GetTypeRegistration for Cow<'static, str> {
} }
impl FromReflect for Cow<'static, str> { impl FromReflect for Cow<'static, str> {
fn from_reflect(reflect: &dyn crate::PartialReflect) -> Option<Self> { fn from_reflect(reflect: &dyn PartialReflect) -> Option<Self> {
Some(reflect.try_downcast_ref::<Cow<'static, str>>()?.clone()) Some(reflect.try_downcast_ref::<Cow<'static, str>>()?.clone())
} }
} }
@ -1855,14 +1858,14 @@ impl PartialReflect for &'static str {
fn reflect_hash(&self) -> Option<u64> { fn reflect_hash(&self) -> Option<u64> {
let mut hasher = reflect_hasher(); let mut hasher = reflect_hasher();
Hash::hash(&std::any::Any::type_id(self), &mut hasher); Hash::hash(&Any::type_id(self), &mut hasher);
Hash::hash(self, &mut hasher); Hash::hash(self, &mut hasher);
Some(hasher.finish()) Some(hasher.finish())
} }
fn reflect_partial_eq(&self, value: &dyn PartialReflect) -> Option<bool> { fn reflect_partial_eq(&self, value: &dyn PartialReflect) -> Option<bool> {
if let Some(value) = value.try_downcast_ref::<Self>() { if let Some(value) = value.try_downcast_ref::<Self>() {
Some(std::cmp::PartialEq::eq(self, value)) Some(PartialEq::eq(self, value))
} else { } else {
Some(false) Some(false)
} }
@ -1993,14 +1996,14 @@ impl PartialReflect for &'static Path {
fn reflect_hash(&self) -> Option<u64> { fn reflect_hash(&self) -> Option<u64> {
let mut hasher = reflect_hasher(); let mut hasher = reflect_hasher();
Hash::hash(&std::any::Any::type_id(self), &mut hasher); Hash::hash(&Any::type_id(self), &mut hasher);
Hash::hash(self, &mut hasher); Hash::hash(self, &mut hasher);
Some(hasher.finish()) Some(hasher.finish())
} }
fn reflect_partial_eq(&self, value: &dyn PartialReflect) -> Option<bool> { fn reflect_partial_eq(&self, value: &dyn PartialReflect) -> Option<bool> {
if let Some(value) = value.try_downcast_ref::<Self>() { if let Some(value) = value.try_downcast_ref::<Self>() {
Some(std::cmp::PartialEq::eq(self, value)) Some(PartialEq::eq(self, value))
} else { } else {
Some(false) Some(false)
} }
@ -2126,14 +2129,14 @@ impl PartialReflect for Cow<'static, Path> {
fn reflect_hash(&self) -> Option<u64> { fn reflect_hash(&self) -> Option<u64> {
let mut hasher = reflect_hasher(); let mut hasher = reflect_hasher();
Hash::hash(&std::any::Any::type_id(self), &mut hasher); Hash::hash(&Any::type_id(self), &mut hasher);
Hash::hash(self, &mut hasher); Hash::hash(self, &mut hasher);
Some(hasher.finish()) Some(hasher.finish())
} }
fn reflect_partial_eq(&self, value: &dyn PartialReflect) -> Option<bool> { fn reflect_partial_eq(&self, value: &dyn PartialReflect) -> Option<bool> {
if let Some(value) = value.try_downcast_ref::<Self>() { if let Some(value) = value.try_downcast_ref::<Self>() {
Some(std::cmp::PartialEq::eq(self, value)) Some(PartialEq::eq(self, value))
} else { } else {
Some(false) Some(false)
} }
@ -2443,7 +2446,7 @@ mod tests {
let a: &dyn PartialReflect = &std::num::NonZeroUsize::new(42).unwrap(); let a: &dyn PartialReflect = &std::num::NonZeroUsize::new(42).unwrap();
let b: &dyn PartialReflect = &std::num::NonZeroUsize::new(42).unwrap(); let b: &dyn PartialReflect = &std::num::NonZeroUsize::new(42).unwrap();
assert!(a.reflect_partial_eq(b).unwrap_or_default()); assert!(a.reflect_partial_eq(b).unwrap_or_default());
let forty_two: std::num::NonZeroUsize = crate::FromReflect::from_reflect(a).unwrap(); let forty_two: std::num::NonZeroUsize = FromReflect::from_reflect(a).unwrap();
assert_eq!(forty_two, std::num::NonZeroUsize::new(42).unwrap()); assert_eq!(forty_two, std::num::NonZeroUsize::new(42).unwrap());
} }

View file

@ -74,7 +74,7 @@ impl<'a> AccessError<'a> {
self.offset.as_ref() self.offset.as_ref()
} }
} }
impl std::fmt::Display for AccessError<'_> { impl fmt::Display for AccessError<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let AccessError { let AccessError {
kind, kind,

View file

@ -801,7 +801,7 @@ impl Mesh {
// The indices of `other` should start after the last vertex of `self`. // The indices of `other` should start after the last vertex of `self`.
let index_offset = self let index_offset = self
.attribute(Mesh::ATTRIBUTE_POSITION) .attribute(Mesh::ATTRIBUTE_POSITION)
.get_or_insert(&VertexAttributeValues::Float32x3(Vec::default())) .get_or_insert(&Float32x3(Vec::default()))
.len(); .len();
// Extend attributes of `self` with attributes of `other`. // Extend attributes of `self` with attributes of `other`.

View file

@ -11,7 +11,7 @@ use bevy_hierarchy::Children;
use bevy_math::Vec3; use bevy_math::Vec3;
use bevy_reflect::prelude::*; use bevy_reflect::prelude::*;
use bytemuck::{Pod, Zeroable}; use bytemuck::{Pod, Zeroable};
use std::{iter, mem}; use std::{iter, mem::size_of};
use thiserror::Error; use thiserror::Error;
const MAX_TEXTURE_WIDTH: u32 = 2048; const MAX_TEXTURE_WIDTH: u32 = 2048;
@ -84,7 +84,7 @@ impl MorphTargetImage {
}; };
let data = targets let data = targets
.flat_map(|mut attributes| { .flat_map(|mut attributes| {
let layer_byte_count = (padding + component_count) as usize * mem::size_of::<f32>(); let layer_byte_count = (padding + component_count) as usize * size_of::<f32>();
let mut buffer = Vec::with_capacity(layer_byte_count); let mut buffer = Vec::with_capacity(layer_byte_count);
for _ in 0..vertex_count { for _ in 0..vertex_count {
let Some(to_add) = attributes.next() else { let Some(to_add) = attributes.next() else {
@ -93,7 +93,7 @@ impl MorphTargetImage {
buffer.extend_from_slice(bytemuck::bytes_of(&to_add)); buffer.extend_from_slice(bytemuck::bytes_of(&to_add));
} }
// Pad each layer so that they fit width * height // Pad each layer so that they fit width * height
buffer.extend(iter::repeat(0).take(padding as usize * mem::size_of::<f32>())); buffer.extend(iter::repeat(0).take(padding as usize * size_of::<f32>()));
debug_assert_eq!(buffer.len(), layer_byte_count); debug_assert_eq!(buffer.len(), layer_byte_count);
buffer buffer
}) })

View file

@ -487,7 +487,7 @@ impl MeshBuilder for EllipseMeshBuilder {
let mut uvs = Vec::with_capacity(resolution); let mut uvs = Vec::with_capacity(resolution);
// Add pi/2 so that there is a vertex at the top (sin is 1.0 and cos is 0.0) // Add pi/2 so that there is a vertex at the top (sin is 1.0 and cos is 0.0)
let start_angle = std::f32::consts::FRAC_PI_2; let start_angle = FRAC_PI_2;
let step = std::f32::consts::TAU / self.resolution as f32; let step = std::f32::consts::TAU / self.resolution as f32;
for i in 0..self.resolution { for i in 0..self.resolution {
@ -595,7 +595,7 @@ impl MeshBuilder for AnnulusMeshBuilder {
// the vertices at `start_angle` are duplicated for the purposes of UV // the vertices at `start_angle` are duplicated for the purposes of UV
// mapping. Here, each iteration places a pair of vertices at a fixed // mapping. Here, each iteration places a pair of vertices at a fixed
// angle from the center of the annulus. // angle from the center of the annulus.
let start_angle = std::f32::consts::FRAC_PI_2; let start_angle = FRAC_PI_2;
let step = std::f32::consts::TAU / self.resolution as f32; let step = std::f32::consts::TAU / self.resolution as f32;
for i in 0..=self.resolution { for i in 0..=self.resolution {
let theta = start_angle + i as f32 * step; let theta = start_angle + i as f32 * step;

View file

@ -123,7 +123,7 @@ impl SphereMeshBuilder {
let inclination = point.y.acos(); let inclination = point.y.acos();
let azimuth = point.z.atan2(point.x); let azimuth = point.z.atan2(point.x);
let norm_inclination = inclination / std::f32::consts::PI; let norm_inclination = inclination / PI;
let norm_azimuth = 0.5 - (azimuth / std::f32::consts::TAU); let norm_azimuth = 0.5 - (azimuth / std::f32::consts::TAU);
[norm_azimuth, norm_inclination] [norm_azimuth, norm_inclination]

View file

@ -606,7 +606,7 @@ impl<'a> TrackedRenderPass<'a> {
} }
impl WriteTimestamp for TrackedRenderPass<'_> { impl WriteTimestamp for TrackedRenderPass<'_> {
fn write_timestamp(&mut self, query_set: &wgpu::QuerySet, index: u32) { fn write_timestamp(&mut self, query_set: &QuerySet, index: u32) {
self.pass.write_timestamp(query_set, index); self.pass.write_timestamp(query_set, index);
} }
} }

View file

@ -1,4 +1,4 @@
use std::{iter, marker::PhantomData}; use std::{iter, marker::PhantomData, mem::size_of};
use crate::{ use crate::{
render_resource::Buffer, render_resource::Buffer,
@ -53,7 +53,7 @@ impl<T: NoUninit> RawBufferVec<T> {
values: Vec::new(), values: Vec::new(),
buffer: None, buffer: None,
capacity: 0, capacity: 0,
item_size: std::mem::size_of::<T>(), item_size: size_of::<T>(),
buffer_usage, buffer_usage,
label: None, label: None,
changed: false, changed: false,
@ -387,7 +387,7 @@ where
len: 0, len: 0,
buffer: None, buffer: None,
capacity: 0, capacity: 0,
item_size: std::mem::size_of::<T>(), item_size: size_of::<T>(),
buffer_usage, buffer_usage,
label: None, label: None,
label_changed: false, label_changed: false,
@ -444,7 +444,7 @@ where
let size = self.item_size * capacity; let size = self.item_size * capacity;
self.buffer = Some(device.create_buffer(&wgpu::BufferDescriptor { self.buffer = Some(device.create_buffer(&wgpu::BufferDescriptor {
label: self.label.as_deref(), label: self.label.as_deref(),
size: size as wgpu::BufferAddress, size: size as BufferAddress,
usage: BufferUsages::COPY_DST | self.buffer_usage, usage: BufferUsages::COPY_DST | self.buffer_usage,
mapped_at_creation: false, mapped_at_creation: false,
})); }));

View file

@ -32,8 +32,8 @@ use wgpu::{
use crate::render_resource::resource_macros::*; use crate::render_resource::resource_macros::*;
render_resource_wrapper!(ErasedShaderModule, wgpu::ShaderModule); render_resource_wrapper!(ErasedShaderModule, ShaderModule);
render_resource_wrapper!(ErasedPipelineLayout, wgpu::PipelineLayout); render_resource_wrapper!(ErasedPipelineLayout, PipelineLayout);
/// A descriptor for a [`Pipeline`]. /// A descriptor for a [`Pipeline`].
/// ///
@ -316,7 +316,7 @@ impl ShaderCache {
}, },
)?; )?;
wgpu::ShaderSource::Naga(Cow::Owned(naga)) ShaderSource::Naga(Cow::Owned(naga))
} }
}; };

View file

@ -210,7 +210,7 @@ pub async fn initialize_renderer(
let mut limits = options.limits.clone(); let mut limits = options.limits.clone();
if matches!(options.priority, WgpuSettingsPriority::Functionality) { if matches!(options.priority, WgpuSettingsPriority::Functionality) {
features = adapter.features(); features = adapter.features();
if adapter_info.device_type == wgpu::DeviceType::DiscreteGpu { if adapter_info.device_type == DeviceType::DiscreteGpu {
// `MAPPABLE_PRIMARY_BUFFERS` can have a significant, negative performance impact for // `MAPPABLE_PRIMARY_BUFFERS` can have a significant, negative performance impact for
// discrete GPUs due to having to transfer data across the PCI-E bus and so it // discrete GPUs due to having to transfer data across the PCI-E bus and so it
// should not be automatically enabled in this case. It is however beneficial for // should not be automatically enabled in this case. It is however beneficial for

View file

@ -98,7 +98,7 @@ fn fallback_image_new(
render_device.create_texture_with_data( render_device.create_texture_with_data(
render_queue, render_queue,
&image.texture_descriptor, &image.texture_descriptor,
wgpu::util::TextureDataOrder::default(), TextureDataOrder::default(),
&image.data, &image.data,
) )
} else { } else {

View file

@ -496,12 +496,10 @@ pub fn check_visibility<QF>(
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use bevy_app::prelude::*;
use bevy_ecs::prelude::*;
use super::*; use super::*;
use bevy_app::prelude::*;
use bevy_hierarchy::BuildChildren; use bevy_hierarchy::BuildChildren;
use std::mem::size_of;
fn visibility_bundle(visibility: Visibility) -> VisibilityBundle { fn visibility_bundle(visibility: Visibility) -> VisibilityBundle {
VisibilityBundle { VisibilityBundle {
@ -763,8 +761,7 @@ mod test {
#[test] #[test]
fn ensure_visibility_enum_size() { fn ensure_visibility_enum_size() {
use std::mem; assert_eq!(1, size_of::<Visibility>());
assert_eq!(1, mem::size_of::<Visibility>()); assert_eq!(1, size_of::<Option<Visibility>>());
assert_eq!(1, mem::size_of::<Option<Visibility>>());
} }
} }

View file

@ -457,7 +457,7 @@ pub fn create_surfaces(
} }
} }
let configuration = wgpu::SurfaceConfiguration { let configuration = SurfaceConfiguration {
format, format,
width: window.physical_width, width: window.physical_width,
height: window.physical_height, height: window.physical_height,

View file

@ -27,7 +27,7 @@ mod sealed {
/// ///
/// It is sealed, and auto implemented for all [`States`] types and /// It is sealed, and auto implemented for all [`States`] types and
/// tuples containing them. /// tuples containing them.
pub trait StateSet: sealed::StateSetSealed { pub trait StateSet: StateSetSealed {
/// The total [`DEPENDENCY_DEPTH`](`States::DEPENDENCY_DEPTH`) of all /// The total [`DEPENDENCY_DEPTH`](`States::DEPENDENCY_DEPTH`) of all
/// the states that are part of this [`StateSet`], added together. /// the states that are part of this [`StateSet`], added together.
/// ///

View file

@ -113,7 +113,7 @@ impl Plugin for TextPlugin {
.in_set(VisibilitySystems::CalculateBounds) .in_set(VisibilitySystems::CalculateBounds)
.after(update_text2d_layout), .after(update_text2d_layout),
update_text2d_layout update_text2d_layout
.after(font_atlas_set::remove_dropped_font_atlas_sets) .after(remove_dropped_font_atlas_sets)
// Potential conflict: `Assets<Image>` // Potential conflict: `Assets<Image>`
// In practice, they run independently since `bevy_render::camera_update_system` // In practice, they run independently since `bevy_render::camera_update_system`
// will only ever observe its own render target, and `update_text2d_layout` // will only ever observe its own render target, and `update_text2d_layout`

View file

@ -304,7 +304,7 @@ pub struct TextMeasureInfo {
pub min: Vec2, pub min: Vec2,
/// Maximum size for a text area in pixels, to be used when laying out widgets with taffy /// Maximum size for a text area in pixels, to be used when laying out widgets with taffy
pub max: Vec2, pub max: Vec2,
buffer: cosmic_text::Buffer, buffer: Buffer,
} }
impl std::fmt::Debug for TextMeasureInfo { impl std::fmt::Debug for TextMeasureInfo {

View file

@ -134,7 +134,7 @@ pub fn once_after_delay(duration: Duration) -> impl FnMut(Res<Time>) -> bool + C
/// } /// }
/// ``` /// ```
pub fn once_after_real_delay(duration: Duration) -> impl FnMut(Res<Time<Real>>) -> bool + Clone { pub fn once_after_real_delay(duration: Duration) -> impl FnMut(Res<Time<Real>>) -> bool + Clone {
let mut timer = Timer::new(duration, crate::TimerMode::Once); let mut timer = Timer::new(duration, TimerMode::Once);
move |time: Res<Time<Real>>| { move |time: Res<Time<Real>>| {
timer.tick(time.delta()); timer.tick(time.delta());
timer.just_finished() timer.just_finished()

View file

@ -376,7 +376,7 @@ pub struct NoOpHasher(u64);
// This is for types that already contain a high-quality hash and want to skip // This is for types that already contain a high-quality hash and want to skip
// re-hashing that hash. // re-hashing that hash.
impl std::hash::Hasher for NoOpHasher { impl Hasher for NoOpHasher {
fn finish(&self) -> u64 { fn finish(&self) -> u64 {
self.0 self.0
} }
@ -506,7 +506,7 @@ mod tests {
fn write_u64(&mut self, _: u64) {} fn write_u64(&mut self, _: u64) {}
} }
std::hash::Hash::hash(&TypeId::of::<()>(), &mut Hasher); Hash::hash(&TypeId::of::<()>(), &mut Hasher);
} }
#[test] #[test]

View file

@ -405,7 +405,7 @@ impl Window {
/// ///
/// See [`WindowResolution`] for an explanation about logical/physical sizes. /// See [`WindowResolution`] for an explanation about logical/physical sizes.
#[inline] #[inline]
pub fn physical_size(&self) -> bevy_math::UVec2 { pub fn physical_size(&self) -> UVec2 {
self.resolution.physical_size() self.resolution.physical_size()
} }

View file

@ -286,7 +286,7 @@ pub fn convert_physical_key_code(virtual_key_code: winit::keyboard::PhysicalKey)
} }
} }
pub fn convert_logical_key(logical_key_code: &winit::keyboard::Key) -> bevy_input::keyboard::Key { pub fn convert_logical_key(logical_key_code: &Key) -> bevy_input::keyboard::Key {
match logical_key_code { match logical_key_code {
Key::Character(s) => bevy_input::keyboard::Key::Character(s.clone()), Key::Character(s) => bevy_input::keyboard::Key::Character(s.clone()),
Key::Unidentified(nk) => bevy_input::keyboard::Key::Unidentified(convert_native_key(nk)), Key::Unidentified(nk) => bevy_input::keyboard::Key::Unidentified(convert_native_key(nk)),

View file

@ -159,7 +159,7 @@ pub struct WakeUp;
/// ///
/// Use `Res<EventLoopProxy>` to receive this resource. /// Use `Res<EventLoopProxy>` to receive this resource.
#[derive(Resource, Deref)] #[derive(Resource, Deref)]
pub struct EventLoopProxyWrapper<T: 'static>(winit::event_loop::EventLoopProxy<T>); pub struct EventLoopProxyWrapper<T: 'static>(EventLoopProxy<T>);
trait AppSendEvent { trait AppSendEvent {
fn send(&mut self, event: impl Into<WinitEvent>); fn send(&mut self, event: impl Into<WinitEvent>);

View file

@ -53,7 +53,7 @@ fn setup(
}, },
Skybox { Skybox {
image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"),
brightness: bevy::pbr::light_consts::lux::DIRECT_SUNLIGHT, brightness: light_consts::lux::DIRECT_SUNLIGHT,
..default() ..default()
}, },
)); ));

View file

@ -3,7 +3,7 @@
//! This example show how you can create components dynamically, spawn entities with those components //! This example show how you can create components dynamically, spawn entities with those components
//! as well as query for entities with those components. //! as well as query for entities with those components.
use std::{alloc::Layout, io::Write, ptr::NonNull}; use std::{alloc::Layout, io::Write, mem::size_of, ptr::NonNull};
use bevy::prelude::*; use bevy::prelude::*;
use bevy::{ use bevy::{
@ -118,7 +118,7 @@ fn main() {
// Calculate the length for the array based on the layout created for this component id // Calculate the length for the array based on the layout created for this component id
let info = world.components().get_info(id).unwrap(); let info = world.components().get_info(id).unwrap();
let len = info.layout().size() / std::mem::size_of::<u64>(); let len = info.layout().size() / size_of::<u64>();
let mut values: Vec<u64> = component let mut values: Vec<u64> = component
.take(len) .take(len)
.filter_map(|value| value.parse::<u64>().ok()) .filter_map(|value| value.parse::<u64>().ok())
@ -155,7 +155,7 @@ fn main() {
.map(|id| { .map(|id| {
let ptr = filtered_entity.get_by_id(id).unwrap(); let ptr = filtered_entity.get_by_id(id).unwrap();
let info = component_info.get(&id).unwrap(); let info = component_info.get(&id).unwrap();
let len = info.layout().size() / std::mem::size_of::<u64>(); let len = info.layout().size() / size_of::<u64>();
// SAFETY: // SAFETY:
// - All components are created with layout [u64] // - All components are created with layout [u64]

View file

@ -107,7 +107,7 @@ fn take_damage(
trigger: Trigger<Attack>, trigger: Trigger<Attack>,
mut hp: Query<(&mut HitPoints, &Name)>, mut hp: Query<(&mut HitPoints, &Name)>,
mut commands: Commands, mut commands: Commands,
mut app_exit: EventWriter<bevy::app::AppExit>, mut app_exit: EventWriter<AppExit>,
) { ) {
let attack = trigger.event(); let attack = trigger.event();
let (mut hp, name) = hp.get_mut(trigger.entity()).unwrap(); let (mut hp, name) = hp.get_mut(trigger.entity()).unwrap();
@ -118,7 +118,7 @@ fn take_damage(
} else { } else {
warn!("💀 {} has died a gruesome death", name); warn!("💀 {} has died a gruesome death", name);
commands.entity(trigger.entity()).despawn_recursive(); commands.entity(trigger.entity()).despawn_recursive();
app_exit.send(bevy::app::AppExit::Success); app_exit.send(AppExit::Success);
} }
info!("(propagation reached root)\n"); info!("(propagation reached root)\n");

View file

@ -332,7 +332,7 @@ mod pipelines_ready {
// and then update the pipelines status from there. // and then update the pipelines status from there.
// Writing between these Apps can only be done through the // Writing between these Apps can only be done through the
// `ExtractSchedule`. // `ExtractSchedule`.
app.sub_app_mut(bevy::render::RenderApp) app.sub_app_mut(RenderApp)
.add_systems(ExtractSchedule, update_pipelines_ready); .add_systems(ExtractSchedule, update_pipelines_ready);
} }
} }

View file

@ -473,7 +473,7 @@ impl MeshBuilder for HeartMeshBuilder {
// The `Extrudable` trait can be used to easily implement meshing for extrusions. // The `Extrudable` trait can be used to easily implement meshing for extrusions.
impl Extrudable for HeartMeshBuilder { impl Extrudable for HeartMeshBuilder {
fn perimeter(&self) -> Vec<bevy::render::mesh::PerimeterSegment> { fn perimeter(&self) -> Vec<PerimeterSegment> {
let resolution = self.resolution as u32; let resolution = self.resolution as u32;
vec![ vec![
// The left wing of the heart // The left wing of the heart

View file

@ -174,7 +174,7 @@ impl Shape {
impl ShapeSample for Shape { impl ShapeSample for Shape {
type Output = Vec3; type Output = Vec3;
fn sample_interior<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> Vec3 { fn sample_interior<R: Rng + ?Sized>(&self, rng: &mut R) -> Vec3 {
match self { match self {
Shape::Cuboid => CUBOID.sample_interior(rng), Shape::Cuboid => CUBOID.sample_interior(rng),
Shape::Sphere => SPHERE.sample_interior(rng), Shape::Sphere => SPHERE.sample_interior(rng),
@ -185,7 +185,7 @@ impl ShapeSample for Shape {
} }
} }
fn sample_boundary<R: rand::prelude::Rng + ?Sized>(&self, rng: &mut R) -> Self::Output { fn sample_boundary<R: Rng + ?Sized>(&self, rng: &mut R) -> Self::Output {
match self { match self {
Shape::Cuboid => CUBOID.sample_boundary(rng), Shape::Cuboid => CUBOID.sample_boundary(rng),
Shape::Sphere => SPHERE.sample_boundary(rng), Shape::Sphere => SPHERE.sample_boundary(rng),

View file

@ -7,8 +7,6 @@
//! into Bevy—render nodes are another, lower-level method—but it does allow //! into Bevy—render nodes are another, lower-level method—but it does allow
//! for better reuse of parts of Bevy's built-in mesh rendering logic. //! for better reuse of parts of Bevy's built-in mesh rendering logic.
use std::mem;
use bevy::{ use bevy::{
core_pipeline::core_3d::{Opaque3d, Opaque3dBinKey, CORE_3D_DEPTH_FORMAT}, core_pipeline::core_3d::{Opaque3d, Opaque3dBinKey, CORE_3D_DEPTH_FORMAT},
ecs::{ ecs::{
@ -38,6 +36,7 @@ use bevy::{
}, },
}; };
use bytemuck::{Pod, Zeroable}; use bytemuck::{Pod, Zeroable};
use std::mem::size_of;
/// A marker component that represents an entity that is to be rendered using /// A marker component that represents an entity that is to be rendered using
/// our custom phase item. /// our custom phase item.
@ -302,7 +301,7 @@ impl SpecializedRenderPipeline for CustomPhasePipeline {
shader_defs: vec![], shader_defs: vec![],
entry_point: "vertex".into(), entry_point: "vertex".into(),
buffers: vec![VertexBufferLayout { buffers: vec![VertexBufferLayout {
array_stride: mem::size_of::<Vertex>() as u64, array_stride: size_of::<Vertex>() as u64,
step_mode: VertexStepMode::Vertex, step_mode: VertexStepMode::Vertex,
// This needs to match the layout of [`Vertex`]. // This needs to match the layout of [`Vertex`].
attributes: vec![ attributes: vec![

View file

@ -17,6 +17,7 @@ use bevy::{
}, },
}; };
use crossbeam_channel::{Receiver, Sender}; use crossbeam_channel::{Receiver, Sender};
use std::mem::size_of;
/// This example uses a shader source file from the assets subdirectory /// This example uses a shader source file from the assets subdirectory
const SHADER_ASSET_PATH: &str = "shaders/gpu_readback.wgsl"; const SHADER_ASSET_PATH: &str = "shaders/gpu_readback.wgsl";
@ -129,7 +130,7 @@ impl FromWorld for Buffers {
// copy the buffer modified by the GPU into a mappable, CPU-accessible buffer // copy the buffer modified by the GPU into a mappable, CPU-accessible buffer
let cpu_buffer = render_device.create_buffer(&BufferDescriptor { let cpu_buffer = render_device.create_buffer(&BufferDescriptor {
label: Some("readback_buffer"), label: Some("readback_buffer"),
size: (BUFFER_LEN * std::mem::size_of::<u32>()) as u64, size: (BUFFER_LEN * size_of::<u32>()) as u64,
usage: BufferUsages::MAP_READ | BufferUsages::COPY_DST, usage: BufferUsages::MAP_READ | BufferUsages::COPY_DST,
mapped_at_creation: false, mapped_at_creation: false,
}); });
@ -250,7 +251,7 @@ fn map_and_read_buffer(
{ {
let buffer_view = buffer_slice.get_mapped_range(); let buffer_view = buffer_slice.get_mapped_range();
let data = buffer_view let data = buffer_view
.chunks(std::mem::size_of::<u32>()) .chunks(size_of::<u32>())
.map(|chunk| u32::from_ne_bytes(chunk.try_into().expect("should be a u32"))) .map(|chunk| u32::from_ne_bytes(chunk.try_into().expect("should be a u32")))
.collect::<Vec<u32>>(); .collect::<Vec<u32>>();
sender sender
@ -306,7 +307,7 @@ impl render_graph::Node for ComputeNode {
0, 0,
&buffers.cpu_buffer, &buffers.cpu_buffer,
0, 0,
(BUFFER_LEN * std::mem::size_of::<u32>()) as u64, (BUFFER_LEN * size_of::<u32>()) as u64,
); );
Ok(()) Ok(())

View file

@ -27,6 +27,7 @@ use bevy::{
}, },
}; };
use bytemuck::{Pod, Zeroable}; use bytemuck::{Pod, Zeroable};
use std::mem::size_of;
/// This example uses a shader source file from the assets subdirectory /// This example uses a shader source file from the assets subdirectory
const SHADER_ASSET_PATH: &str = "shaders/instancing.wgsl"; const SHADER_ASSET_PATH: &str = "shaders/instancing.wgsl";
@ -212,7 +213,7 @@ impl SpecializedMeshPipeline for CustomPipeline {
descriptor.vertex.shader = self.shader.clone(); descriptor.vertex.shader = self.shader.clone();
descriptor.vertex.buffers.push(VertexBufferLayout { descriptor.vertex.buffers.push(VertexBufferLayout {
array_stride: std::mem::size_of::<InstanceData>() as u64, array_stride: size_of::<InstanceData>() as u64,
step_mode: VertexStepMode::Instance, step_mode: VertexStepMode::Instance,
attributes: vec![ attributes: vec![
VertexAttribute { VertexAttribute {

View file

@ -284,7 +284,7 @@ fn main() {
let reports_path = "example-showcase-reports"; let reports_path = "example-showcase-reports";
if report_details { if report_details {
std::fs::create_dir(reports_path) fs::create_dir(reports_path)
.expect("Failed to create example-showcase-reports directory"); .expect("Failed to create example-showcase-reports directory");
} }