Mirror of https://github.com/bevyengine/bevy (synced 2024-11-25 06:00:20 +00:00)
Fix errors and panics to typical Rust conventions (#968)
Parent: f54eb12efc
Commit: 9f4c8b1b9a
65 changed files with 171 additions and 161 deletions
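The diff below applies one consistent message style: `#[error(...)]` strings (rendered through `Display`) become lowercase with no trailing period, while `panic!` and `expect` messages read as full sentences, capitalized and ending in a period, with type and crate names in backticks. A minimal sketch of that convention follows; the `StoreError` enum and `load` function are invented for illustration and do not appear anywhere in this commit.

use thiserror::Error;

// Error (Display) messages: lowercase, no trailing period, identifiers in backticks.
#[derive(Error, Debug)]
pub enum StoreError {
    #[error("store path is not a directory")]
    NotADirectory(String),
    #[error("no `Loader` found for the given extension")]
    MissingLoader(Option<String>),
}

fn load(path: &str) -> Result<(), StoreError> {
    // Panic and expect messages: full sentences, capitalized, ending in a period.
    let name = path.split('#').next().expect("Path must be set.");
    if name.is_empty() {
        panic!("Attempted to load an asset with an empty path.");
    }
    Ok(())
}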
@@ -38,7 +38,10 @@ impl PluginGroupBuilder {
 .find(|(_i, ty)| **ty == TypeId::of::<Target>())
 .map(|(i, _)| i)
 .unwrap_or_else(|| {
-panic!("Plugin does not exist: {}", std::any::type_name::<Target>())
+panic!(
+"Plugin does not exist: {}.",
+std::any::type_name::<Target>()
+)
 });
 self.order.insert(target_index, TypeId::of::<T>());
 self.plugins.insert(

@@ -59,7 +62,10 @@ impl PluginGroupBuilder {
 .find(|(_i, ty)| **ty == TypeId::of::<Target>())
 .map(|(i, _)| i)
 .unwrap_or_else(|| {
-panic!("Plugin does not exist: {}", std::any::type_name::<Target>())
+panic!(
+"Plugin does not exist: {}.",
+std::any::type_name::<Target>()
+)
 });
 self.order.insert(target_index + 1, TypeId::of::<T>());
 self.plugins.insert(

@@ -76,7 +82,7 @@ impl PluginGroupBuilder {
 let mut plugin_entry = self
 .plugins
 .get_mut(&TypeId::of::<T>())
-.expect("Cannot enable a plugin that does not exist");
+.expect("Cannot enable a plugin that does not exist.");
 plugin_entry.enabled = true;
 self
 }

@@ -85,7 +91,7 @@ impl PluginGroupBuilder {
 let mut plugin_entry = self
 .plugins
 .get_mut(&TypeId::of::<T>())
-.expect("Cannot disable a plugin that does not exist");
+.expect("Cannot disable a plugin that does not exist.");
 plugin_entry.enabled = false;
 self
 }
@@ -113,7 +113,7 @@ impl Plugin for ScheduleRunnerPlugin {
 f.as_ref().unchecked_ref(),
 dur.as_millis() as i32,
 )
-.expect("should register `setTimeout`");
+.expect("Should register `setTimeout`.");
 }
 let asap = Duration::from_millis(1);
@@ -16,15 +16,15 @@ use thiserror::Error;
 /// Errors that occur while loading assets with an AssetServer
 #[derive(Error, Debug)]
 pub enum AssetServerError {
-#[error("Asset folder path is not a directory.")]
+#[error("asset folder path is not a directory")]
 AssetFolderNotADirectory(String),
-#[error("No AssetLoader found for the given extension.")]
+#[error("no AssetLoader found for the given extension")]
 MissingAssetLoader(Option<String>),
-#[error("The given type does not match the type of the loaded asset.")]
+#[error("the given type does not match the type of the loaded asset")]
 IncorrectHandleType,
-#[error("Encountered an error while loading an asset.")]
+#[error("encountered an error while loading an asset")]
 AssetLoaderError(anyhow::Error),
-#[error("PathLoader encountered an error")]
+#[error("`PathLoader` encountered an error")]
 PathLoaderError(#[from] AssetIoError),
 }

@@ -238,7 +238,7 @@ impl AssetServer {
 let mut asset_sources = self.server.asset_sources.write();
 let source_info = asset_sources
 .get_mut(&asset_path_id.source_path_id())
-.expect("AssetSource should exist at this point");
+.expect("`AssetSource` should exist at this point.");
 if version != source_info.version {
 return Ok(asset_path_id);
 }

@@ -317,7 +317,7 @@ impl AssetServer {
 continue;
 }
 let handle =
-self.load_untyped(child_path.to_str().expect("Path should be a valid string"));
+self.load_untyped(child_path.to_str().expect("Path should be a valid string."));
 handles.push(handle);
 }
 }

@@ -334,7 +334,7 @@ impl AssetServer {
 let ref_change = match receiver.try_recv() {
 Ok(ref_change) => ref_change,
 Err(TryRecvError::Empty) => break,
-Err(TryRecvError::Disconnected) => panic!("RefChange channel disconnected"),
+Err(TryRecvError::Disconnected) => panic!("RefChange channel disconnected."),
 };
 match ref_change {
 RefChange::Increment(handle_id) => *ref_counts.entry(handle_id).or_insert(0) += 1,

@@ -377,7 +377,7 @@ impl AssetServer {
 let asset_value = asset
 .value
 .take()
-.expect("Asset should exist at this point");
+.expect("Asset should exist at this point.");
 if let Some(asset_lifecycle) = asset_lifecycles.get(&asset_value.type_uuid()) {
 let asset_path =
 AssetPath::new_ref(&load_context.path, label.as_ref().map(|l| l.as_str()));

@@ -431,7 +431,7 @@ impl AssetServer {
 Err(TryRecvError::Empty) => {
 break;
 }
-Err(TryRecvError::Disconnected) => panic!("AssetChannel disconnected"),
+Err(TryRecvError::Disconnected) => panic!("AssetChannel disconnected."),
 }
 }
 }
@@ -12,9 +12,9 @@ impl Default for FilesystemWatcher {
 fn default() -> Self {
 let (sender, receiver) = crossbeam_channel::unbounded();
 let watcher: RecommendedWatcher = Watcher::new_immediate(move |res| {
-sender.send(res).expect("Watch event send failure");
+sender.send(res).expect("Watch event send failure.");
 })
-.expect("Failed to create filesystem watcher");
+.expect("Failed to create filesystem watcher.");
 FilesystemWatcher { watcher, receiver }
 }
 }

@@ -274,7 +274,7 @@ impl HandleUntyped {
 pub fn typed<T: Asset>(mut self) -> Handle<T> {
 if let HandleId::Id(type_uuid, _) = self.id {
 if T::TYPE_UUID != type_uuid {
-panic!("attempted to convert handle to invalid type");
+panic!("Attempted to convert handle to invalid type.");
 }
 }
 let handle_type = match &self.handle_type {
@@ -122,7 +122,7 @@ pub fn filesystem_watcher_system(asset_server: Res<AssetServer>) {
 let event = match watcher.receiver.try_recv() {
 Ok(result) => result.unwrap(),
 Err(TryRecvError::Empty) => break,
-Err(TryRecvError::Disconnected) => panic!("FilesystemWatcher disconnected"),
+Err(TryRecvError::Disconnected) => panic!("FilesystemWatcher disconnected."),
 };
 if let notify::event::Event {
 kind: notify::event::EventKind::Modify(_),

@@ -24,11 +24,11 @@ use thiserror::Error;
 /// Errors that occur while loading assets
 #[derive(Error, Debug)]
 pub enum AssetIoError {
-#[error("Path not found")]
+#[error("path not found")]
 NotFound(PathBuf),
-#[error("Encountered an io error while loading asset.")]
+#[error("encountered an io error while loading asset")]
 Io(#[from] io::Error),
-#[error("Failed to watch path")]
+#[error("failed to watch path")]
 PathWatchError(PathBuf),
 }
@@ -55,7 +55,7 @@ impl Plugin for AssetPlugin {
 let task_pool = app
 .resources()
 .get::<IoTaskPool>()
-.expect("IoTaskPool resource not found")
+.expect("`IoTaskPool` resource not found.")
 .0
 .clone();

@@ -152,7 +152,10 @@ impl<T: AssetDynamic> AssetLifecycle for AssetLifecycleChannel<T> {
 }))
 .unwrap()
 } else {
-panic!("failed to downcast asset to {}", std::any::type_name::<T>());
+panic!(
+"Failed to downcast asset to {}.",
+std::any::type_name::<T>()
+);
 }
 }
@@ -143,7 +143,7 @@ impl<'a, 'b> From<&'a AssetPath<'b>> for AssetPathId {
 impl<'a> From<&'a str> for AssetPath<'a> {
 fn from(asset_path: &'a str) -> Self {
 let mut parts = asset_path.split('#');
-let path = Path::new(parts.next().expect("path must be set"));
+let path = Path::new(parts.next().expect("Path must be set."));
 let label = parts.next();
 AssetPath {
 path: Cow::Borrowed(path),
@@ -5,7 +5,7 @@ use syn::{parse_macro_input, ItemFn};
 pub fn bevy_main(_attr: TokenStream, item: TokenStream) -> TokenStream {
 let input = parse_macro_input!(item as ItemFn);
 if input.sig.ident != "main" {
-panic!("bevy_main can only be used on a function called 'main'")
+panic!("`bevy_main` can only be used on a function called 'main'.")
 }
 TokenStream::from(quote! {
@@ -10,7 +10,7 @@ pub fn derive_bytes(input: TokenStream) -> TokenStream {
 fields: Fields::Named(fields),
 ..
 }) => &fields.named,
-_ => panic!("expected a struct with named fields"),
+_ => panic!("Expected a struct with named fields."),
 };
 let modules = get_modules(&ast.attrs);
@@ -36,7 +36,7 @@ pub fn derive_render_resources(input: TokenStream) -> TokenStream {
 }
 Ok(())
 })
-.expect("invalid 'render_resources' attribute format");
+.expect("Invalid 'render_resources' attribute format.");
 attributes
 });

@@ -77,7 +77,7 @@ pub fn derive_render_resources(input: TokenStream) -> TokenStream {
 fields: Fields::Named(fields),
 ..
 }) => &fields.named,
-_ => panic!("expected a struct with named fields"),
+_ => panic!("Expected a struct with named fields."),
 };
 let field_attributes = fields
 .iter()

@@ -102,7 +102,7 @@ pub fn derive_render_resources(input: TokenStream) -> TokenStream {
 }
 Ok(())
 })
-.expect("invalid 'render_resources' attribute format");
+.expect("Invalid 'render_resources' attribute format.");
 attributes
 }),
@@ -10,7 +10,7 @@ pub fn derive_from_resources(input: TokenStream) -> TokenStream {
 fields: Fields::Named(fields),
 ..
 }) => &fields.named,
-_ => panic!("expected a struct with named fields"),
+_ => panic!("Expected a struct with named fields."),
 };
 let modules = get_modules(&ast.attrs);

@@ -17,7 +17,7 @@ pub fn derive_shader_defs(input: TokenStream) -> TokenStream {
 fields: Fields::Named(fields),
 ..
 }) => &fields.named,
-_ => panic!("expected a struct with named fields"),
+_ => panic!("Expected a struct with named fields."),
 };
 let shader_def_idents = fields
@@ -357,7 +357,7 @@ pub fn derive_system_param(input: TokenStream) -> TokenStream {
 fields: Fields::Named(fields),
 ..
 }) => &fields.named,
-_ => panic!("expected a struct with named fields"),
+_ => panic!("Expected a struct with named fields."),
 };
 let manifest = Manifest::new().unwrap();

@@ -386,7 +386,7 @@ pub fn derive_system_param(input: TokenStream) -> TokenStream {
 }
 Ok(())
 })
-.expect("invalid 'render_resources' attribute format");
+.expect("Invalid 'render_resources' attribute format.");
 attributes
 }),
@@ -59,7 +59,7 @@ impl Archetype {
 "attempted to allocate entity with duplicate components; \
 each type must occur at most once!"
 ),
-core::cmp::Ordering::Greater => panic!("type info is unsorted"),
+core::cmp::Ordering::Greater => panic!("Type info is unsorted."),
 });
 }

@@ -160,7 +160,7 @@ impl Archetype {
 .get(&TypeId::of::<T>())
 .map_or(false, |x| !x.borrow.borrow())
 {
-panic!("{} already borrowed uniquely", type_name::<T>());
+panic!("{} already borrowed uniquely.", type_name::<T>());
 }
 }

@@ -172,7 +172,7 @@ impl Archetype {
 .get(&TypeId::of::<T>())
 .map_or(false, |x| !x.borrow.borrow_mut())
 {
-panic!("{} already borrowed", type_name::<T>());
+panic!("{} already borrowed.", type_name::<T>());
 }
 }
@@ -90,7 +90,7 @@ impl Entities {
 id: u32::try_from(self.meta.len())
 .ok()
 .and_then(|x| x.checked_add(n))
-.expect("too many entities"),
+.expect("Too many entities."),
 }
 }
 // The freelist has entities in it, so move the last entry to the reserved list, to

@@ -5,7 +5,7 @@ use thiserror::Error;
 #[derive(Error, Debug)]
 pub enum MapEntitiesError {
-#[error("The given entity does not exist in the map.")]
+#[error("the given entity does not exist in the map")]
 EntityNotFound(Entity),
 }
@@ -233,7 +233,7 @@ impl Resources {
 Ordering::Equal => {
 storage.push(resource);
 }
-Ordering::Greater => panic!("attempted to access index beyond 'current_capacity + 1'"),
+Ordering::Greater => panic!("Attempted to access index beyond 'current_capacity + 1'."),
 Ordering::Less => {
 *storage.get_mut(index).unwrap() = resource;
 }

@@ -279,7 +279,7 @@ impl Resources {
 .unwrap();
 resources.get_unsafe_ref(index)
 })
-.unwrap_or_else(|| panic!("Resource does not exist {}", std::any::type_name::<T>()))
+.unwrap_or_else(|| panic!("Resource does not exist {}.", std::any::type_name::<T>()))
 }
 #[inline]

@@ -301,7 +301,7 @@ impl Resources {
 NonNull::new_unchecked(resources.stored[index].mutated.get()),
 )
 })
-.unwrap_or_else(|| panic!("Resource does not exist {}", std::any::type_name::<T>()))
+.unwrap_or_else(|| panic!("Resource does not exist {}.", std::any::type_name::<T>()))
 }
 #[inline]

@@ -372,7 +372,7 @@ impl<'a, T: 'static> ResourceRef<'a, T> {
 }
 } else {
 panic!(
-"Failed to acquire shared lock on resource: {}",
+"Failed to acquire shared lock on resource: {}.",
 std::any::type_name::<T>()
 );
 }

@@ -432,7 +432,7 @@ impl<'a, T: 'static> ResourceRefMut<'a, T> {
 }
 } else {
 panic!(
-"Failed to acquire exclusive lock on resource: {}",
+"Failed to acquire exclusive lock on resource: {}.",
 std::any::type_name::<T>()
 );
 }
@@ -289,7 +289,7 @@ impl ExecutorStage {
 self.ready_events_of_dependents[system_index].push(
 self.ready_events[*dependent_system]
 .as_ref()
-.expect("A dependent task should have a non-None ready event")
+.expect("A dependent task should have a non-None ready event.")
 .clone(),
 );
 }

@@ -318,7 +318,7 @@ impl ExecutorStage {
 if dependency_count > 0 {
 self.ready_events[system_index]
 .as_ref()
-.expect("A system with >0 dependency count should have a non-None ready event")
+.expect("A system with >0 dependency count should have a non-None ready event.")
 .reset(dependency_count as isize)
 }
 }
@@ -43,7 +43,7 @@ impl Schedule {
 pub fn add_stage(&mut self, stage: impl Into<Cow<'static, str>>) {
 let stage: Cow<str> = stage.into();
 if self.stages.get(&stage).is_some() {
-panic!("Stage already exists: {}", stage);
+panic!("Stage already exists: {}.", stage);
 } else {
 self.stages.insert(stage.clone(), Vec::new());
 self.stage_order.push(stage);

@@ -58,7 +58,7 @@ impl Schedule {
 let target: Cow<str> = target.into();
 let stage: Cow<str> = stage.into();
 if self.stages.get(&stage).is_some() {
-panic!("Stage already exists: {}", stage);
+panic!("Stage already exists: {}.", stage);
 }
 let target_index = self

@@ -67,7 +67,7 @@ impl Schedule {
 .enumerate()
 .find(|(_i, stage)| **stage == target)
 .map(|(i, _)| i)
-.unwrap_or_else(|| panic!("Target stage does not exist: {}", target));
+.unwrap_or_else(|| panic!("Target stage does not exist: {}.", target));
 self.stages.insert(stage.clone(), Vec::new());
 self.stage_order.insert(target_index + 1, stage);

@@ -81,7 +81,7 @@ impl Schedule {
 let target: Cow<str> = target.into();
 let stage: Cow<str> = stage.into();
 if self.stages.get(&stage).is_some() {
-panic!("Stage already exists: {}", stage);
+panic!("Stage already exists: {}.", stage);
 }
 let target_index = self

@@ -90,7 +90,7 @@ impl Schedule {
 .enumerate()
 .find(|(_i, stage)| **stage == target)
 .map(|(i, _)| i)
-.unwrap_or_else(|| panic!("Target stage does not exist: {}", target));
+.unwrap_or_else(|| panic!("Target stage does not exist: {}.", target));
 self.stages.insert(stage.clone(), Vec::new());
 self.stage_order.insert(target_index, stage);

@@ -125,10 +125,10 @@ impl Schedule {
 let systems = self
 .stages
 .get_mut(&stage_name)
-.unwrap_or_else(|| panic!("Stage does not exist: {}", stage_name));
+.unwrap_or_else(|| panic!("Stage does not exist: {}.", stage_name));
 if self.system_ids.contains(&system.id()) {
 panic!(
-"System with id {:?} ({}) already exists",
+"System with id {:?} ({}) already exists.",
 system.id(),
 system.name()
 );

@@ -163,7 +163,7 @@ impl Schedule {
 .unwrap_or_else(|| panic!("Stage does not exist: {}", stage_name));
 if self.system_ids.contains(&system.id()) {
 panic!(
-"System with id {:?} ({}) already exists",
+"System with id {:?} ({}) already exists.",
 system.id(),
 system.name()
 );
@@ -193,7 +193,7 @@ impl Commands {
 let entity = self
 .entity_reserver
 .as_ref()
-.expect("entity reserver has not been set")
+.expect("Entity reserver has not been set.")
 .reserve_entity();
 self.current_entity = Some(entity);
 self.commands.push(Box::new(Insert { entity, components }));
@@ -64,7 +64,7 @@ impl SystemState {
 conflicts_with_index = Some(prior_index);
 }
 }
-panic!("System {} has conflicting queries. {} conflicts with the component access [{}] in this prior query: {}",
+panic!("System {} has conflicting queries. {} conflicts with the component access [{}] in this prior query: {}.",
 self.name,
 self.query_type_names[conflict_index],
 conflict_name.unwrap_or("Unknown"),
@@ -32,23 +32,23 @@ use thiserror::Error;
 /// An error that occurs when loading a GLTF file
 #[derive(Error, Debug)]
 pub enum GltfError {
-#[error("Unsupported primitive mode.")]
+#[error("unsupported primitive mode")]
 UnsupportedPrimitive { mode: Mode },
-#[error("Unsupported min filter.")]
+#[error("unsupported min filter")]
 UnsupportedMinFilter { filter: MinFilter },
-#[error("Invalid GLTF file.")]
+#[error("invalid GLTF file")]
 Gltf(#[from] gltf::Error),
-#[error("Binary blob is missing.")]
+#[error("binary blob is missing")]
 MissingBlob,
-#[error("Failed to decode base64 mesh data.")]
+#[error("failed to decode base64 mesh data")]
 Base64Decode(#[from] base64::DecodeError),
-#[error("Unsupported buffer format.")]
+#[error("unsupported buffer format")]
 BufferFormatUnsupported,
-#[error("Invalid image mime type.")]
+#[error("invalid image mime type")]
 InvalidImageMimeType(String),
-#[error("Failed to load an image.")]
+#[error("failed to load an image")]
 ImageError(#[from] image::ImageError),
-#[error("Failed to load an asset path.")]
+#[error("failed to load an asset path")]
 AssetIoError(#[from] AssetIoError),
 }
@@ -59,13 +59,13 @@ impl Plugin for LogPlugin {
 app.resources_mut().insert_thread_local(guard);
 let subscriber = subscriber.with(chrome_layer);
 bevy_utils::tracing::subscriber::set_global_default(subscriber)
-.expect("Could not set global default tracing subscriber");
+.expect("Could not set global default tracing subscriber.");
 }
 #[cfg(not(feature = "tracing-chrome"))]
 {
 bevy_utils::tracing::subscriber::set_global_default(subscriber)
-.expect("Could not set global default tracing subscriber");
+.expect("Could not set global default tracing subscriber.");
 }
 }

@@ -76,14 +76,14 @@ impl Plugin for LogPlugin {
 tracing_wasm::WASMLayerConfig::default(),
 ));
 bevy_utils::tracing::subscriber::set_global_default(subscriber)
-.expect("Could not set global default tracing subscriber");
+.expect("Could not set global default tracing subscriber.");
 }
 #[cfg(target_os = "android")]
 {
 let subscriber = subscriber.with(android_tracing::AndroidLayer::default());
 bevy_utils::tracing::subscriber::set_global_default(subscriber)
-.expect("Could not set global default tracing subscriber");
+.expect("Could not set global default tracing subscriber.");
 }
 }
 }
@@ -86,7 +86,7 @@ pub fn derive_reflect(input: TokenStream) -> TokenStream {
 }
 Ok(())
 })
-.expect("invalid 'property' attribute format");
+.expect("Invalid 'property' attribute format.");
 attribute_args
 }),

@@ -295,7 +295,7 @@ fn impl_struct(
 self.field_mut(name).map(|v| v.apply(value));
 }
 } else {
-panic!("attempted to apply non-struct type to struct type");
+panic!("Attempted to apply non-struct type to struct type.");
 }
 }

@@ -414,7 +414,7 @@ fn impl_tuple_struct(
 self.field_mut(i).map(|v| v.apply(value));
 }
 } else {
-panic!("attempted to apply non-TupleStruct type to TupleStruct type");
+panic!("Attempted to apply non-TupleStruct type to TupleStruct type.");
 }
 }

@@ -483,7 +483,7 @@ fn impl_value(
 if let Some(value) = value.downcast_ref::<Self>() {
 *self = value.clone();
 } else {
-panic!("value is not {}", std::any::type_name::<Self>());
+panic!("Value is not {}.", std::any::type_name::<Self>());
 }
 }
@@ -35,17 +35,17 @@ pub fn type_uuid_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStre
 let uuid_str = match name_value.lit {
 Lit::Str(lit_str) => lit_str,
-_ => panic!("uuid attribute must take the form `#[uuid = \"xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx\"`"),
+_ => panic!("`uuid` attribute must take the form `#[uuid = \"xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx\"`."),
 };
 uuid = Some(
 Uuid::parse_str(&uuid_str.value())
-.expect("Value specified to `#[uuid]` attribute is not a valid UUID"),
+.expect("Value specified to `#[uuid]` attribute is not a valid UUID."),
 );
 }
 let uuid =
-uuid.expect("No `#[uuid = \"xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx\"` attribute found");
+uuid.expect("No `#[uuid = \"xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx\"` attribute found.");
 let bytes = uuid
 .as_bytes()
 .iter()

@@ -79,7 +79,7 @@ impl Parse for ExternalDeriveInput {
 pub fn external_type_uuid(tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {
 let ExternalDeriveInput { path, uuid_str } = parse_macro_input!(tokens as ExternalDeriveInput);
-let uuid = Uuid::parse_str(&uuid_str.value()).expect("Value was not a valid UUID");
+let uuid = Uuid::parse_str(&uuid_str.value()).expect("Value was not a valid UUID.");
 let bytes = uuid
 .as_bytes()
@@ -30,7 +30,7 @@ where
 fn push(&mut self, value: Box<dyn Reflect>) {
 let value = value.take::<T::Item>().unwrap_or_else(|value| {
 panic!(
-"Attempted to push invalid value of type {}",
+"Attempted to push invalid value of type {}.",
 value.type_name()
 )
 });

@@ -51,7 +51,7 @@ impl<T: Reflect> List for Vec<T> {
 fn push(&mut self, value: Box<dyn Reflect>) {
 let value = value.take::<T>().unwrap_or_else(|value| {
 panic!(
-"Attempted to push invalid value of type {}",
+"Attempted to push invalid value of type {}.",
 value.type_name()
 )
 });

@@ -166,7 +166,7 @@ impl<K: Reflect + Clone + Eq + Hash, V: Reflect + Clone> Reflect for HashMap<K,
 }
 }
 } else {
-panic!("attempted to apply a non-map type to a map type");
+panic!("Attempted to apply a non-map type to a map type.");
 }
 }
@@ -98,7 +98,7 @@ mod tests {
 if let ReflectRef::Struct(value) = c.reflect_ref() {
 assert_eq!(*value.get_field::<u32>("x").unwrap(), 1);
 } else {
-panic!("expected a struct");
+panic!("Expected a struct.");
 }
 // patch Foo with a dynamic struct

@@ -152,7 +152,7 @@ pub fn list_apply<L: List>(a: &mut L, b: &dyn Reflect) {
 }
 }
 } else {
-panic!("attempted to apply a non-list type to a list type");
+panic!("Attempted to apply a non-list type to a list type.");
 }
 }

@@ -109,7 +109,7 @@ impl Reflect for DynamicMap {
 }
 }
 } else {
-panic!("attempted to apply a non-map type to a map type");
+panic!("Attempted to apply a non-map type to a map type.");
 }
 }
@@ -5,28 +5,28 @@ use thiserror::Error;
 #[derive(Debug, PartialEq, Eq, Error)]
 pub enum ReflectPathError<'a> {
-#[error("Expected an identifier at the given index")]
+#[error("expected an identifier at the given index")]
 ExpectedIdent { index: usize },
-#[error("The current struct doesn't have a field with the given name")]
+#[error("the current struct doesn't have a field with the given name")]
 InvalidField { index: usize, field: &'a str },
-#[error("The current tuple struct doesn't have a field with the given index")]
+#[error("the current tuple struct doesn't have a field with the given index")]
 InvalidTupleStructIndex {
 index: usize,
 tuple_struct_index: usize,
 },
-#[error("The current list doesn't have a value at the given index")]
+#[error("the current list doesn't have a value at the given index")]
 InvalidListIndex { index: usize, list_index: usize },
-#[error("Encountered an unexpected token")]
+#[error("encountered an unexpected token")]
 UnexpectedToken { index: usize, token: &'a str },
-#[error("Expected a token, but it wasn't there.")]
+#[error("expected a token, but it wasn't there.")]
 ExpectedToken { index: usize, token: &'a str },
-#[error("Expected a struct, but found a different reflect value")]
+#[error("expected a struct, but found a different reflect value")]
 ExpectedStruct { index: usize },
-#[error("Expected a list, but found a different reflect value")]
+#[error("expected a list, but found a different reflect value")]
 ExpectedList { index: usize },
-#[error("Failed to parse a usize")]
+#[error("failed to parse a usize")]
 IndexParseError(#[from] ParseIntError),
-#[error("Failed to downcast to the path result to the given type")]
+#[error("failed to downcast to the path result to the given type")]
 InvalidDowncast,
 }
@@ -207,7 +207,7 @@ impl Reflect for DynamicStruct {
 }
 }
 } else {
-panic!("attempted to apply non-struct type to struct type");
+panic!("Attempted to apply non-struct type to struct type.");
 }
 }

@@ -162,7 +162,7 @@ impl Reflect for DynamicTupleStruct {
 }
 }
 } else {
-panic!("attempted to apply non-TupleStruct type to TupleStruct type");
+panic!("Attempted to apply non-TupleStruct type to TupleStruct type.");
 }
 }
@@ -109,13 +109,13 @@ impl Draw {
 #[derive(Debug, Error)]
 pub enum DrawError {
-#[error("Pipeline does not exist.")]
+#[error("pipeline does not exist")]
 NonExistentPipeline,
-#[error("No pipeline set")]
+#[error("no pipeline set")]
 NoPipelineSet,
-#[error("Pipeline has no layout")]
+#[error("pipeline has no layout")]
 PipelineHasNoLayout,
-#[error("Failed to get a buffer for the given RenderResource.")]
+#[error("failed to get a buffer for the given `RenderResource`")]
 BufferAllocationFailure,
 }
@@ -278,7 +278,7 @@ impl From<Icosphere> for Mesh {
 let number_of_resulting_points = (subdivisions * subdivisions * 10) + 2;
 panic!(
-"Cannot create an icosphere of {} subdivisions due to there being too many vertices being generated: {} (Limited to 65535 vertices or 79 subdivisions)",
+"Cannot create an icosphere of {} subdivisions due to there being too many vertices being generated: {}. (Limited to 65535 vertices or 79 subdivisions)",
 sphere.subdivisions,
 number_of_resulting_points
 );

@@ -221,7 +221,7 @@ impl PipelineCompiler {
 .push(compiled_vertex_attribute);
 } else {
 panic!(
-"Attribute {} is required by shader, but not supplied by mesh. Either remove the attribute from the shader or supply the attribute ({}) to the mesh. ",
+"Attribute {} is required by shader, but not supplied by mesh. Either remove the attribute from the shader or supply the attribute ({}) to the mesh.",
 shader_vertex_attribute.name,
 shader_vertex_attribute.name,
 );
@@ -34,7 +34,7 @@ impl PipelineLayout {
 || binding.name != shader_binding.name
 || binding.index != shader_binding.index
 {
-panic!("Binding {} in BindGroup {} does not match across all shader types: {:?} {:?}", binding.index, bind_group.index, binding, shader_binding);
+panic!("Binding {} in BindGroup {} does not match across all shader types: {:?} {:?}.", binding.index, bind_group.index, binding, shader_binding);
 }
 } else {
 bind_group.bindings.push(shader_binding.clone());
@@ -21,26 +21,26 @@ use thiserror::Error;
 #[derive(Error, Debug, Eq, PartialEq)]
 pub enum RenderGraphError {
-#[error("Node does not exist")]
+#[error("node does not exist")]
 InvalidNode(NodeLabel),
-#[error("Node slot does not exist")]
+#[error("node slot does not exist")]
 InvalidNodeSlot(SlotLabel),
-#[error("Node does not match the given type")]
+#[error("node does not match the given type")]
 WrongNodeType,
-#[error("Attempted to connect a node output slot to an incompatible input node slot")]
+#[error("attempted to connect a node output slot to an incompatible input node slot")]
 MismatchedNodeSlots {
 output_node: NodeId,
 output_slot: usize,
 input_node: NodeId,
 input_slot: usize,
 },
-#[error("Attempted to add an edge that already exists")]
+#[error("attempted to add an edge that already exists")]
 EdgeAlreadyExists(Edge),
-#[error("Node has an unconnected input slot.")]
+#[error("node has an unconnected input slot")]
 UnconnectedNodeInputSlot { node: NodeId, input_slot: usize },
-#[error("Node has an unconnected output slot.")]
+#[error("node has an unconnected output slot")]
 UnconnectedNodeOutputSlot { node: NodeId, output_slot: usize },
-#[error("Node input slot already occupied")]
+#[error("node input slot already occupied")]
 NodeInputSlotAlreadyOccupied {
 node: NodeId,
 input_slot: usize,
@@ -274,7 +274,7 @@ where
 {
 dynamic_index
 } else {
-panic!("dynamic index should always be set");
+panic!("Dynamic index should always be set.");
 };
 render_resource_bindings.set(render_resource_name, binding);
 (buffer_array.buffer.unwrap(), dynamic_index)

@@ -49,7 +49,7 @@ impl Node for WindowSwapChainNode {
 let window = windows
 .get(self.window_id)
-.expect("Received window resized event for non-existent window");
+.expect("Received window resized event for non-existent window.");
 let render_resource_context = render_context.resources_mut();
@@ -52,7 +52,7 @@ impl Node for WindowTextureNode {
 let window = windows
 .get(self.window_id)
-.expect("Received window resized event for non-existent window");
+.expect("Received window resized event for non-existent window.");
 if self
 .window_created_event_reader

@@ -4,7 +4,8 @@ use thiserror::Error;
 #[derive(Error, Debug)]
 pub enum StagerError {
-#[error("Encountered a RenderGraphError")]
+// This might have to be `:` tagged at the end.
+#[error("encountered a `RenderGraphError`")]
 RenderGraphError(#[from] RenderGraphError),
 }
@@ -211,7 +212,7 @@ fn stage_node(
 .map(|e| {
 node_stages_and_jobs
 .get(&e.get_output_node())
-.expect("already checked that parents were visited")
+.expect("Already checked that parents were visited.")
 })
 .max()
 {

@@ -223,7 +224,7 @@ fn stage_node(
 .filter(|e| {
 let (max_stage, _) = node_stages_and_jobs
 .get(&e.get_output_node())
-.expect("already checked that parents were visited");
+.expect("Already checked that parents were visited.");
 max_stage == max_parent_stage
 })
 .count();
@@ -160,7 +160,7 @@ impl RenderResourceBindings {
 BindGroupStatus::Changed(id) => {
 let bind_group = self
 .get_bind_group(id)
-.expect("RenderResourceSet was just changed, so it should exist");
+.expect("`RenderResourceSet` was just changed, so it should exist.");
 render_resource_context.create_bind_group(bind_group_descriptor.id, bind_group);
 }
 BindGroupStatus::Unchanged(id) => {

@@ -168,7 +168,7 @@ impl RenderResourceBindings {
 // when a stale bind group has been removed
 let bind_group = self
 .get_bind_group(id)
-.expect("RenderResourceSet was just changed, so it should exist");
+.expect("`RenderResourceSet` was just changed, so it should exist.");
 render_resource_context.create_bind_group(bind_group_descriptor.id, bind_group);
 }
 BindGroupStatus::NoMatch => {

@@ -297,7 +297,7 @@ mod tests {
 let id = if let BindGroupStatus::Changed(id) = status {
 id
 } else {
-panic!("expected a changed bind group");
+panic!("Expected a changed bind group.");
 };
 let different_bind_group_status =

@@ -309,7 +309,7 @@ mod tests {
 );
 different_bind_group_id
 } else {
-panic!("expected a changed bind group");
+panic!("Expected a changed bind group.");
 };
 let equal_bind_group_status = equal_bindings.update_bind_group(&bind_group_descriptor);

@@ -319,7 +319,7 @@ mod tests {
 "equal bind group should have the same id"
 );
 } else {
-panic!("expected a changed bind group");
+panic!("Expected a changed bind group.");
 };
 let mut unmatched_bindings = RenderResourceBindings::default();
@@ -137,13 +137,13 @@ impl Shader {
 enforce_bevy_conventions,
 ))
 } else {
-panic!("Cannot reflect layout of non-SpirV shader. Try compiling this shader to SpirV first using self.get_spirv_shader()");
+panic!("Cannot reflect layout of non-SpirV shader. Try compiling this shader to SpirV first using self.get_spirv_shader().");
 }
 }
 #[cfg(target_arch = "wasm32")]
 pub fn reflect_layout(&self, _enforce_bevy_conventions: bool) -> Option<ShaderLayout> {
-panic!("Cannot reflect layout on wasm32");
+panic!("Cannot reflect layout on wasm32.");
 }
 }
@@ -84,7 +84,7 @@ impl ShaderLayout {
 entry_point: entry_point_name,
 }
 }
-Err(err) => panic!("Failed to reflect shader layout: {:?}", err),
+Err(err) => panic!("Failed to reflect shader layout: {:?}.", err),
 }
 }
 }

@@ -108,7 +108,7 @@ fn reflect_dimension(type_description: &ReflectTypeDescription) -> TextureViewDi
 ReflectDimension::Type2d => TextureViewDimension::D2,
 ReflectDimension::Type3d => TextureViewDimension::D3,
 ReflectDimension::Cube => TextureViewDimension::Cube,
-dimension => panic!("unsupported image dimension: {:?}", dimension),
+dimension => panic!("Unsupported image dimension: {:?}.", dimension),
 }
 }

@@ -142,7 +142,7 @@ fn reflect_binding(
 ),
 // TODO: detect comparison "true" case: https://github.com/gpuweb/gpuweb/issues/552
 ReflectDescriptorType::Sampler => (&binding.name, BindType::Sampler { comparison: false }),
-_ => panic!("unsupported bind type {:?}", binding.descriptor_type),
+_ => panic!("Unsupported bind type {:?}.", binding.descriptor_type),
 };
 let mut shader_stage = match shader_stage {

@@ -199,7 +199,7 @@ fn reflect_uniform_numeric(type_description: &ReflectTypeDescription) -> Uniform
 match traits.numeric.scalar.signedness {
 0 => NumberType::UInt,
 1 => NumberType::Int,
-signedness => panic!("unexpected signedness {}", signedness),
+signedness => panic!("Unexpected signedness {}.", signedness),
 }
 } else if type_description
 .type_flags

@@ -207,7 +207,7 @@ fn reflect_uniform_numeric(type_description: &ReflectTypeDescription) -> Uniform
 {
 NumberType::Float
 } else {
-panic!("unexpected type flag {:?}", type_description.type_flags);
+panic!("Unexpected type flag {:?}.", type_description.type_flags);
 };
 // TODO: handle scalar width here

@@ -252,7 +252,7 @@ fn reflect_vertex_format(type_description: &ReflectTypeDescription) -> VertexFor
 match traits.numeric.scalar.signedness {
 0 => NumberType::UInt,
 1 => NumberType::Int,
-signedness => panic!("unexpected signedness {}", signedness),
+signedness => panic!("Unexpected signedness {}.", signedness),
 }
 } else if type_description
 .type_flags

@@ -260,7 +260,7 @@ fn reflect_vertex_format(type_description: &ReflectTypeDescription) -> VertexFor
 {
 NumberType::Float
 } else {
-panic!("unexpected type flag {:?}", type_description.type_flags);
+panic!("Unexpected type flag {:?}.", type_description.type_flags);
 };
 let width = traits.numeric.scalar.width;
@@ -74,11 +74,11 @@ impl Texture {
 debug_assert_eq!(
 pixel.len() % format.pixel_size(),
 0,
-"Must not have incomplete pixel data"
+"Must not have incomplete pixel data."
 );
 debug_assert!(
 pixel.len() <= value.data.len(),
-"Fill data must fit within pixel buffer"
+"Fill data must fit within pixel buffer."
 );
 for current_pixel in value.data.chunks_exact_mut(pixel.len()) {
@@ -7,7 +7,7 @@ use thiserror::Error;
 #[derive(Error, Debug)]
 pub enum DynamicSceneToWorldError {
-#[error("Scene contains an unregistered component.")]
+#[error("scene contains an unregistered component")]
 UnregisteredComponent { type_name: String },
 }

@@ -34,11 +34,11 @@ pub struct SceneSpawner {
 #[derive(Error, Debug)]
 pub enum SceneSpawnError {
-#[error("Scene contains an unregistered component.")]
+#[error("scene contains an unregistered component")]
 UnregisteredComponent { type_name: String },
-#[error("Scene does not exist. Perhaps it is still loading?")]
+#[error("scene does not exist")]
 NonExistentScene { handle: Handle<DynamicScene> },
-#[error("Scene does not exist. Perhaps it is still loading?")]
+#[error("scene does not exist")]
 NonExistentRealScene { handle: Handle<Scene> },
 }
@@ -25,7 +25,7 @@ impl Default for TextureAtlasBuilder {
 #[derive(Debug, Error)]
 pub enum RectanglePackError {
-#[error("Could not pack textures into an atlas within the given bounds")]
+#[error("could not pack textures into an atlas within the given bounds")]
 NotEnoughSpace,
 }
@@ -72,7 +72,7 @@ impl Drop for TaskPoolInner {
 for join_handle in self.threads.drain(..) {
 join_handle
 .join()
-.expect("task thread panicked while executing");
+.expect("Task thread panicked while executing.");
 }
 }
 }

@@ -132,7 +132,7 @@ impl TaskPool {
 // Use unwrap_err because we expect a Closed error
 future::block_on(shutdown_future).unwrap_err();
 })
-.expect("failed to spawn thread")
+.expect("Failed to spawn thread.")
 })
 .collect();
@@ -76,7 +76,7 @@ impl<'a> Drawable for DrawableText<'a> {
 {
 draw.set_vertex_buffer(0, vertex_attribute_buffer_id, 0);
 } else {
-println!("could not find vertex buffer for bevy_sprite::QUAD_HANDLE")
+println!("Could not find vertex buffer for `bevy_sprite::QUAD_HANDLE`.")
 }
 let mut indices = 0..0;

@@ -87,7 +87,7 @@ impl<'a> Drawable for DrawableText<'a> {
 if let Some(buffer_info) = render_resource_context.get_buffer_info(quad_index_buffer) {
 indices = 0..(buffer_info.size / 4) as u32;
 } else {
-panic!("expected buffer type");
+panic!("Expected buffer type.");
 }
 }
@@ -3,8 +3,8 @@ use thiserror::Error;
 #[derive(Debug, PartialEq, Eq, Error)]
 pub enum TextError {
-#[error("Font not found")]
+#[error("font not found")]
 NoSuchFont,
-#[error("Failed to add glyph to newly-created atlas {0:?}")]
+#[error("failed to add glyph to newly-created atlas {0:?}")]
 FailedToAddGlyph(GlyphId),
 }

@@ -55,7 +55,7 @@ impl GlyphBrush {
 return Ok(Vec::new());
 }
-let first_glyph = glyphs.first().expect("Must have at least one glyph");
+let first_glyph = glyphs.first().expect("Must have at least one glyph.");
 let font_id = first_glyph.font_id.0;
 let handle = &self.handles[font_id];
 let font = fonts.get(handle).ok_or(TextError::NoSuchFont)?;
@@ -133,7 +133,7 @@ fn add_text_to_pipeline(
 ) {
 Err(TextError::NoSuchFont) => TextPipelineResult::Reschedule,
 Err(e @ TextError::FailedToAddGlyph(_)) => {
-panic!("Fatal error when processing text: {}", e);
+panic!("Fatal error when processing text: {}.", e);
 }
 Ok(()) => TextPipelineResult::Ok,
 }
@@ -188,11 +188,11 @@ fn get_texture_view<'a>(
 TextureAttachment::Name(name) => match global_render_resource_bindings.get(&name) {
 Some(RenderResourceBinding::Texture(resource)) => refs.textures.get(&resource).unwrap(),
 _ => {
-panic!("Color attachment {} does not exist", name);
+panic!("Color attachment {} does not exist.", name);
 }
 },
 TextureAttachment::Id(render_resource) => refs.textures.get(&render_resource).unwrap_or_else(|| &refs.swap_chain_frames.get(&render_resource).unwrap().output.view),
-TextureAttachment::Input(_) => panic!("Encountered unset TextureAttachment::Input. The RenderGraph executor should always set TextureAttachment::Inputs to TextureAttachment::RenderResource before running. This is a bug"),
+TextureAttachment::Input(_) => panic!("Encountered unset `TextureAttachment::Input`. The `RenderGraph` executor should always set `TextureAttachment::Inputs` to `TextureAttachment::RenderResource` before running. This is a bug, please report it!"),
 }
 }
@@ -60,14 +60,14 @@ impl WgpuRenderGraphExecutor {
 let outputs = if let Some(outputs) = node_outputs.get(output_node) {
 outputs
 } else {
-panic!("node inputs not set")
+panic!("Node inputs not set.")
 };
 let output_resource =
-outputs.get(*output_index).expect("output should be set");
+outputs.get(*output_index).expect("Output should be set.");
 input_slot.resource = Some(output_resource);
 } else {
-panic!("no edge connected to input")
+panic!("No edge connected to input.")
 }
 }
 node_state.node.update(
@@ -279,7 +279,7 @@ impl RenderResourceContext for WgpuRenderResourceContext {
 let swap_chain_descriptor: wgpu::SwapChainDescriptor = window.wgpu_into();
 let surface = surfaces
 .get(&window.id())
-.expect("No surface found for window");
+.expect("No surface found for window.");
 let swap_chain = self
 .device
 .create_swap_chain(surface, &swap_chain_descriptor);

@@ -552,7 +552,7 @@ impl RenderResourceContext for WgpuRenderResourceContext {
 let data = buffer_slice.map_async(wgpu::MapMode::Write);
 self.device.poll(wgpu::Maintain::Wait);
 if future::block_on(data).is_err() {
-panic!("failed to map buffer to host");
+panic!("Failed to map buffer to host.");
 }
 }
@@ -97,7 +97,7 @@ impl<'a> RenderPass for WgpuRenderPass<'a> {
 .render_pipelines
 .get(pipeline_handle)
 .expect(
-"Attempted to use a pipeline that does not exist in this RenderPass's RenderContext",
+"Attempted to use a pipeline that does not exist in this `RenderPass`'s `RenderContext`.",
 );
 self.render_pass.set_pipeline(pipeline);
 }
@@ -78,7 +78,7 @@ impl WgpuRenderer {
 {
 let window = windows
 .get(window_created_event.id)
-.expect("Received window created event for non-existent window");
+.expect("Received window created event for non-existent window.");
 #[cfg(feature = "bevy_winit")]
 {
 let winit_windows = resources.get::<bevy_winit::WinitWindows>().unwrap();
@@ -59,12 +59,12 @@ impl WinitWindows {
 let document = window.document().unwrap();
 let canvas = document
 .query_selector(&selector)
-.expect("Cannot query for canvas element");
+.expect("Cannot query for canvas element.");
 if let Some(canvas) = canvas {
 let canvas = canvas.dyn_into::<web_sys::HtmlCanvasElement>().ok();
 winit_window_builder = winit_window_builder.with_canvas(canvas);
 } else {
-panic!("Cannot find element: {}", selector);
+panic!("Cannot find element: {}.", selector);
 }
 }
 }

@@ -96,7 +96,7 @@ impl WinitWindows {
 let body = document.body().unwrap();
 body.append_child(&canvas)
-.expect("Append canvas to HTML body");
+.expect("Append canvas to HTML body.");
 }
 }
@@ -289,16 +289,16 @@ fn move_system(time: Res<Time>, mut q: Query<(&Velocity, Mut<Transform>)>) {
 /// the program is run through `cargo`.
 fn contributors() -> Contributors {
 let manifest_dir = std::env::var("CARGO_MANIFEST_DIR")
-.expect("This example needs to run through `cargo run --example`");
+.expect("This example needs to run through `cargo run --example`.");
 let mut cmd = std::process::Command::new("git")
 .args(&["--no-pager", "log", "--pretty=format:%an"])
 .current_dir(manifest_dir)
 .stdout(Stdio::piped())
 .spawn()
-.expect("git needs to be installed");
+.expect("`git` needs to be installed.");
-let stdout = cmd.stdout.take().expect("Child should have a stdout");
+let stdout = cmd.stdout.take().expect("`Child` should have a stdout.");
 BufReader::new(stdout)
 .lines()

@@ -47,7 +47,7 @@ fn bounce_system(
 windows: Res<Windows>,
 mut sprites: Query<(&Transform, &mut Velocity)>,
 ) {
-let window = windows.get_primary().expect("No primary window");
+let window = windows.get_primary().expect("No primary window.");
 let width = window.width();
 let height = window.height();
 let left = width as f32 / -2.0;