Add core and alloc over std Lints (#15281)

# Objective

- Fixes #6370
- Closes #6581

## Solution

- Added the following lints to the workspace:
  - `std_instead_of_core`
  - `std_instead_of_alloc`
  - `alloc_instead_of_core`
- Used `cargo +nightly fmt` with [item-level use
formatting](https://rust-lang.github.io/rustfmt/?version=v1.6.0&search=#Item%5C%3A)
to split all `use` statements into single items.
- Used `cargo clippy --workspace --all-targets --all-features --fix
--allow-dirty` to _attempt_ to resolve the new linting issues, then
intervened where the lint could not resolve an issue automatically
(usually because an `extern crate alloc;` statement was needed in a crate
root; see the sketch after this list).
- Manually removed certain uses of `std` where negative feature gating
prevented `--all-features` from finding the offending uses.
- Used `cargo +nightly fmt` with [crate-level use
formatting](https://rust-lang.github.io/rustfmt/?version=v1.6.0&search=#Crate%5C%3A)
to re-merge all `use` statements, matching Bevy's previous styling.
- Manually fixed cases where the `fmt` tool could not re-merge `use`
statements due to conditional compilation attributes.
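
For a concrete picture of what these steps produce, here is a minimal sketch of
the kind of rewrite involved (the module and function names are illustrative and
not taken from this diff). The automated `--fix` pass handles the path swaps;
the `extern crate alloc;` line is the part that typically needed manual
intervention, and it only has effect at a crate root (e.g. `lib.rs`):

```rust
// Before this change, a crate might have written:
//     use std::{sync::Arc, time::Duration};
// With the new lints, the same items come from `alloc` and `core` instead.
// `alloc` is not in the extern prelude, so the crate root needs this line:
extern crate alloc;

use alloc::sync::Arc;
use core::time::Duration;

/// Illustrative only: returns a shared half-second duration.
pub fn shared_timeout() -> Arc<Duration> {
    // `std::sync::Arc` and `std::time::Duration` are re-exports of these
    // exact types, so behavior is unchanged.
    Arc::new(Duration::from_millis(500))
}
```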

## Testing

- Ran CI locally

## Migration Guide

The MSRV is now 1.81. Please update to this version or higher.

## Notes

- This is a _massive_ change to try to push through, which is why I've
outlined the semi-automatic steps I used to create this PR, in case it
fails and someone else tries again in the future.
- Making this change has no impact on user code, but it does mean Bevy
contributors will be warned to use `core` and `alloc` instead of `std`
where possible (see the sketch after these notes).
- This lint is a critical first step towards investigating `no_std`
options for Bevy.
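
As a quick illustration of why user code is unaffected (a minimal sketch, not
part of this PR): `std` re-exports the `core` and `alloc` items, so both paths
name the same types and code written against either path keeps compiling:

```rust
fn main() {
    // `std::time::Duration` is a re-export of `core::time::Duration`,
    // so these two values have the same type and compare directly.
    let a = core::time::Duration::from_secs(4);
    let b = std::time::Duration::from_secs(4);
    assert_eq!(a, b);
}
```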

---------

Co-authored-by: François Mockers <francois.mockers@vleue.com>
Zachary Harrold 2024-09-27 10:59:59 +10:00 committed by GitHub
parent 4e7801388c
commit d70595b667
456 changed files with 1720 additions and 1661 deletions


@ -48,6 +48,10 @@ ref_as_ptr = "warn"
# see: https://github.com/bevyengine/bevy/pull/15375#issuecomment-2366966219
too_long_first_doc_paragraph = "allow"
std_instead_of_core = "warn"
std_instead_of_alloc = "warn"
alloc_instead_of_core = "warn"
[workspace.lints.rust]
missing_docs = "warn"
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(docsrs_dep)'] }
@ -55,8 +59,44 @@ unsafe_code = "deny"
unsafe_op_in_unsafe_fn = "warn"
unused_qualifications = "warn"
[lints]
workspace = true
# Unfortunately, cargo does not currently support overriding workspace lints
# inside a particular crate. See https://github.com/rust-lang/cargo/issues/13157
#
# We require an override for cases like `std_instead_of_core`, which are intended
# for the library contributors and not for how users should consume Bevy.
# To ensure examples aren't subject to these lints, below is a duplication of the
# workspace lints, with the "overrides" applied.
#
# [lints]
# workspace = true
[lints.clippy]
doc_markdown = "warn"
manual_let_else = "warn"
match_same_arms = "warn"
redundant_closure_for_method_calls = "warn"
redundant_else = "warn"
semicolon_if_nothing_returned = "warn"
type_complexity = "allow"
undocumented_unsafe_blocks = "warn"
unwrap_or_default = "warn"
ptr_as_ptr = "warn"
ptr_cast_constness = "warn"
ref_as_ptr = "warn"
too_long_first_doc_paragraph = "allow"
std_instead_of_core = "allow"
std_instead_of_alloc = "allow"
alloc_instead_of_core = "allow"
[lints.rust]
missing_docs = "warn"
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(docsrs_dep)'] }
unsafe_code = "deny"
unsafe_op_in_unsafe_fn = "warn"
unused_qualifications = "warn"
[features]
default = [


@ -86,7 +86,7 @@ fn generic_bench<P: Copy>(
fn all_added_detection_generic<T: Component + Default>(group: &mut BenchGroup, entity_count: u32) {
group.bench_function(
format!("{}_entities_{}", entity_count, std::any::type_name::<T>()),
format!("{}_entities_{}", entity_count, core::any::type_name::<T>()),
|bencher| {
bencher.iter_batched_ref(
|| {
@ -110,8 +110,8 @@ fn all_added_detection_generic<T: Component + Default>(group: &mut BenchGroup, e
fn all_added_detection(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("all_added_detection");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for &entity_count in ENTITIES_TO_BENCH_COUNT {
generic_bench(
&mut group,
@ -129,7 +129,7 @@ fn all_changed_detection_generic<T: Component + Default + BenchModify>(
entity_count: u32,
) {
group.bench_function(
format!("{}_entities_{}", entity_count, std::any::type_name::<T>()),
format!("{}_entities_{}", entity_count, core::any::type_name::<T>()),
|bencher| {
bencher.iter_batched_ref(
|| {
@ -158,8 +158,8 @@ fn all_changed_detection_generic<T: Component + Default + BenchModify>(
fn all_changed_detection(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("all_changed_detection");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for &entity_count in ENTITIES_TO_BENCH_COUNT {
generic_bench(
&mut group,
@ -179,7 +179,7 @@ fn few_changed_detection_generic<T: Component + Default + BenchModify>(
let ratio_to_modify = 0.1;
let amount_to_modify = (entity_count as f32 * ratio_to_modify) as usize;
group.bench_function(
format!("{}_entities_{}", entity_count, std::any::type_name::<T>()),
format!("{}_entities_{}", entity_count, core::any::type_name::<T>()),
|bencher| {
bencher.iter_batched_ref(
|| {
@ -208,8 +208,8 @@ fn few_changed_detection_generic<T: Component + Default + BenchModify>(
fn few_changed_detection(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("few_changed_detection");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for &entity_count in ENTITIES_TO_BENCH_COUNT {
generic_bench(
&mut group,
@ -227,7 +227,7 @@ fn none_changed_detection_generic<T: Component + Default>(
entity_count: u32,
) {
group.bench_function(
format!("{}_entities_{}", entity_count, std::any::type_name::<T>()),
format!("{}_entities_{}", entity_count, core::any::type_name::<T>()),
|bencher| {
bencher.iter_batched_ref(
|| {
@ -252,8 +252,8 @@ fn none_changed_detection_generic<T: Component + Default>(
fn none_changed_detection(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("none_changed_detection");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for &entity_count in ENTITIES_TO_BENCH_COUNT {
generic_bench(
&mut group,
@ -308,7 +308,7 @@ fn multiple_archetype_none_changed_detection_generic<T: Component + Default + Be
"{}_archetypes_{}_entities_{}",
archetype_count,
entity_count,
std::any::type_name::<T>()
core::any::type_name::<T>()
),
|bencher| {
bencher.iter_batched_ref(
@ -356,8 +356,8 @@ fn multiple_archetype_none_changed_detection_generic<T: Component + Default + Be
fn multiple_archetype_none_changed_detection(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("multiple_archetypes_none_changed_detection");
group.warm_up_time(std::time::Duration::from_millis(800));
group.measurement_time(std::time::Duration::from_secs(8));
group.warm_up_time(core::time::Duration::from_millis(800));
group.measurement_time(core::time::Duration::from_secs(8));
for archetype_count in [5, 20, 100] {
for entity_count in [10, 100, 1000, 10000] {
multiple_archetype_none_changed_detection_generic::<Table>(


@ -23,8 +23,8 @@ criterion_group!(
fn add_remove(c: &mut Criterion) {
let mut group = c.benchmark_group("add_remove");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("table", |b| {
let mut bench = add_remove_table::Benchmark::new();
b.iter(move || bench.run());
@ -38,8 +38,8 @@ fn add_remove(c: &mut Criterion) {
fn add_remove_big(c: &mut Criterion) {
let mut group = c.benchmark_group("add_remove_big");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("table", |b| {
let mut bench = add_remove_big_table::Benchmark::new();
b.iter(move || bench.run());
@ -53,8 +53,8 @@ fn add_remove_big(c: &mut Criterion) {
fn add_remove_very_big(c: &mut Criterion) {
let mut group = c.benchmark_group("add_remove_very_big");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("table", |b| {
let mut bench = add_remove_very_big_table::Benchmark::new();
b.iter(move || bench.run());
@ -64,8 +64,8 @@ fn add_remove_very_big(c: &mut Criterion) {
fn insert_simple(c: &mut Criterion) {
let mut group = c.benchmark_group("insert_simple");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("base", |b| {
let mut bench = insert_simple::Benchmark::new();
b.iter(move || bench.run());


@ -19,7 +19,7 @@ impl<const SIZE: usize> Benchmark<SIZE> {
pub fn run(&mut self) {
let mut reader = self.0.get_cursor();
for evt in reader.read(&self.0) {
std::hint::black_box(evt);
core::hint::black_box(evt);
}
}
}


@ -7,8 +7,8 @@ criterion_group!(event_benches, send, iter);
fn send(c: &mut Criterion) {
let mut group = c.benchmark_group("events_send");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for count in [100, 1000, 10000, 50000] {
group.bench_function(format!("size_4_events_{}", count), |b| {
let mut bench = send::Benchmark::<4>::new(count);
@ -32,8 +32,8 @@ fn send(c: &mut Criterion) {
fn iter(c: &mut Criterion) {
let mut group = c.benchmark_group("events_iter");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for count in [100, 1000, 10000, 50000] {
group.bench_function(format!("size_4_events_{}", count), |b| {
let mut bench = iter::Benchmark::<4>::new(count);


@ -32,7 +32,7 @@ impl<const SIZE: usize> Benchmark<SIZE> {
pub fn run(&mut self) {
for _ in 0..self.count {
self.events
.send(std::hint::black_box(BenchEvent([0u8; SIZE])));
.send(core::hint::black_box(BenchEvent([0u8; SIZE])));
}
self.events.update();
}


@ -2,7 +2,7 @@ use bevy_ecs::prelude::*;
use bevy_ecs::system::SystemState;
use criterion::*;
use glam::*;
use std::hint::black_box;
use core::hint::black_box;
criterion_group!(fragmentation_benches, iter_frag_empty);
@ -17,8 +17,8 @@ fn flip_coin() -> bool {
}
fn iter_frag_empty(c: &mut Criterion) {
let mut group = c.benchmark_group("iter_fragmented(4096)_empty");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("foreach_table", |b| {
let mut world = World::new();


@ -17,8 +17,8 @@ pub fn heavy_compute(c: &mut Criterion) {
struct Transform(Mat4);
let mut group = c.benchmark_group("heavy_compute");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("base", |b| {
ComputeTaskPool::get_or_init(TaskPool::default);


@ -20,7 +20,7 @@ impl<'w> Benchmark<'w> {
let mut world = World::new();
world.spawn_batch(
std::iter::repeat((
core::iter::repeat((
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),


@ -20,7 +20,7 @@ impl<'w> Benchmark<'w> {
let mut world = World::new();
world.spawn_batch(
std::iter::repeat((
core::iter::repeat((
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),


@ -22,7 +22,7 @@ impl<'w> Benchmark<'w> {
let mut world = World::new();
world.spawn_batch(
std::iter::repeat((
core::iter::repeat((
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),


@ -34,7 +34,7 @@ impl<'w> Benchmark<'w> {
let mut world = World::new();
world.spawn_batch(
std::iter::repeat((
core::iter::repeat((
Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X),
Position::<0>(Vec3::X),


@ -36,7 +36,7 @@ impl<'w> Benchmark<'w> {
let mut world = World::new();
world.spawn_batch(
std::iter::repeat((
core::iter::repeat((
Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X),
Position::<0>(Vec3::X),


@ -22,7 +22,7 @@ impl<'w> Benchmark<'w> {
let mut world = World::new();
world.spawn_batch(
std::iter::repeat((
core::iter::repeat((
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),


@ -20,7 +20,7 @@ impl Benchmark {
let mut world = World::new();
world.spawn_batch(
std::iter::repeat((
core::iter::repeat((
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),


@ -34,7 +34,7 @@ impl<'w> Benchmark<'w> {
let mut world = World::new();
world.spawn_batch(
std::iter::repeat((
core::iter::repeat((
Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X),
Position::<0>(Vec3::X),


@ -36,7 +36,7 @@ impl<'w> Benchmark<'w> {
let mut world = World::new();
world.spawn_batch(
std::iter::repeat((
core::iter::repeat((
Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X),
Position::<0>(Vec3::X),


@ -35,8 +35,8 @@ criterion_group!(
fn iter_simple(c: &mut Criterion) {
let mut group = c.benchmark_group("iter_simple");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("base", |b| {
let mut bench = iter_simple::Benchmark::new();
b.iter(move || bench.run());
@ -82,8 +82,8 @@ fn iter_simple(c: &mut Criterion) {
fn iter_frag(c: &mut Criterion) {
let mut group = c.benchmark_group("iter_fragmented");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("base", |b| {
let mut bench = iter_frag::Benchmark::new();
b.iter(move || bench.run());
@ -105,8 +105,8 @@ fn iter_frag(c: &mut Criterion) {
fn iter_frag_sparse(c: &mut Criterion) {
let mut group = c.benchmark_group("iter_fragmented_sparse");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("base", |b| {
let mut bench = iter_frag_sparse::Benchmark::new();
b.iter(move || bench.run());
@ -128,8 +128,8 @@ fn iter_frag_sparse(c: &mut Criterion) {
fn par_iter_simple(c: &mut Criterion) {
let mut group = c.benchmark_group("par_iter_simple");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for f in [0, 10, 100, 1000] {
group.bench_function(format!("with_{}_fragment", f), |b| {
let mut bench = par_iter_simple::Benchmark::new(f);


@ -31,7 +31,7 @@ impl<'w> Benchmark<'w> {
let mut world = World::new();
let iter = world.spawn_batch(
std::iter::repeat((
core::iter::repeat((
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),


@ -22,8 +22,8 @@ fn deterministic_rand() -> ChaCha8Rng {
pub fn event_propagation(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("event_propagation");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("single_event_type", |bencher| {
let mut world = World::new();


@ -12,8 +12,8 @@ struct EventBase;
pub fn observe_simple(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("observe");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("trigger_simple", |bencher| {
let mut world = World::new();


@ -14,8 +14,8 @@ fn no() -> bool {
pub fn run_condition_yes(criterion: &mut Criterion) {
let mut world = World::new();
let mut group = criterion.benchmark_group("run_condition/yes");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(3));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(3));
fn empty() {}
for amount in 0..21 {
let mut schedule = Schedule::default();
@ -37,8 +37,8 @@ pub fn run_condition_yes(criterion: &mut Criterion) {
pub fn run_condition_no(criterion: &mut Criterion) {
let mut world = World::new();
let mut group = criterion.benchmark_group("run_condition/no");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(3));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(3));
fn empty() {}
for amount in 0..21 {
let mut schedule = Schedule::default();
@ -64,8 +64,8 @@ pub fn run_condition_yes_with_query(criterion: &mut Criterion) {
let mut world = World::new();
world.spawn(TestBool(true));
let mut group = criterion.benchmark_group("run_condition/yes_using_query");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(3));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(3));
fn empty() {}
fn yes_with_query(query: Query<&TestBool>) -> bool {
query.single().0
@ -93,8 +93,8 @@ pub fn run_condition_yes_with_resource(criterion: &mut Criterion) {
let mut world = World::new();
world.insert_resource(TestBool(true));
let mut group = criterion.benchmark_group("run_condition/yes_using_resource");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(3));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(3));
fn empty() {}
fn yes_with_resource(res: Res<TestBool>) -> bool {
res.0


@ -17,8 +17,8 @@ const ENTITY_BUNCH: usize = 5000;
pub fn empty_systems(criterion: &mut Criterion) {
let mut world = World::new();
let mut group = criterion.benchmark_group("empty_systems");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(3));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(3));
fn empty() {}
for amount in 0..5 {
let mut schedule = Schedule::default();
@ -50,23 +50,23 @@ pub fn empty_systems(criterion: &mut Criterion) {
pub fn busy_systems(criterion: &mut Criterion) {
fn ab(mut q: Query<(&mut A, &mut B)>) {
q.iter_mut().for_each(|(mut a, mut b)| {
std::mem::swap(&mut a.0, &mut b.0);
core::mem::swap(&mut a.0, &mut b.0);
});
}
fn cd(mut q: Query<(&mut C, &mut D)>) {
q.iter_mut().for_each(|(mut c, mut d)| {
std::mem::swap(&mut c.0, &mut d.0);
core::mem::swap(&mut c.0, &mut d.0);
});
}
fn ce(mut q: Query<(&mut C, &mut E)>) {
q.iter_mut().for_each(|(mut c, mut e)| {
std::mem::swap(&mut c.0, &mut e.0);
core::mem::swap(&mut c.0, &mut e.0);
});
}
let mut world = World::new();
let mut group = criterion.benchmark_group("busy_systems");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(3));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(3));
for entity_bunches in 1..6 {
world.spawn_batch((0..4 * ENTITY_BUNCH).map(|_| (A(0.0), B(0.0))));
world.spawn_batch((0..4 * ENTITY_BUNCH).map(|_| (A(0.0), B(0.0), C(0.0))));
@ -99,26 +99,26 @@ pub fn busy_systems(criterion: &mut Criterion) {
pub fn contrived(criterion: &mut Criterion) {
fn s_0(mut q_0: Query<(&mut A, &mut B)>) {
q_0.iter_mut().for_each(|(mut c_0, mut c_1)| {
std::mem::swap(&mut c_0.0, &mut c_1.0);
core::mem::swap(&mut c_0.0, &mut c_1.0);
});
}
fn s_1(mut q_0: Query<(&mut A, &mut C)>, mut q_1: Query<(&mut B, &mut D)>) {
q_0.iter_mut().for_each(|(mut c_0, mut c_1)| {
std::mem::swap(&mut c_0.0, &mut c_1.0);
core::mem::swap(&mut c_0.0, &mut c_1.0);
});
q_1.iter_mut().for_each(|(mut c_0, mut c_1)| {
std::mem::swap(&mut c_0.0, &mut c_1.0);
core::mem::swap(&mut c_0.0, &mut c_1.0);
});
}
fn s_2(mut q_0: Query<(&mut C, &mut D)>) {
q_0.iter_mut().for_each(|(mut c_0, mut c_1)| {
std::mem::swap(&mut c_0.0, &mut c_1.0);
core::mem::swap(&mut c_0.0, &mut c_1.0);
});
}
let mut world = World::new();
let mut group = criterion.benchmark_group("contrived");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(3));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(3));
for entity_bunches in 1..6 {
world.spawn_batch((0..ENTITY_BUNCH).map(|_| (A(0.0), B(0.0), C(0.0), D(0.0))));
world.spawn_batch((0..ENTITY_BUNCH).map(|_| (A(0.0), B(0.0))));


@ -16,25 +16,25 @@ pub fn schedule(c: &mut Criterion) {
fn ab(mut query: Query<(&mut A, &mut B)>) {
query.iter_mut().for_each(|(mut a, mut b)| {
std::mem::swap(&mut a.0, &mut b.0);
core::mem::swap(&mut a.0, &mut b.0);
});
}
fn cd(mut query: Query<(&mut C, &mut D)>) {
query.iter_mut().for_each(|(mut c, mut d)| {
std::mem::swap(&mut c.0, &mut d.0);
core::mem::swap(&mut c.0, &mut d.0);
});
}
fn ce(mut query: Query<(&mut C, &mut E)>) {
query.iter_mut().for_each(|(mut c, mut e)| {
std::mem::swap(&mut c.0, &mut e.0);
core::mem::swap(&mut c.0, &mut e.0);
});
}
let mut group = c.benchmark_group("schedule");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("base", |b| {
let mut world = World::default();
@ -68,8 +68,8 @@ pub fn build_schedule(criterion: &mut Criterion) {
struct DummySet;
let mut group = criterion.benchmark_group("build_schedule");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(15));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(15));
// Method: generate a set of `graph_size` systems which have a One True Ordering.
// Add system to the schedule with full constraints. Hopefully this should be maximally


@ -15,8 +15,8 @@ struct C;
pub fn empty_commands(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("empty_commands");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("0_entities", |bencher| {
let mut world = World::default();
@ -32,8 +32,8 @@ pub fn empty_commands(criterion: &mut Criterion) {
pub fn spawn_commands(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("spawn_commands");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for entity_count in (1..5).map(|i| i * 2 * 1000) {
group.bench_function(format!("{}_entities", entity_count), |bencher| {
@ -69,8 +69,8 @@ struct Vec3([f32; 3]);
pub fn insert_commands(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("insert_commands");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
let entity_count = 10_000;
group.bench_function("insert", |bencher| {
@ -132,8 +132,8 @@ impl Command for FakeCommandB {
pub fn fake_commands(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("fake_commands");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for command_count in (1..5).map(|i| i * 2 * 1000) {
group.bench_function(format!("{}_commands", command_count), |bencher| {
@ -177,8 +177,8 @@ impl Default for LargeStruct {
pub fn sized_commands_impl<T: Default + Command>(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group(format!("sized_commands_{}_bytes", size_of::<T>()));
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for command_count in (1..5).map(|i| i * 2 * 1000) {
group.bench_function(format!("{}_commands", command_count), |bencher| {
@ -212,8 +212,8 @@ pub fn large_sized_commands(criterion: &mut Criterion) {
pub fn get_or_spawn(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("get_or_spawn");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
group.bench_function("individual", |bencher| {
let mut world = World::default();


@ -33,7 +33,7 @@ pub fn entity_set_build_and_lookup(c: &mut Criterion) {
// Get some random-but-consistent entities to use for all the benches below.
let mut rng = ChaCha8Rng::seed_from_u64(size as u64);
let entities =
Vec::from_iter(std::iter::repeat_with(|| make_entity(&mut rng, size)).take(size));
Vec::from_iter(core::iter::repeat_with(|| make_entity(&mut rng, size)).take(size));
group.throughput(Throughput::Elements(size as u64));
group.bench_function(BenchmarkId::new("entity_set_build", size), |bencher| {


@ -9,8 +9,8 @@ struct B(Vec4);
pub fn world_spawn(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("spawn_world");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for entity_count in (0..5).map(|i| 10_u32.pow(i)) {
group.bench_function(format!("{}_entities", entity_count), |bencher| {


@ -22,7 +22,7 @@ struct WideTable<const X: usize>(f32);
#[component(storage = "SparseSet")]
struct WideSparse<const X: usize>(f32);
const RANGE: std::ops::Range<u32> = 5..6;
const RANGE: core::ops::Range<u32> = 5..6;
fn deterministic_rand() -> ChaCha8Rng {
ChaCha8Rng::seed_from_u64(42)
@ -42,8 +42,8 @@ fn setup_wide<T: Bundle + Default>(entity_count: u32) -> World {
pub fn world_entity(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("world_entity");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for entity_count in RANGE.map(|i| i * 10_000) {
group.bench_function(format!("{}_entities", entity_count), |bencher| {
@ -63,8 +63,8 @@ pub fn world_entity(criterion: &mut Criterion) {
pub fn world_get(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("world_get");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for entity_count in RANGE.map(|i| i * 10_000) {
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
@ -94,8 +94,8 @@ pub fn world_get(criterion: &mut Criterion) {
pub fn world_query_get(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("world_query_get");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for entity_count in RANGE.map(|i| i * 10_000) {
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
@ -180,8 +180,8 @@ pub fn world_query_get(criterion: &mut Criterion) {
pub fn world_query_iter(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("world_query_iter");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for entity_count in RANGE.map(|i| i * 10_000) {
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
@ -219,8 +219,8 @@ pub fn world_query_iter(criterion: &mut Criterion) {
pub fn world_query_for_each(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("world_query_for_each");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for entity_count in RANGE.map(|i| i * 10_000) {
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
@ -258,8 +258,8 @@ pub fn world_query_for_each(criterion: &mut Criterion) {
pub fn query_get(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("query_get");
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(4));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for entity_count in RANGE.map(|i| i * 10_000) {
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
@ -307,8 +307,8 @@ pub fn query_get(criterion: &mut Criterion) {
pub fn query_get_many<const N: usize>(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group(&format!("query_get_many_{N}"));
group.warm_up_time(std::time::Duration::from_millis(500));
group.measurement_time(std::time::Duration::from_secs(2 * N as u64));
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(2 * N as u64));
for entity_count in RANGE.map(|i| i * 10_000) {
group.bench_function(format!("{}_calls_table", entity_count), |bencher| {


@ -1,4 +1,4 @@
use std::{iter, time::Duration};
use core::{iter, time::Duration};
use bevy_reflect::{DynamicList, List};
use criterion::{


@ -1,4 +1,4 @@
use std::{fmt::Write, iter, time::Duration};
use core::{fmt::Write, iter, time::Duration};
use bevy_reflect::{DynamicMap, Map};
use bevy_utils::HashMap;


@ -1,4 +1,4 @@
use std::{fmt::Write, str, time::Duration};
use core::{fmt::Write, str, time::Duration};
use bevy_reflect::ParsedPath;
use criterion::{


@ -1,4 +1,4 @@
use std::time::Duration;
use core::time::Duration;
use bevy_reflect::{DynamicStruct, GetField, PartialReflect, Reflect, Struct};
use criterion::{


@ -1,22 +1,22 @@
use bevy_tasks::{ParallelIterator, TaskPoolBuilder};
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
struct ParChunks<'a, T>(std::slice::Chunks<'a, T>);
impl<'a, T> ParallelIterator<std::slice::Iter<'a, T>> for ParChunks<'a, T>
struct ParChunks<'a, T>(core::slice::Chunks<'a, T>);
impl<'a, T> ParallelIterator<core::slice::Iter<'a, T>> for ParChunks<'a, T>
where
T: 'a + Send + Sync,
{
fn next_batch(&mut self) -> Option<std::slice::Iter<'a, T>> {
fn next_batch(&mut self) -> Option<core::slice::Iter<'a, T>> {
self.0.next().map(|s| s.iter())
}
}
struct ParChunksMut<'a, T>(std::slice::ChunksMut<'a, T>);
impl<'a, T> ParallelIterator<std::slice::IterMut<'a, T>> for ParChunksMut<'a, T>
struct ParChunksMut<'a, T>(core::slice::ChunksMut<'a, T>);
impl<'a, T> ParallelIterator<core::slice::IterMut<'a, T>> for ParChunksMut<'a, T>
where
T: 'a + Send + Sync,
{
fn next_batch(&mut self) -> Option<std::slice::IterMut<'a, T>> {
fn next_batch(&mut self) -> Option<core::slice::IterMut<'a, T>> {
self.0.next().map(|s| s.iter_mut())
}
}


@ -7,10 +7,10 @@
//! Accessibility for Bevy
use std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
};
extern crate alloc;
use alloc::sync::Arc;
use core::sync::atomic::{AtomicBool, Ordering};
pub use accesskit;
use accesskit::NodeBuilder;


@ -1,9 +1,7 @@
//! The animation graph, which allows animations to be blended together.
use std::{
io::{self, Write},
ops::{Index, IndexMut},
};
use core::ops::{Index, IndexMut};
use std::io::{self, Write};
use bevy_asset::{io::Reader, Asset, AssetId, AssetLoader, AssetPath, Handle, LoadContext};
use bevy_reflect::{Reflect, ReflectSerialize};


@ -1,6 +1,6 @@
//! Keyframes of animation clips.
use std::{
use core::{
any::TypeId,
fmt::{self, Debug, Formatter},
};


@ -7,16 +7,18 @@
//! Animation for the game engine Bevy
extern crate alloc;
pub mod animatable;
pub mod graph;
pub mod keyframes;
pub mod transition;
mod util;
use std::{
use alloc::collections::BTreeMap;
use core::{
any::{Any, TypeId},
cell::RefCell,
collections::BTreeMap,
fmt::Debug,
hash::{Hash, Hasher},
iter,


@ -13,10 +13,9 @@ use bevy_ecs::{
#[cfg(feature = "trace")]
use bevy_utils::tracing::info_span;
use bevy_utils::{tracing::debug, HashMap};
use core::{fmt::Debug, num::NonZero, panic::AssertUnwindSafe};
use std::{
fmt::Debug,
num::NonZero,
panic::{catch_unwind, resume_unwind, AssertUnwindSafe},
panic::{catch_unwind, resume_unwind},
process::{ExitCode, Termination},
};
use thiserror::Error;
@ -77,7 +76,7 @@ pub struct App {
}
impl Debug for App {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
write!(f, "App {{ sub_apps: ")?;
f.debug_map()
.entries(self.sub_apps.sub_apps.iter())
@ -165,8 +164,8 @@ impl App {
panic!("App::run() was called while a plugin was building.");
}
let runner = std::mem::replace(&mut self.runner, Box::new(run_once));
let app = std::mem::replace(self, App::empty());
let runner = core::mem::replace(&mut self.runner, Box::new(run_once));
let app = core::mem::replace(self, App::empty());
(runner)(app)
}
@ -210,7 +209,7 @@ impl App {
let mut overall_plugins_state = match self.main_mut().plugins_state {
PluginsState::Adding => {
let mut state = PluginsState::Ready;
let plugins = std::mem::take(&mut self.main_mut().plugin_registry);
let plugins = core::mem::take(&mut self.main_mut().plugin_registry);
for plugin in &plugins {
// plugins installed to main need to see all sub-apps
if !plugin.ready(self) {
@ -236,7 +235,7 @@ impl App {
/// plugins are ready, but can be useful for situations where you want to use [`App::update`].
pub fn finish(&mut self) {
// plugins installed to main should see all sub-apps
let plugins = std::mem::take(&mut self.main_mut().plugin_registry);
let plugins = core::mem::take(&mut self.main_mut().plugin_registry);
for plugin in &plugins {
plugin.finish(self);
}
@ -250,7 +249,7 @@ impl App {
/// [`App::finish`], but can be useful for situations where you want to use [`App::update`].
pub fn cleanup(&mut self) {
// plugins installed to main should see all sub-apps
let plugins = std::mem::take(&mut self.main_mut().plugin_registry);
let plugins = core::mem::take(&mut self.main_mut().plugin_registry);
for plugin in &plugins {
plugin.cleanup(self);
}
@ -744,7 +743,7 @@ impl App {
#[cfg(feature = "reflect_functions")]
pub fn register_function_with_name<F, Marker>(
&mut self,
name: impl Into<std::borrow::Cow<'static, str>>,
name: impl Into<alloc::borrow::Cow<'static, str>>,
function: F,
) -> &mut Self
where
@ -1116,7 +1115,8 @@ impl Termination for AppExit {
#[cfg(test)]
mod tests {
use std::{iter, marker::PhantomData, sync::Mutex};
use core::{iter, marker::PhantomData};
use std::sync::Mutex;
use bevy_ecs::{
change_detection::{DetectChanges, ResMut},


@ -7,6 +7,8 @@
//! This crate is about everything concerning the highest-level, application layer of a Bevy app.
extern crate alloc;
mod app;
mod main_schedule;
mod panic_handler;


@ -1,7 +1,7 @@
use downcast_rs::{impl_downcast, Downcast};
use crate::App;
use std::any::Any;
use core::any::Any;
/// A collection of Bevy app logic and configuration.
///
@ -82,7 +82,7 @@ pub trait Plugin: Downcast + Any + Send + Sync {
/// Configures a name for the [`Plugin`] which is primarily used for checking plugin
/// uniqueness and debugging.
fn name(&self) -> &str {
std::any::type_name::<Self>()
core::any::type_name::<Self>()
}
/// If the plugin can be meaningfully instantiated several times in an [`App`],


@ -3,7 +3,7 @@ use bevy_utils::{
tracing::{debug, warn},
TypeIdMap,
};
use std::any::TypeId;
use core::any::TypeId;
/// A macro for generating a well-documented [`PluginGroup`] from a list of [`Plugin`] paths.
///
@ -163,7 +163,7 @@ pub trait PluginGroup: Sized {
fn build(self) -> PluginGroupBuilder;
/// Configures a name for the [`PluginGroup`] which is primarily used for debugging.
fn name() -> String {
std::any::type_name::<Self>().to_string()
core::any::type_name::<Self>().to_string()
}
/// Sets the value of the given [`Plugin`], if it exists
fn set<T: Plugin>(self, plugin: T) -> PluginGroupBuilder {
@ -219,7 +219,7 @@ impl PluginGroupBuilder {
Some(i) => i,
None => panic!(
"Plugin does not exist in group: {}.",
std::any::type_name::<Target>()
core::any::type_name::<Target>()
),
}
}
@ -273,7 +273,7 @@ impl PluginGroupBuilder {
let entry = self.plugins.get_mut(&TypeId::of::<T>()).unwrap_or_else(|| {
panic!(
"{} does not exist in this PluginGroup",
std::any::type_name::<T>(),
core::any::type_name::<T>(),
)
});
entry.plugin = Box::new(plugin);
@ -437,9 +437,9 @@ mod tests {
assert_eq!(
group.order,
vec![
std::any::TypeId::of::<PluginA>(),
std::any::TypeId::of::<PluginB>(),
std::any::TypeId::of::<PluginC>(),
core::any::TypeId::of::<PluginA>(),
core::any::TypeId::of::<PluginB>(),
core::any::TypeId::of::<PluginC>(),
]
);
}
@ -454,9 +454,9 @@ mod tests {
assert_eq!(
group.order,
vec![
std::any::TypeId::of::<PluginA>(),
std::any::TypeId::of::<PluginC>(),
std::any::TypeId::of::<PluginB>(),
core::any::TypeId::of::<PluginA>(),
core::any::TypeId::of::<PluginC>(),
core::any::TypeId::of::<PluginB>(),
]
);
}
@ -471,9 +471,9 @@ mod tests {
assert_eq!(
group.order,
vec![
std::any::TypeId::of::<PluginA>(),
std::any::TypeId::of::<PluginC>(),
std::any::TypeId::of::<PluginB>(),
core::any::TypeId::of::<PluginA>(),
core::any::TypeId::of::<PluginC>(),
core::any::TypeId::of::<PluginB>(),
]
);
}
@ -489,9 +489,9 @@ mod tests {
assert_eq!(
group.order,
vec![
std::any::TypeId::of::<PluginA>(),
std::any::TypeId::of::<PluginC>(),
std::any::TypeId::of::<PluginB>(),
core::any::TypeId::of::<PluginA>(),
core::any::TypeId::of::<PluginC>(),
core::any::TypeId::of::<PluginB>(),
]
);
}
@ -507,9 +507,9 @@ mod tests {
assert_eq!(
group.order,
vec![
std::any::TypeId::of::<PluginA>(),
std::any::TypeId::of::<PluginC>(),
std::any::TypeId::of::<PluginB>(),
core::any::TypeId::of::<PluginA>(),
core::any::TypeId::of::<PluginC>(),
core::any::TypeId::of::<PluginB>(),
]
);
}
@ -525,9 +525,9 @@ mod tests {
assert_eq!(
group.order,
vec![
std::any::TypeId::of::<PluginA>(),
std::any::TypeId::of::<PluginC>(),
std::any::TypeId::of::<PluginB>(),
core::any::TypeId::of::<PluginA>(),
core::any::TypeId::of::<PluginC>(),
core::any::TypeId::of::<PluginB>(),
]
);
}
@ -545,9 +545,9 @@ mod tests {
assert_eq!(
group_b.order,
vec![
std::any::TypeId::of::<PluginA>(),
std::any::TypeId::of::<PluginB>(),
std::any::TypeId::of::<PluginC>(),
core::any::TypeId::of::<PluginA>(),
core::any::TypeId::of::<PluginB>(),
core::any::TypeId::of::<PluginC>(),
]
);
}
@ -569,9 +569,9 @@ mod tests {
assert_eq!(
group.order,
vec![
std::any::TypeId::of::<PluginA>(),
std::any::TypeId::of::<PluginB>(),
std::any::TypeId::of::<PluginC>(),
core::any::TypeId::of::<PluginA>(),
core::any::TypeId::of::<PluginB>(),
core::any::TypeId::of::<PluginC>(),
]
);
}


@ -6,9 +6,11 @@ use crate::{
use bevy_utils::{Duration, Instant};
#[cfg(target_arch = "wasm32")]
use std::{cell::RefCell, rc::Rc};
#[cfg(target_arch = "wasm32")]
use wasm_bindgen::{prelude::*, JsCast};
use {
alloc::rc::Rc,
core::cell::RefCell,
wasm_bindgen::{prelude::*, JsCast},
};
/// Determines the method used to run an [`App`]'s [`Schedule`](bevy_ecs::schedule::Schedule).
///


@ -9,7 +9,7 @@ use bevy_ecs::{
#[cfg(feature = "trace")]
use bevy_utils::tracing::info_span;
use bevy_utils::{HashMap, HashSet};
use std::fmt::Debug;
use core::fmt::Debug;
type ExtractFn = Box<dyn Fn(&mut World, &mut World) + Send>;
@ -75,7 +75,7 @@ pub struct SubApp {
}
impl Debug for SubApp {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
write!(f, "SubApp")
}
}
@ -109,9 +109,9 @@ impl SubApp {
F: FnOnce(&mut App),
{
let mut app = App::empty();
std::mem::swap(self, &mut app.sub_apps.main);
core::mem::swap(self, &mut app.sub_apps.main);
f(&mut app);
std::mem::swap(self, &mut app.sub_apps.main);
core::mem::swap(self, &mut app.sub_apps.main);
}
/// Returns a reference to the [`World`].
@ -327,7 +327,7 @@ impl SubApp {
where
T: Plugin,
{
self.plugin_names.contains(std::any::type_name::<T>())
self.plugin_names.contains(core::any::type_name::<T>())
}
/// See [`App::get_added_plugins`].
@ -352,7 +352,7 @@ impl SubApp {
match self.plugins_state {
PluginsState::Adding => {
let mut state = PluginsState::Ready;
let plugins = std::mem::take(&mut self.plugin_registry);
let plugins = core::mem::take(&mut self.plugin_registry);
self.run_as_app(|app| {
for plugin in &plugins {
if !plugin.ready(app) {
@ -370,7 +370,7 @@ impl SubApp {
/// Runs [`Plugin::finish`] for each plugin.
pub fn finish(&mut self) {
let plugins = std::mem::take(&mut self.plugin_registry);
let plugins = core::mem::take(&mut self.plugin_registry);
self.run_as_app(|app| {
for plugin in &plugins {
plugin.finish(app);
@ -382,7 +382,7 @@ impl SubApp {
/// Runs [`Plugin::cleanup`] for each plugin.
pub fn cleanup(&mut self) {
let plugins = std::mem::take(&mut self.plugin_registry);
let plugins = core::mem::take(&mut self.plugin_registry);
self.run_as_app(|app| {
for plugin in &plugins {
plugin.cleanup(app);
@ -428,7 +428,7 @@ impl SubApp {
#[cfg(feature = "reflect_functions")]
pub fn register_function_with_name<F, Marker>(
&mut self,
name: impl Into<std::borrow::Cow<'static, str>>,
name: impl Into<alloc::borrow::Cow<'static, str>>,
function: F,
) -> &mut Self
where
@ -472,12 +472,12 @@ impl SubApps {
/// Returns an iterator over the sub-apps (starting with the main one).
pub fn iter(&self) -> impl Iterator<Item = &SubApp> + '_ {
std::iter::once(&self.main).chain(self.sub_apps.values())
core::iter::once(&self.main).chain(self.sub_apps.values())
}
/// Returns a mutable iterator over the sub-apps (starting with the main one).
pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut SubApp> + '_ {
std::iter::once(&mut self.main).chain(self.sub_apps.values_mut())
core::iter::once(&mut self.main).chain(self.sub_apps.values_mut())
}
/// Extract data from the main world into the [`SubApp`] with the given label and perform an update if it exists.


@ -1,4 +1,4 @@
use std::sync::atomic::{AtomicBool, Ordering};
use core::sync::atomic::{AtomicBool, Ordering};
use bevy_ecs::event::EventWriter;


@ -2,20 +2,16 @@ use crate::{
self as bevy_asset, Asset, AssetEvent, AssetHandleProvider, AssetId, AssetServer, Handle,
UntypedHandle,
};
use alloc::sync::Arc;
use bevy_ecs::{
prelude::EventWriter,
system::{Res, ResMut, Resource},
};
use bevy_reflect::{Reflect, TypePath};
use bevy_utils::HashMap;
use core::{any::TypeId, iter::Enumerate, marker::PhantomData, sync::atomic::AtomicU32};
use crossbeam_channel::{Receiver, Sender};
use serde::{Deserialize, Serialize};
use std::{
any::TypeId,
iter::Enumerate,
marker::PhantomData,
sync::{atomic::AtomicU32, Arc},
};
use thiserror::Error;
use uuid::Uuid;
@ -83,7 +79,7 @@ impl AssetIndexAllocator {
AssetIndex {
index: self
.next_index
.fetch_add(1, std::sync::atomic::Ordering::Relaxed),
.fetch_add(1, core::sync::atomic::Ordering::Relaxed),
generation: 0,
}
}
@ -238,7 +234,7 @@ impl<A: Asset> DenseAssetStorage<A> {
let new_len = self
.allocator
.next_index
.load(std::sync::atomic::Ordering::Relaxed);
.load(core::sync::atomic::Ordering::Relaxed);
self.storage.resize_with(new_len as usize, || Entry::Some {
value: None,
generation: 0,
@ -579,7 +575,7 @@ impl<A: Asset> Assets<A> {
/// A mutable iterator over [`Assets`].
pub struct AssetsMutIterator<'a, A: Asset> {
queued_events: &'a mut Vec<AssetEvent<A>>,
dense_storage: Enumerate<std::slice::IterMut<'a, Entry<A>>>,
dense_storage: Enumerate<core::slice::IterMut<'a, Entry<A>>>,
hash_map: bevy_utils::hashbrown::hash_map::IterMut<'a, Uuid, A>,
}


@ -1,6 +1,6 @@
use crate::{Asset, AssetId, AssetLoadError, AssetPath, UntypedAssetId};
use bevy_ecs::event::Event;
use std::fmt::Debug;
use core::fmt::Debug;
/// An event emitted when a specific [`Asset`] fails to load.
///
@ -92,7 +92,7 @@ impl<A: Asset> Clone for AssetEvent<A> {
impl<A: Asset> Copy for AssetEvent<A> {}
impl<A: Asset> Debug for AssetEvent<A> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
Self::Added { id } => f.debug_struct("Added").field("id", id).finish(),
Self::Modified { id } => f.debug_struct("Modified").field("id", id).finish(),


@ -2,15 +2,15 @@ use crate::{
meta::MetaTransform, Asset, AssetId, AssetIndexAllocator, AssetPath, InternalAssetId,
UntypedAssetId,
};
use alloc::sync::Arc;
use bevy_ecs::prelude::*;
use bevy_reflect::{std_traits::ReflectDefault, Reflect, TypePath};
use crossbeam_channel::{Receiver, Sender};
use disqualified::ShortName;
use std::{
use core::{
any::TypeId,
hash::{Hash, Hasher},
sync::Arc,
};
use crossbeam_channel::{Receiver, Sender};
use disqualified::ShortName;
use thiserror::Error;
use uuid::Uuid;
@ -103,8 +103,8 @@ impl Drop for StrongHandle {
}
}
impl std::fmt::Debug for StrongHandle {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl core::fmt::Debug for StrongHandle {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("StrongHandle")
.field("id", &self.id)
.field("asset_server_managed", &self.asset_server_managed)
@ -204,8 +204,8 @@ impl<A: Asset> Default for Handle<A> {
}
}
impl<A: Asset> std::fmt::Debug for Handle<A> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl<A: Asset> core::fmt::Debug for Handle<A> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let name = ShortName::of::<A>();
match self {
Handle::Strong(handle) => {
@ -229,13 +229,13 @@ impl<A: Asset> Hash for Handle<A> {
}
impl<A: Asset> PartialOrd for Handle<A> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl<A: Asset> Ord for Handle<A> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
fn cmp(&self, other: &Self) -> core::cmp::Ordering {
self.id().cmp(&other.id())
}
}
@ -357,7 +357,7 @@ impl UntypedHandle {
let Ok(handle) = self.try_typed() else {
panic!(
"The target Handle<{}>'s TypeId does not match the TypeId of this UntypedHandle",
std::any::type_name::<A>()
core::any::type_name::<A>()
)
};
@ -397,8 +397,8 @@ impl Hash for UntypedHandle {
}
}
impl std::fmt::Debug for UntypedHandle {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl core::fmt::Debug for UntypedHandle {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
UntypedHandle::Strong(handle) => {
write!(
@ -420,7 +420,7 @@ impl std::fmt::Debug for UntypedHandle {
}
impl PartialOrd for UntypedHandle {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
if self.type_id() == other.type_id() {
self.id().partial_cmp(&other.id())
} else {
@ -454,7 +454,7 @@ impl<A: Asset> PartialEq<Handle<A>> for UntypedHandle {
impl<A: Asset> PartialOrd<UntypedHandle> for Handle<A> {
#[inline]
fn partial_cmp(&self, other: &UntypedHandle) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &UntypedHandle) -> Option<core::cmp::Ordering> {
if TypeId::of::<A>() != other.type_id() {
None
} else {
@ -465,7 +465,7 @@ impl<A: Asset> PartialOrd<UntypedHandle> for Handle<A> {
impl<A: Asset> PartialOrd<Handle<A>> for UntypedHandle {
#[inline]
fn partial_cmp(&self, other: &Handle<A>) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &Handle<A>) -> Option<core::cmp::Ordering> {
Some(other.partial_cmp(self)?.reverse())
}
}


@ -3,7 +3,7 @@ use bevy_reflect::Reflect;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use std::{
use core::{
any::TypeId,
fmt::{Debug, Display},
hash::Hash,
@ -86,19 +86,19 @@ impl<A: Asset> Clone for AssetId<A> {
impl<A: Asset> Copy for AssetId<A> {}
impl<A: Asset> Display for AssetId<A> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
Debug::fmt(self, f)
}
}
impl<A: Asset> Debug for AssetId<A> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
AssetId::Index { index, .. } => {
write!(
f,
"AssetId<{}>{{ index: {}, generation: {}}}",
std::any::type_name::<A>(),
core::any::type_name::<A>(),
index.index,
index.generation
)
@ -107,7 +107,7 @@ impl<A: Asset> Debug for AssetId<A> {
write!(
f,
"AssetId<{}>{{uuid: {}}}",
std::any::type_name::<A>(),
core::any::type_name::<A>(),
uuid
)
}
@ -117,7 +117,7 @@ impl<A: Asset> Debug for AssetId<A> {
impl<A: Asset> Hash for AssetId<A> {
#[inline]
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
self.internal().hash(state);
TypeId::of::<A>().hash(state);
}
@ -133,13 +133,13 @@ impl<A: Asset> PartialEq for AssetId<A> {
impl<A: Asset> Eq for AssetId<A> {}
impl<A: Asset> PartialOrd for AssetId<A> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl<A: Asset> Ord for AssetId<A> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
fn cmp(&self, other: &Self) -> core::cmp::Ordering {
self.internal().cmp(&other.internal())
}
}
@ -206,7 +206,7 @@ impl UntypedAssetId {
self.type_id(),
TypeId::of::<A>(),
"The target AssetId<{}>'s TypeId does not match the TypeId of this UntypedAssetId",
std::any::type_name::<A>()
core::any::type_name::<A>()
);
self.typed_unchecked()
}
@ -221,7 +221,7 @@ impl UntypedAssetId {
let Ok(id) = self.try_typed() else {
panic!(
"The target AssetId<{}>'s TypeId does not match the TypeId of this UntypedAssetId",
std::any::type_name::<A>()
core::any::type_name::<A>()
)
};
@ -254,7 +254,7 @@ impl UntypedAssetId {
}
impl Display for UntypedAssetId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let mut writer = f.debug_struct("UntypedAssetId");
match self {
UntypedAssetId::Index { index, type_id } => {
@ -282,14 +282,14 @@ impl Eq for UntypedAssetId {}
impl Hash for UntypedAssetId {
#[inline]
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
self.internal().hash(state);
self.type_id().hash(state);
}
}
impl Ord for UntypedAssetId {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
fn cmp(&self, other: &Self) -> core::cmp::Ordering {
self.type_id()
.cmp(&other.type_id())
.then_with(|| self.internal().cmp(&other.internal()))
@ -297,7 +297,7 @@ impl Ord for UntypedAssetId {
}
impl PartialOrd for UntypedAssetId {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
Some(self.cmp(other))
}
}
@ -367,7 +367,7 @@ impl<A: Asset> PartialEq<AssetId<A>> for UntypedAssetId {
impl<A: Asset> PartialOrd<UntypedAssetId> for AssetId<A> {
#[inline]
fn partial_cmp(&self, other: &UntypedAssetId) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &UntypedAssetId) -> Option<core::cmp::Ordering> {
if TypeId::of::<A>() != other.type_id() {
None
} else {
@ -378,7 +378,7 @@ impl<A: Asset> PartialOrd<UntypedAssetId> for AssetId<A> {
impl<A: Asset> PartialOrd<AssetId<A>> for UntypedAssetId {
#[inline]
fn partial_cmp(&self, other: &AssetId<A>) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &AssetId<A>) -> Option<core::cmp::Ordering> {
Some(other.partial_cmp(self)?.reverse())
}
}
@ -436,7 +436,7 @@ mod tests {
/// Simple utility to directly hash a value using a fixed hasher
fn hash<T: Hash>(data: &T) -> u64 {
use std::hash::Hasher;
use core::hash::Hasher;
let mut hasher = bevy_utils::AHasher::default();
data.hash(&mut hasher);


@ -1,8 +1,9 @@
use crate::io::{
get_meta_path, AssetReader, AssetReaderError, EmptyPathStream, PathStream, Reader, VecReader,
};
use alloc::ffi::CString;
use bevy_utils::tracing::error;
use std::{ffi::CString, path::Path};
use std::path::Path;
/// [`AssetReader`] implementation for Android devices, built on top of Android's [`AssetManager`].
///
@ -52,10 +53,7 @@ impl AssetReader for AndroidAssetReader {
Ok(stream)
}
async fn is_directory<'a>(
&'a self,
_path: &'a Path,
) -> std::result::Result<bool, AssetReaderError> {
async fn is_directory<'a>(&'a self, _path: &'a Path) -> Result<bool, AssetReaderError> {
error!("Reading directories is not supported with the AndroidAssetReader");
Ok(false)
}


@ -3,6 +3,7 @@ use crate::io::{
memory::Dir,
AssetSourceEvent, AssetWatcher,
};
use alloc::sync::Arc;
use bevy_utils::{tracing::warn, Duration, HashMap};
use notify_debouncer_full::{notify::RecommendedWatcher, Debouncer, FileIdMap};
use parking_lot::RwLock;
@ -10,7 +11,6 @@ use std::{
fs::File,
io::{BufReader, Read},
path::{Path, PathBuf},
sync::Arc,
};
/// A watcher for assets stored in the `embedded` asset source. Embedded assets are assets whose


@ -22,7 +22,7 @@ pub const EMBEDDED: &str = "embedded";
pub struct EmbeddedAssetRegistry {
dir: Dir,
#[cfg(feature = "embedded_watcher")]
root_paths: std::sync::Arc<parking_lot::RwLock<bevy_utils::HashMap<Box<Path>, PathBuf>>>,
root_paths: alloc::sync::Arc<parking_lot::RwLock<bevy_utils::HashMap<Box<Path>, PathBuf>>>,
}
impl EmbeddedAssetRegistry {
@ -107,7 +107,7 @@ impl EmbeddedAssetRegistry {
dir.clone(),
root_paths.clone(),
sender,
std::time::Duration::from_millis(300),
core::time::Duration::from_millis(300),
)))
})
.with_processed_watcher(move |sender| {
@ -115,7 +115,7 @@ impl EmbeddedAssetRegistry {
processed_dir.clone(),
processed_root_paths.clone(),
sender,
std::time::Duration::from_millis(300),
core::time::Duration::from_millis(300),
)))
});
}
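
Only the path changes in the `Duration` calls above: a tiny sketch of the fact this relies on, namely that `Duration` is defined in `core::time` and merely re-exported by `std::time`.

```rust
use core::time::Duration;

fn main() {
    // `Duration` lives in `core::time`; only clock types such as `Instant`
    // and `SystemTime` are std-only.
    let debounce = Duration::from_millis(300);
    assert_eq!(debounce.as_millis(), 300);
}
```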

View file

@ -73,7 +73,7 @@ impl FileAssetWriter {
/// watching for changes.
///
/// See `get_base_path` below.
pub fn new<P: AsRef<Path> + std::fmt::Debug>(path: P, create_root: bool) -> Self {
pub fn new<P: AsRef<Path> + core::fmt::Debug>(path: P, create_root: bool) -> Self {
let root_path = get_base_path().join(path.as_ref());
if create_root {
if let Err(e) = std::fs::create_dir_all(&root_path) {

View file

@ -6,12 +6,11 @@ use crate::io::{
Reader, Writer,
};
use core::{pin::Pin, task::Poll};
use std::{
fs::{read_dir, File},
io::{Read, Seek, Write},
path::{Path, PathBuf},
pin::Pin,
task::Poll,
};
use super::{FileAssetReader, FileAssetWriter};
@ -21,7 +20,7 @@ struct FileReader(File);
impl AsyncRead for FileReader {
fn poll_read(
self: Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
_cx: &mut core::task::Context<'_>,
buf: &mut [u8],
) -> Poll<std::io::Result<usize>> {
let this = self.get_mut();
@ -33,7 +32,7 @@ impl AsyncRead for FileReader {
impl AsyncSeek for FileReader {
fn poll_seek(
self: Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
_cx: &mut core::task::Context<'_>,
pos: std::io::SeekFrom,
) -> Poll<std::io::Result<u64>> {
let this = self.get_mut();
@ -57,7 +56,7 @@ struct FileWriter(File);
impl AsyncWrite for FileWriter {
fn poll_write(
self: Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
_cx: &mut core::task::Context<'_>,
buf: &[u8],
) -> Poll<std::io::Result<usize>> {
let this = self.get_mut();
@ -67,7 +66,7 @@ impl AsyncWrite for FileWriter {
fn poll_flush(
self: Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
_cx: &mut core::task::Context<'_>,
) -> Poll<std::io::Result<()>> {
let this = self.get_mut();
let flushed = this.0.flush();
@ -76,7 +75,7 @@ impl AsyncWrite for FileWriter {
fn poll_close(
self: Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
_cx: &mut core::task::Context<'_>,
) -> Poll<std::io::Result<()>> {
Poll::Ready(Ok(()))
}
@ -89,7 +88,7 @@ impl Stream for DirReader {
fn poll_next(
self: Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
_cx: &mut core::task::Context<'_>,
) -> Poll<Option<Self::Item>> {
let this = self.get_mut();
Poll::Ready(this.0.pop())
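
In these async wrappers, `Pin`, `Context`, and `Poll` move to `core`, while the I/O types (`std::io::Result`, `SeekFrom`) stay put because `std::io` has no `core` counterpart. A self-contained sketch of the same shape, assuming the `futures-io` and `futures-lite` crates this file already depends on; `SliceSource` is hypothetical.

```rust
use core::pin::Pin;
use core::task::{Context, Poll};
use std::io::Read; // `std::io` stays: it has no `core` counterpart.

use futures_io::AsyncRead;

/// Hypothetical blocking reader wrapped for async use, mirroring the
/// `FileReader`/`FileWriter`/`DirReader` wrappers in this file.
struct SliceSource(std::io::Cursor<Vec<u8>>);

impl AsyncRead for SliceSource {
    fn poll_read(
        self: Pin<&mut Self>,
        // Only the task machinery (`Pin`, `Context`, `Poll`) comes from
        // `core`; the error type is still `std::io::Error`.
        _cx: &mut Context<'_>,
        buf: &mut [u8],
    ) -> Poll<std::io::Result<usize>> {
        let this = self.get_mut();
        Poll::Ready(this.0.read(buf))
    }
}

fn main() {
    use futures_lite::AsyncReadExt;

    let mut source = SliceSource(std::io::Cursor::new(vec![1, 2, 3]));
    let mut buf = [0u8; 3];
    futures_lite::future::block_on(source.read_exact(&mut buf)).unwrap();
    assert_eq!(buf, [1, 2, 3]);
}
```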

View file

@ -1,8 +1,9 @@
use crate::io::{AssetReader, AssetReaderError, PathStream, Reader};
use alloc::sync::Arc;
use bevy_utils::HashMap;
use crossbeam_channel::{Receiver, Sender};
use parking_lot::RwLock;
use std::{path::Path, sync::Arc};
use std::path::Path;
/// A "gated" reader that will prevent asset reads from returning until
/// a given path has been "opened" using [`GateOpener`].

View file

@ -1,14 +1,13 @@
use crate::io::{AssetReader, AssetReaderError, PathStream, Reader};
use alloc::sync::Arc;
use bevy_utils::HashMap;
use core::{pin::Pin, task::Poll};
use futures_io::{AsyncRead, AsyncSeek};
use futures_lite::{ready, Stream};
use parking_lot::RwLock;
use std::{
io::SeekFrom,
path::{Path, PathBuf},
pin::Pin,
sync::Arc,
task::Poll,
};
#[derive(Default, Debug)]
@ -153,7 +152,7 @@ impl Stream for DirStream {
fn poll_next(
self: Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
_cx: &mut core::task::Context<'_>,
) -> Poll<Option<Self::Item>> {
let this = self.get_mut();
let dir = this.dir.0.read();
@ -234,7 +233,7 @@ struct DataReader {
impl AsyncRead for DataReader {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
cx: &mut core::task::Context<'_>,
buf: &mut [u8],
) -> Poll<futures_io::Result<usize>> {
if self.bytes_read >= self.data.value().len() {
@ -251,7 +250,7 @@ impl AsyncRead for DataReader {
impl AsyncSeek for DataReader {
fn poll_seek(
mut self: Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
_cx: &mut core::task::Context<'_>,
pos: SeekFrom,
) -> Poll<std::io::Result<u64>> {
let result = match pos {

View file

@ -21,15 +21,18 @@ mod source;
pub use futures_lite::AsyncWriteExt;
pub use source::*;
use alloc::sync::Arc;
use bevy_utils::{BoxedFuture, ConditionalSendFuture};
use core::{
mem::size_of,
pin::Pin,
task::{Context, Poll},
};
use futures_io::{AsyncRead, AsyncSeek, AsyncWrite};
use futures_lite::{ready, Stream};
use std::{
io::SeekFrom,
path::{Path, PathBuf},
pin::Pin,
sync::Arc,
task::{Context, Poll},
};
use thiserror::Error;
@ -713,7 +716,7 @@ impl Stream for EmptyPathStream {
fn poll_next(
self: Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
_cx: &mut core::task::Context<'_>,
) -> Poll<Option<Self::Item>> {
Poll::Ready(None)
}

View file

@ -3,10 +3,12 @@ use crate::{
processor::{AssetProcessorData, ProcessStatus},
AssetPath,
};
use alloc::sync::Arc;
use async_lock::RwLockReadGuardArc;
use bevy_utils::tracing::trace;
use core::{pin::Pin, task::Poll};
use futures_io::{AsyncRead, AsyncSeek};
use std::{io::SeekFrom, path::Path, pin::Pin, sync::Arc, task::Poll};
use std::{io::SeekFrom, path::Path};
use super::ErasedAssetReader;
@ -133,7 +135,7 @@ impl<'a> TransactionLockedReader<'a> {
impl AsyncRead for TransactionLockedReader<'_> {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
cx: &mut core::task::Context<'_>,
buf: &mut [u8],
) -> Poll<futures_io::Result<usize>> {
Pin::new(&mut self.reader).poll_read(cx, buf)
@ -143,7 +145,7 @@ impl AsyncRead for TransactionLockedReader<'_> {
impl AsyncSeek for TransactionLockedReader<'_> {
fn poll_seek(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
cx: &mut core::task::Context<'_>,
pos: SeekFrom,
) -> Poll<std::io::Result<u64>> {
Pin::new(&mut self.reader).poll_seek(cx, pos)

View file

@ -2,13 +2,14 @@ use crate::{
io::{processor_gated::ProcessorGatedReader, AssetSourceEvent, AssetWatcher},
processor::AssetProcessorData,
};
use alloc::sync::Arc;
use atomicow::CowArc;
use bevy_ecs::system::Resource;
use bevy_utils::{
tracing::{error, warn},
Duration, HashMap,
};
use std::{fmt::Display, hash::Hash, sync::Arc};
use core::{fmt::Display, hash::Hash};
use thiserror::Error;
use super::{ErasedAssetReader, ErasedAssetWriter};
@ -27,7 +28,7 @@ pub enum AssetSourceId<'a> {
}
impl<'a> Display for AssetSourceId<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self.as_str() {
None => write!(f, "AssetSourceId::Default"),
Some(v) => write!(f, "AssetSourceId::Name({v})"),
@ -114,7 +115,7 @@ impl From<String> for AssetSourceId<'static> {
}
impl<'a> Hash for AssetSourceId<'a> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
self.as_str().hash(state);
}
}

View file

@ -106,10 +106,7 @@ impl AssetReader for HttpWasmAssetReader {
Ok(stream)
}
async fn is_directory<'a>(
&'a self,
_path: &'a Path,
) -> std::result::Result<bool, AssetReaderError> {
async fn is_directory<'a>(&'a self, _path: &'a Path) -> Result<bool, AssetReaderError> {
error!("Reading directories is not supported with the HttpWasmAssetReader");
Ok(false)
}

View file

@ -147,6 +147,8 @@
html_favicon_url = "https://bevyengine.org/assets/icon.png"
)]
extern crate alloc;
pub mod io;
pub mod meta;
pub mod processor;
@ -199,6 +201,7 @@ use crate::{
io::{embedded::EmbeddedAssetRegistry, AssetSourceBuilder, AssetSourceBuilders, AssetSourceId},
processor::{AssetProcessor, Process},
};
use alloc::sync::Arc;
use bevy_app::{App, Last, Plugin, PreUpdate};
use bevy_ecs::{
reflect::AppTypeRegistry,
@ -207,7 +210,7 @@ use bevy_ecs::{
};
use bevy_reflect::{FromReflect, GetTypeRegistration, Reflect, TypePath};
use bevy_utils::{tracing::error, HashSet};
use std::{any::TypeId, sync::Arc};
use core::any::TypeId;
#[cfg(all(feature = "file_watcher", not(feature = "multi_threaded")))]
compile_error!(
@ -615,6 +618,7 @@ mod tests {
AssetPlugin, AssetServer, Assets, DependencyLoadState, LoadState,
RecursiveDependencyLoadState,
};
use alloc::sync::Arc;
use bevy_app::{App, Update};
use bevy_core::TaskPoolPlugin;
use bevy_ecs::{
@ -626,7 +630,7 @@ mod tests {
use bevy_reflect::TypePath;
use bevy_utils::{Duration, HashMap};
use serde::{Deserialize, Serialize};
use std::{path::Path, sync::Arc};
use std::path::Path;
use thiserror::Error;
#[derive(Asset, TypePath, Debug, Default)]
@ -1487,7 +1491,7 @@ mod tests {
);
// remove event is emitted
app.update();
let events = std::mem::take(&mut app.world_mut().resource_mut::<StoredEvents>().0);
let events = core::mem::take(&mut app.world_mut().resource_mut::<StoredEvents>().0);
let expected_events = vec![
AssetEvent::Added { id },
AssetEvent::Unused { id },
@ -1508,14 +1512,14 @@ mod tests {
// TODO: ideally it doesn't take two updates for the added event to emit
app.update();
let events = std::mem::take(&mut app.world_mut().resource_mut::<StoredEvents>().0);
let events = core::mem::take(&mut app.world_mut().resource_mut::<StoredEvents>().0);
let expected_events = vec![AssetEvent::Added { id: a_handle.id() }];
assert_eq!(events, expected_events);
gate_opener.open(dep_path);
loop {
app.update();
let events = std::mem::take(&mut app.world_mut().resource_mut::<StoredEvents>().0);
let events = core::mem::take(&mut app.world_mut().resource_mut::<StoredEvents>().0);
if events.is_empty() {
continue;
}
@ -1529,7 +1533,7 @@ mod tests {
break;
}
app.update();
let events = std::mem::take(&mut app.world_mut().resource_mut::<StoredEvents>().0);
let events = core::mem::take(&mut app.world_mut().resource_mut::<StoredEvents>().0);
let expected_events = vec![AssetEvent::Added {
id: dep_handle.id(),
}];
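
This file adds `extern crate alloc;` to the crate root before switching to `alloc::sync::Arc` and `core::mem::take`. A minimal sketch of why that declaration is required: on edition 2018 and later, `alloc` is not in the extern prelude of a crate built with `std`, so `alloc::...` paths only resolve once the crate root names the crate explicitly. The types below are illustrative, not Bevy's.

```rust
// Crate-root sketch; `Entry` and `take_pending` are illustrative names.
// Without this declaration, `use alloc::sync::Arc;` would fail to resolve.
extern crate alloc;

use alloc::sync::Arc;
use core::any::TypeId;

struct Entry {
    type_id: TypeId,
    data: Arc<Vec<u8>>, // `Arc` comes from `alloc::sync`, not `std::sync`.
}

fn take_pending(pending: &mut Vec<Entry>) -> Vec<Entry> {
    // `core::mem::take` replaces the old `std::mem::take` call sites.
    core::mem::take(pending)
}

fn main() {
    let mut pending = vec![Entry {
        type_id: TypeId::of::<u32>(),
        data: Arc::new(vec![1, 2, 3]),
    }];
    let drained = take_pending(&mut pending);
    assert!(pending.is_empty());
    assert_eq!(drained.len(), 1);
    assert_eq!(drained[0].type_id, TypeId::of::<u32>());
}
```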

View file

@ -9,13 +9,11 @@ use crate::{
use atomicow::CowArc;
use bevy_ecs::world::World;
use bevy_utils::{BoxedFuture, ConditionalSendFuture, HashMap, HashSet};
use core::any::{Any, TypeId};
use downcast_rs::{impl_downcast, Downcast};
use ron::error::SpannedError;
use serde::{Deserialize, Serialize};
use std::{
any::{Any, TypeId},
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use thiserror::Error;
/// Loads an [`Asset`] from a given byte [`Reader`]. This can accept [`AssetLoader::Settings`], which configure how the [`Asset`]
@ -30,7 +28,7 @@ pub trait AssetLoader: Send + Sync + 'static {
/// The settings type used by this [`AssetLoader`].
type Settings: Settings + Default + Serialize + for<'a> Deserialize<'a>;
/// The type of [error](`std::error::Error`) which could be encountered by this loader.
type Error: Into<Box<dyn std::error::Error + Send + Sync + 'static>>;
type Error: Into<Box<dyn core::error::Error + Send + Sync + 'static>>;
/// Asynchronously loads [`AssetLoader::Asset`] (and any other labeled assets) from the bytes provided by [`Reader`].
fn load<'a>(
&'a self,
@ -56,7 +54,7 @@ pub trait ErasedAssetLoader: Send + Sync + 'static {
load_context: LoadContext<'a>,
) -> BoxedFuture<
'a,
Result<ErasedLoadedAsset, Box<dyn std::error::Error + Send + Sync + 'static>>,
Result<ErasedLoadedAsset, Box<dyn core::error::Error + Send + Sync + 'static>>,
>;
/// Returns a list of extensions supported by this asset loader, without the preceding dot.
@ -87,7 +85,7 @@ where
mut load_context: LoadContext<'a>,
) -> BoxedFuture<
'a,
Result<ErasedLoadedAsset, Box<dyn std::error::Error + Send + Sync + 'static>>,
Result<ErasedLoadedAsset, Box<dyn core::error::Error + Send + Sync + 'static>>,
> {
Box::pin(async move {
let settings = meta
@ -119,7 +117,7 @@ where
}
fn type_name(&self) -> &'static str {
std::any::type_name::<L>()
core::any::type_name::<L>()
}
fn type_id(&self) -> TypeId {
@ -127,7 +125,7 @@ where
}
fn asset_type_name(&self) -> &'static str {
std::any::type_name::<L::Asset>()
core::any::type_name::<L::Asset>()
}
fn asset_type_id(&self) -> TypeId {
@ -288,7 +286,7 @@ impl<A: Asset> AssetContainer for A {
}
fn asset_type_name(&self) -> &'static str {
std::any::type_name::<A>()
core::any::type_name::<A>()
}
}
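
The error bounds in this file now name `core::error::Error`, which is available in `core` on stable Rust 1.81 and later and is the trait that `std::error::Error` re-exports. A small sketch with a hypothetical error type, showing that nothing else changes at the impl site.

```rust
use core::error::Error; // in `core` since Rust 1.81; same trait as `std::error::Error`
use core::fmt;

/// Hypothetical loader error, for illustration only.
#[derive(Debug)]
struct DecodeError {
    offset: usize,
}

impl fmt::Display for DecodeError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "failed to decode asset at byte {}", self.offset)
    }
}

// Implementing `core::error::Error` is all that is needed to satisfy bounds
// written as `Into<Box<dyn core::error::Error + Send + Sync + 'static>>`.
impl Error for DecodeError {}

fn main() {
    let boxed: Box<dyn Error + Send + Sync + 'static> = Box::new(DecodeError { offset: 12 });
    assert_eq!(boxed.to_string(), "failed to decode asset at byte 12");
}
```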

View file

@ -7,7 +7,8 @@ use crate::{
Asset, AssetLoadError, AssetPath, ErasedAssetLoader, ErasedLoadedAsset, Handle, LoadContext,
LoadDirectError, LoadedAsset, LoadedUntypedAsset,
};
use std::{any::TypeId, sync::Arc};
use alloc::sync::Arc;
use core::any::TypeId;
// Utility type for handling the sources of reader references
enum ReaderRef<'a> {

View file

@ -220,7 +220,7 @@ pub(crate) fn meta_transform_settings<S: Settings>(
} else {
error!(
"Configured settings type {} does not match AssetLoader settings type",
std::any::type_name::<S>(),
core::any::type_name::<S>(),
);
}
}
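
`type_name` and `TypeId`, used heavily throughout this diff, both live in `core::any`; the old `std::any` paths were re-exports of the same items. A tiny self-contained sketch with a hypothetical settings type.

```rust
use core::any::{type_name, TypeId};

/// Hypothetical settings type, used only for illustration.
#[derive(Default)]
struct NoiseSettings {
    seed: u64,
}

fn main() {
    let settings = NoiseSettings::default();
    // Same functions as before, just reached through `core::any`.
    println!("configured settings type: {}", type_name::<NoiseSettings>());
    println!("seed: {}", settings.seed);
    assert_ne!(TypeId::of::<NoiseSettings>(), TypeId::of::<u64>());
}
```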

View file

@ -1,13 +1,13 @@
use crate::io::AssetSourceId;
use atomicow::CowArc;
use bevy_reflect::{Reflect, ReflectDeserialize, ReflectSerialize};
use serde::{de::Visitor, Deserialize, Serialize};
use std::{
use core::{
fmt::{Debug, Display},
hash::Hash,
ops::Deref,
path::{Path, PathBuf},
};
use serde::{de::Visitor, Deserialize, Serialize};
use std::path::{Path, PathBuf};
use thiserror::Error;
/// Represents a path to an asset in a "virtual filesystem".
@ -57,13 +57,13 @@ pub struct AssetPath<'a> {
}
impl<'a> Debug for AssetPath<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
Display::fmt(self, f)
}
}
impl<'a> Display for AssetPath<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
if let AssetSourceId::Name(name) = self.source() {
write!(f, "{name}://")?;
}
@ -588,7 +588,7 @@ struct AssetPathVisitor;
impl<'de> Visitor<'de> for AssetPathVisitor {
type Value = AssetPath<'static>;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result {
formatter.write_str("string AssetPath")
}

View file

@ -56,6 +56,7 @@ use crate::{
AssetLoadError, AssetMetaCheck, AssetPath, AssetServer, AssetServerMode, DeserializeMetaError,
MissingAssetLoaderForExtensionError,
};
use alloc::{collections::VecDeque, sync::Arc};
use bevy_ecs::prelude::*;
use bevy_tasks::IoTaskPool;
use bevy_utils::{
@ -70,11 +71,7 @@ use bevy_utils::{
use futures_io::ErrorKind;
use futures_lite::{AsyncReadExt, AsyncWriteExt, StreamExt};
use parking_lot::RwLock;
use std::{
collections::VecDeque,
path::{Path, PathBuf},
sync::Arc,
};
use std::path::{Path, PathBuf};
use thiserror::Error;
/// A "background" asset processor that reads asset values from a source [`AssetSource`] (which corresponds to an [`AssetReader`](crate::io::AssetReader) / [`AssetWriter`](crate::io::AssetWriter) pair),
@ -507,7 +504,7 @@ impl AssetProcessor {
async fn try_reprocessing_queued(&self) {
loop {
let mut check_reprocess_queue =
std::mem::take(&mut self.data.asset_infos.write().await.check_reprocess_queue);
core::mem::take(&mut self.data.asset_infos.write().await.check_reprocess_queue);
IoTaskPool::get().scope(|scope| {
for path in check_reprocess_queue.drain(..) {
let processor = self.clone();
@ -529,13 +526,13 @@ impl AssetProcessor {
let mut process_plans = self.data.processors.write();
#[cfg(feature = "trace")]
let processor = InstrumentedAssetProcessor(processor);
process_plans.insert(std::any::type_name::<P>(), Arc::new(processor));
process_plans.insert(core::any::type_name::<P>(), Arc::new(processor));
}
/// Set the default processor for the given `extension`. Make sure `P` is registered with [`AssetProcessor::register_processor`].
pub fn set_default_processor<P: Process>(&self, extension: &str) {
let mut default_processors = self.data.default_processors.write();
default_processors.insert(extension.into(), std::any::type_name::<P>());
default_processors.insert(extension.into(), core::any::type_name::<P>());
}
/// Returns the default processor for the given `extension`, if it exists.
@ -946,7 +943,7 @@ impl AssetProcessor {
}
LogEntryError::UnfinishedTransaction(path) => {
debug!("Asset {path:?} did not finish processing. Clearing state for that asset");
let mut unrecoverable_err = |message: &dyn std::fmt::Display| {
let mut unrecoverable_err = |message: &dyn core::fmt::Display| {
error!("Failed to remove asset {path:?}: {message}");
state_is_valid = false;
};
@ -1112,7 +1109,7 @@ impl<T: Process> Process for InstrumentedAssetProcessor<T> {
};
let span = info_span!(
"asset processing",
processor = std::any::type_name::<T>(),
processor = core::any::type_name::<T>(),
asset = context.path().to_string(),
);
self.0.process(context, meta, writer).instrument(span)
@ -1350,7 +1347,7 @@ impl ProcessorAssetInfos {
info.dependants
);
self.non_existent_dependants
.insert(old.clone(), std::mem::take(&mut info.dependants));
.insert(old.clone(), core::mem::take(&mut info.dependants));
}
if let Some(processed_info) = &info.processed_info {
// Update "dependant" lists for this asset's "process dependencies" to use new path.

View file

@ -11,8 +11,8 @@ use crate::{
MissingAssetLoaderForExtensionError, MissingAssetLoaderForTypeNameError,
};
use bevy_utils::{BoxedFuture, ConditionalSendFuture};
use core::marker::PhantomData;
use serde::{Deserialize, Serialize};
use std::marker::PhantomData;
use thiserror::Error;
/// Asset "processor" logic that reads input asset bytes (stored on [`ProcessContext`]), processes the value in some way,
@ -162,9 +162,9 @@ pub enum ProcessError {
#[error("The wrong meta type was passed into a processor. This is probably an internal implementation error.")]
WrongMetaType,
#[error("Encountered an error while saving the asset: {0}")]
AssetSaveError(#[from] Box<dyn std::error::Error + Send + Sync + 'static>),
AssetSaveError(#[from] Box<dyn core::error::Error + Send + Sync + 'static>),
#[error("Encountered an error while transforming the asset: {0}")]
AssetTransformError(Box<dyn std::error::Error + Send + Sync + 'static>),
AssetTransformError(Box<dyn core::error::Error + Send + Sync + 'static>),
#[error("Assets without extensions are not supported.")]
ExtensionRequired,
}
@ -189,7 +189,7 @@ where
return Err(ProcessError::WrongMetaType);
};
let loader_meta = AssetMeta::<Loader, ()>::new(AssetAction::Load {
loader: std::any::type_name::<Loader>().to_string(),
loader: core::any::type_name::<Loader>().to_string(),
settings: settings.loader_settings,
});
let pre_transformed_asset = TransformedAsset::<Loader::Asset>::from_loaded(
@ -246,7 +246,7 @@ impl<P: Process> ErasedProcessor for P {
let loader_settings = <P as Process>::process(self, context, *meta, writer).await?;
let output_meta: Box<dyn AssetMetaDyn> =
Box::new(AssetMeta::<P::OutputLoader, ()>::new(AssetAction::Load {
loader: std::any::type_name::<P::OutputLoader>().to_string(),
loader: core::any::type_name::<P::OutputLoader>().to_string(),
settings: loader_settings,
}));
Ok(output_meta)
@ -260,7 +260,7 @@ impl<P: Process> ErasedProcessor for P {
fn default_meta(&self) -> Box<dyn AssetMetaDyn> {
Box::new(AssetMeta::<(), P>::new(AssetAction::Process {
processor: std::any::type_name::<P>().to_string(),
processor: core::any::type_name::<P>().to_string(),
settings: P::Settings::default(),
}))
}
@ -316,7 +316,7 @@ impl<'a> ProcessContext<'a> {
meta: AssetMeta<L, ()>,
) -> Result<ErasedLoadedAsset, AssetLoadError> {
let server = &self.processor.server;
let loader_name = std::any::type_name::<L>();
let loader_name = core::any::type_name::<L>();
let loader = server.get_asset_loader_with_type_name(loader_name).await?;
let mut reader = SliceReader::new(self.asset_bytes);
let loaded_asset = server

View file

@ -1,4 +1,4 @@
use std::any::{Any, TypeId};
use core::any::{Any, TypeId};
use bevy_ecs::world::{unsafe_world_cell::UnsafeWorldCell, World};
use bevy_reflect::{FromReflect, FromType, PartialReflect, Reflect};
@ -243,7 +243,7 @@ impl<A: Asset> FromType<Handle<A>> for ReflectHandle {
#[cfg(test)]
mod tests {
use std::any::TypeId;
use core::any::TypeId;
use crate as bevy_asset;
use crate::{Asset, AssetApp, AssetPlugin, ReflectAsset, UntypedHandle};

View file

@ -4,8 +4,8 @@ use crate::{
};
use atomicow::CowArc;
use bevy_utils::{BoxedFuture, ConditionalSendFuture, HashMap};
use core::{borrow::Borrow, hash::Hash, ops::Deref};
use serde::{Deserialize, Serialize};
use std::{borrow::Borrow, hash::Hash, ops::Deref};
/// Saves an [`Asset`] of a given [`AssetSaver::Asset`] type. [`AssetSaver::OutputLoader`] will then be used to load the saved asset
/// in the final deployed application. The saver should produce asset bytes in a format that [`AssetSaver::OutputLoader`] can read.
@ -21,7 +21,7 @@ pub trait AssetSaver: Send + Sync + 'static {
/// The type of [`AssetLoader`] used to load this [`Asset`]
type OutputLoader: AssetLoader;
/// The type of [error](`std::error::Error`) which could be encountered by this saver.
type Error: Into<Box<dyn std::error::Error + Send + Sync + 'static>>;
type Error: Into<Box<dyn core::error::Error + Send + Sync + 'static>>;
/// Saves the given runtime [`Asset`] by writing it to a byte format using `writer`. The passed in `settings` can influence how the
/// `asset` is saved.
@ -44,7 +44,7 @@ pub trait ErasedAssetSaver: Send + Sync + 'static {
writer: &'a mut Writer,
asset: &'a ErasedLoadedAsset,
settings: &'a dyn Settings,
) -> BoxedFuture<'a, Result<(), Box<dyn std::error::Error + Send + Sync + 'static>>>;
) -> BoxedFuture<'a, Result<(), Box<dyn core::error::Error + Send + Sync + 'static>>>;
/// The type name of the [`AssetSaver`].
fn type_name(&self) -> &'static str;
@ -56,7 +56,7 @@ impl<S: AssetSaver> ErasedAssetSaver for S {
writer: &'a mut Writer,
asset: &'a ErasedLoadedAsset,
settings: &'a dyn Settings,
) -> BoxedFuture<'a, Result<(), Box<dyn std::error::Error + Send + Sync + 'static>>> {
) -> BoxedFuture<'a, Result<(), Box<dyn core::error::Error + Send + Sync + 'static>>> {
Box::pin(async move {
let settings = settings
.downcast_ref::<S::Settings>()
@ -69,7 +69,7 @@ impl<S: AssetSaver> ErasedAssetSaver for S {
})
}
fn type_name(&self) -> &'static str {
std::any::type_name::<S>()
core::any::type_name::<S>()
}
}

View file

@ -4,14 +4,12 @@ use crate::{
Handle, InternalAssetEvent, LoadState, RecursiveDependencyLoadState, StrongHandle,
UntypedAssetId, UntypedHandle,
};
use alloc::sync::{Arc, Weak};
use bevy_ecs::world::World;
use bevy_tasks::Task;
use bevy_utils::{tracing::warn, Entry, HashMap, HashSet, TypeIdMap};
use core::any::TypeId;
use crossbeam_channel::Sender;
use std::{
any::TypeId,
sync::{Arc, Weak},
};
use thiserror::Error;
#[derive(Debug)]
@ -79,8 +77,8 @@ pub(crate) struct AssetInfos {
pub(crate) pending_tasks: HashMap<UntypedAssetId, Task<()>>,
}
impl std::fmt::Debug for AssetInfos {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl core::fmt::Debug for AssetInfos {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("AssetInfos")
.field("path_to_id", &self.path_to_id)
.field("infos", &self.infos)
@ -164,7 +162,7 @@ impl AssetInfos {
);
// it is ok to unwrap because TypeId was specified above
let (handle, should_load) =
unwrap_with_context(result, std::any::type_name::<A>()).unwrap();
unwrap_with_context(result, core::any::type_name::<A>()).unwrap();
(handle.typed_unchecked(), should_load)
}
@ -499,7 +497,7 @@ impl AssetInfos {
rec_dep_load_state,
RecursiveDependencyLoadState::Loaded | RecursiveDependencyLoadState::Failed(_)
) {
Some(std::mem::take(
Some(core::mem::take(
&mut info.dependants_waiting_on_recursive_dep_load,
))
} else {
@ -507,7 +505,7 @@ impl AssetInfos {
};
(
std::mem::take(&mut info.dependants_waiting_on_load),
core::mem::take(&mut info.dependants_waiting_on_load),
dependants_waiting_on_rec_load,
)
};
@ -560,7 +558,7 @@ impl AssetInfos {
.send(InternalAssetEvent::LoadedWithDependencies { id: waiting_id })
.unwrap();
}
Some(std::mem::take(
Some(core::mem::take(
&mut info.dependants_waiting_on_recursive_dep_load,
))
} else {
@ -588,7 +586,7 @@ impl AssetInfos {
info.loading_rec_dependencies.remove(&failed_id);
info.failed_rec_dependencies.insert(failed_id);
info.rec_dep_load_state = RecursiveDependencyLoadState::Failed(error.clone());
Some(std::mem::take(
Some(core::mem::take(
&mut info.dependants_waiting_on_recursive_dep_load,
))
} else {
@ -618,8 +616,8 @@ impl AssetInfos {
info.dep_load_state = DependencyLoadState::Failed(error.clone());
info.rec_dep_load_state = RecursiveDependencyLoadState::Failed(error.clone());
(
std::mem::take(&mut info.dependants_waiting_on_load),
std::mem::take(&mut info.dependants_waiting_on_recursive_dep_load),
core::mem::take(&mut info.dependants_waiting_on_load),
core::mem::take(&mut info.dependants_waiting_on_recursive_dep_load),
)
};

View file

@ -2,6 +2,7 @@ use crate::{
loader::{AssetLoader, ErasedAssetLoader},
path::AssetPath,
};
use alloc::sync::Arc;
use async_broadcast::RecvError;
use bevy_tasks::IoTaskPool;
use bevy_utils::{
@ -13,7 +14,7 @@ use bevy_utils::{
tracing::{info_span, instrument::Instrument},
ConditionalSendFuture,
};
use std::{any::TypeId, sync::Arc};
use core::any::TypeId;
use thiserror::Error;
#[derive(Default)]
@ -33,9 +34,9 @@ impl AssetLoaders {
/// Registers a new [`AssetLoader`]. [`AssetLoader`]s must be registered before they can be used.
pub(crate) fn push<L: AssetLoader>(&mut self, loader: L) {
let type_name = std::any::type_name::<L>();
let type_name = core::any::type_name::<L>();
let loader_asset_type = TypeId::of::<L::Asset>();
let loader_asset_type_name = std::any::type_name::<L::Asset>();
let loader_asset_type_name = core::any::type_name::<L::Asset>();
#[cfg(feature = "trace")]
let loader = InstrumentedAssetLoader(loader);
@ -80,7 +81,7 @@ impl AssetLoaders {
self.loaders.push(MaybeAssetLoader::Ready(loader));
} else {
let maybe_loader = std::mem::replace(
let maybe_loader = core::mem::replace(
self.loaders.get_mut(loader_index).unwrap(),
MaybeAssetLoader::Ready(loader.clone()),
);
@ -103,8 +104,8 @@ impl AssetLoaders {
/// real loader is added.
pub(crate) fn reserve<L: AssetLoader>(&mut self, extensions: &[&str]) {
let loader_asset_type = TypeId::of::<L::Asset>();
let loader_asset_type_name = std::any::type_name::<L::Asset>();
let type_name = std::any::type_name::<L>();
let loader_asset_type_name = core::any::type_name::<L::Asset>();
let type_name = core::any::type_name::<L>();
let loader_index = self.loaders.len();
@ -268,7 +269,7 @@ impl AssetLoaders {
pub(crate) fn get_by_path(&self, path: &AssetPath<'_>) -> Option<MaybeAssetLoader> {
let extension = path.get_full_extension()?;
let result = std::iter::once(extension.as_str())
let result = core::iter::once(extension.as_str())
.chain(AssetPath::iter_secondary_extensions(&extension))
.filter_map(|extension| self.extension_to_loaders.get(extension)?.last().copied())
.find_map(|index| self.get_by_index(index))?;
@ -318,7 +319,7 @@ impl<T: AssetLoader> AssetLoader for InstrumentedAssetLoader<T> {
) -> impl ConditionalSendFuture<Output = Result<Self::Asset, Self::Error>> {
let span = info_span!(
"asset loading",
loader = std::any::type_name::<T>(),
loader = core::any::type_name::<T>(),
asset = load_context.asset_path().to_string(),
);
self.0.load(reader, settings, load_context).instrument(span)
@ -331,8 +332,8 @@ impl<T: AssetLoader> AssetLoader for InstrumentedAssetLoader<T> {
#[cfg(test)]
mod tests {
use core::marker::PhantomData;
use std::{
marker::PhantomData,
path::Path,
sync::mpsc::{channel, Receiver, Sender},
};
@ -392,7 +393,7 @@ mod tests {
Err(format!(
"Loaded {}:{}",
std::any::type_name::<Self::Asset>(),
core::any::type_name::<Self::Asset>(),
N
))
}
@ -426,7 +427,7 @@ mod tests {
let loader = block_on(
loaders
.get_by_name(std::any::type_name::<Loader<A, 1, 0>>())
.get_by_name(core::any::type_name::<Loader<A, 1, 0>>())
.unwrap()
.get(),
)

View file

@ -17,6 +17,7 @@ use crate::{
DeserializeMetaError, ErasedLoadedAsset, Handle, LoadedUntypedAsset, UntypedAssetId,
UntypedAssetLoadFailedEvent, UntypedHandle,
};
use alloc::sync::Arc;
use atomicow::CowArc;
use bevy_ecs::prelude::*;
use bevy_tasks::IoTaskPool;
@ -24,18 +25,17 @@ use bevy_utils::{
tracing::{error, info},
HashSet,
};
use core::{
any::{Any, TypeId},
future::Future,
panic::AssertUnwindSafe,
};
use crossbeam_channel::{Receiver, Sender};
use futures_lite::{FutureExt, StreamExt};
use info::*;
use loaders::*;
use parking_lot::RwLock;
use std::{
any::{Any, TypeId},
future::Future,
panic::AssertUnwindSafe,
path::{Path, PathBuf},
sync::Arc,
};
use std::path::{Path, PathBuf};
use thiserror::Error;
/// Loads and tracks the state of [`Asset`] values from a configured [`AssetReader`](crate::io::AssetReader). This can be used to kick off new asset loads and
@ -733,13 +733,13 @@ impl AssetServer {
///
/// After the asset has been fully loaded, it will show up in the relevant [`Assets`] storage.
#[must_use = "not using the returned strong handle may result in the unexpected release of the asset"]
pub fn add_async<A: Asset, E: std::error::Error + Send + Sync + 'static>(
pub fn add_async<A: Asset, E: core::error::Error + Send + Sync + 'static>(
&self,
future: impl Future<Output = Result<A, E>> + Send + 'static,
) -> Handle<A> {
let mut infos = self.data.infos.write();
let handle =
infos.create_loading_handle_untyped(TypeId::of::<A>(), std::any::type_name::<A>());
infos.create_loading_handle_untyped(TypeId::of::<A>(), core::any::type_name::<A>());
let id = handle.id();
let event_sender = self.data.asset_event_sender.clone();
@ -1523,7 +1523,7 @@ pub enum AssetLoadError {
pub struct AssetLoaderError {
path: AssetPath<'static>,
loader_name: &'static str,
error: Arc<dyn std::error::Error + Send + Sync + 'static>,
error: Arc<dyn core::error::Error + Send + Sync + 'static>,
}
impl PartialEq for AssetLoaderError {
@ -1547,7 +1547,7 @@ impl AssetLoaderError {
#[derive(Error, Debug, Clone)]
#[error("An error occurred while resolving an asset added by `add_async`: {error}")]
pub struct AddAsyncError {
error: Arc<dyn std::error::Error + Send + Sync + 'static>,
error: Arc<dyn core::error::Error + Send + Sync + 'static>,
}
impl PartialEq for AddAsyncError {
@ -1593,8 +1593,8 @@ fn format_missing_asset_ext(exts: &[String]) -> String {
}
}
impl std::fmt::Debug for AssetServer {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl core::fmt::Debug for AssetServer {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("AssetServer")
.field("info", &self.data.infos.read())
.finish()

View file

@ -1,14 +1,14 @@
use crate::{meta::Settings, Asset, ErasedLoadedAsset, Handle, LabeledAsset, UntypedHandle};
use atomicow::CowArc;
use bevy_utils::{ConditionalSendFuture, HashMap};
use serde::{Deserialize, Serialize};
use std::{
use core::{
borrow::Borrow,
convert::Infallible,
hash::Hash,
marker::PhantomData,
ops::{Deref, DerefMut},
};
use serde::{Deserialize, Serialize};
/// Transforms an [`Asset`] of a given [`AssetTransformer::AssetInput`] type to an [`Asset`] of [`AssetTransformer::AssetOutput`] type.
///
@ -21,7 +21,7 @@ pub trait AssetTransformer: Send + Sync + 'static {
/// The settings type used by this [`AssetTransformer`].
type Settings: Settings + Default + Serialize + for<'a> Deserialize<'a>;
/// The type of [error](`std::error::Error`) which could be encountered by this transformer.
type Error: Into<Box<dyn std::error::Error + Send + Sync + 'static>>;
type Error: Into<Box<dyn core::error::Error + Send + Sync + 'static>>;
/// Transforms the given [`TransformedAsset`] to [`AssetTransformer::AssetOutput`].
/// The [`TransformedAsset`]'s `labeled_assets` can be altered to add new Labeled Sub-Assets

View file

@ -33,7 +33,7 @@ impl Default for AudioOutput {
fn default() -> Self {
if let Ok((stream, stream_handle)) = OutputStream::try_default() {
// We leak `OutputStream` to prevent the audio from stopping.
std::mem::forget(stream);
core::mem::forget(stream);
Self {
stream_handle: Some(stream_handle),
}

View file

@ -1,6 +1,7 @@
use alloc::sync::Arc;
use bevy_asset::{io::Reader, Asset, AssetLoader, LoadContext};
use bevy_reflect::TypePath;
use std::{io::Cursor, sync::Arc};
use std::io::Cursor;
/// A source of audio data
#[derive(Asset, Debug, Clone, TypePath)]

View file

@ -27,6 +27,8 @@
//! }
//! ```
extern crate alloc;
mod audio;
mod audio_output;
mod audio_source;

View file

@ -12,12 +12,12 @@ pub struct Pitch {
/// Frequency at which sound will be played
pub frequency: f32,
/// Duration for which sound will be played
pub duration: std::time::Duration,
pub duration: core::time::Duration,
}
impl Pitch {
/// Creates a new note
pub fn new(frequency: f32, duration: std::time::Duration) -> Self {
pub fn new(frequency: f32, duration: core::time::Duration) -> Self {
Pitch {
frequency,
duration,

View file

@ -110,8 +110,8 @@ pub struct TestColor {{
struct VariablePrecision(f32);
impl std::fmt::Display for VariablePrecision {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl core::fmt::Display for VariablePrecision {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
if self.0.fract() == 0.0 {
return write!(f, "{}.0", self.0);
}

View file

@ -137,7 +137,7 @@ pub(crate) fn lerp_hue(a: f32, b: f32, t: f32) -> f32 {
#[cfg(test)]
mod tests {
use std::fmt::Debug;
use core::fmt::Debug;
use super::*;
use crate::{testing::assert_approx_eq, Hsla};

View file

@ -1,4 +1,4 @@
use std::ops::Range;
use core::ops::Range;
use crate::Mix;

View file

@ -160,7 +160,7 @@ where
macro_rules! impl_componentwise_vector_space {
($ty: ident, [$($element: ident),+]) => {
impl std::ops::Add<Self> for $ty {
impl core::ops::Add<Self> for $ty {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
@ -170,13 +170,13 @@ macro_rules! impl_componentwise_vector_space {
}
}
impl std::ops::AddAssign<Self> for $ty {
impl core::ops::AddAssign<Self> for $ty {
fn add_assign(&mut self, rhs: Self) {
*self = *self + rhs;
}
}
impl std::ops::Neg for $ty {
impl core::ops::Neg for $ty {
type Output = Self;
fn neg(self) -> Self::Output {
@ -186,7 +186,7 @@ macro_rules! impl_componentwise_vector_space {
}
}
impl std::ops::Sub<Self> for $ty {
impl core::ops::Sub<Self> for $ty {
type Output = Self;
fn sub(self, rhs: Self) -> Self::Output {
@ -196,13 +196,13 @@ macro_rules! impl_componentwise_vector_space {
}
}
impl std::ops::SubAssign<Self> for $ty {
impl core::ops::SubAssign<Self> for $ty {
fn sub_assign(&mut self, rhs: Self) {
*self = *self - rhs;
}
}
impl std::ops::Mul<f32> for $ty {
impl core::ops::Mul<f32> for $ty {
type Output = Self;
fn mul(self, rhs: f32) -> Self::Output {
@ -212,7 +212,7 @@ macro_rules! impl_componentwise_vector_space {
}
}
impl std::ops::Mul<$ty> for f32 {
impl core::ops::Mul<$ty> for f32 {
type Output = $ty;
fn mul(self, rhs: $ty) -> Self::Output {
@ -222,13 +222,13 @@ macro_rules! impl_componentwise_vector_space {
}
}
impl std::ops::MulAssign<f32> for $ty {
impl core::ops::MulAssign<f32> for $ty {
fn mul_assign(&mut self, rhs: f32) {
*self = *self * rhs;
}
}
impl std::ops::Div<f32> for $ty {
impl core::ops::Div<f32> for $ty {
type Output = Self;
fn div(self, rhs: f32) -> Self::Output {
@ -238,7 +238,7 @@ macro_rules! impl_componentwise_vector_space {
}
}
impl std::ops::DivAssign<f32> for $ty {
impl core::ops::DivAssign<f32> for $ty {
fn div_assign(&mut self, rhs: f32) {
*self = *self / rhs;
}
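
The `impl_componentwise_vector_space!` macro now expands to `core::ops` impls. For reference, here is a hand-written sketch of the kind of impl it generates, using a hypothetical `Duotone` type; only the trait path changes, because `std::ops` re-exports the `core::ops` traits.

```rust
use core::ops::{Add, Mul};

/// Hypothetical two-channel color standing in for the real color types
/// covered by the macro.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Duotone {
    a: f32,
    b: f32,
}

impl Add for Duotone {
    type Output = Self;
    fn add(self, rhs: Self) -> Self {
        Self {
            a: self.a + rhs.a,
            b: self.b + rhs.b,
        }
    }
}

impl Mul<f32> for Duotone {
    type Output = Self;
    fn mul(self, rhs: f32) -> Self {
        Self {
            a: self.a * rhs,
            b: self.b * rhs,
        }
    }
}

fn main() {
    let mixed = Duotone { a: 1.0, b: 2.0 } + Duotone { a: 0.5, b: 0.5 } * 2.0;
    assert_eq!(mixed, Duotone { a: 2.0, b: 3.0 });
}
```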

View file

@ -425,7 +425,7 @@ impl From<Srgba> for Xyza {
pub enum HexColorError {
/// Parsing error.
#[error("Invalid hex string")]
Parse(#[from] std::num::ParseIntError),
Parse(#[from] core::num::ParseIntError),
/// Invalid length.
#[error("Unexpected length of hex string")]
Length,

View file

@ -7,6 +7,8 @@
//! This crate provides core functionality for Bevy Engine.
extern crate alloc;
mod name;
#[cfg(feature = "serialize")]
mod serde;
@ -29,7 +31,7 @@ pub mod prelude {
use bevy_app::prelude::*;
use bevy_ecs::prelude::*;
use std::marker::PhantomData;
use core::marker::PhantomData;
#[cfg(not(target_arch = "wasm32"))]
use bevy_tasks::tick_global_task_pools_on_main_thread;

View file

@ -2,13 +2,13 @@
use bevy_ecs::reflect::ReflectComponent;
use bevy_ecs::{component::Component, entity::Entity, query::QueryData};
use alloc::borrow::Cow;
#[cfg(feature = "bevy_reflect")]
use bevy_reflect::std_traits::ReflectDefault;
#[cfg(feature = "bevy_reflect")]
use bevy_reflect::Reflect;
use bevy_utils::AHasher;
use std::{
borrow::Cow,
use core::{
hash::{Hash, Hasher},
ops::Deref,
};
@ -86,17 +86,17 @@ impl Name {
}
}
impl std::fmt::Display for Name {
impl core::fmt::Display for Name {
#[inline(always)]
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
std::fmt::Display::fmt(&self.name, f)
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
core::fmt::Display::fmt(&self.name, f)
}
}
impl std::fmt::Debug for Name {
impl core::fmt::Debug for Name {
#[inline(always)]
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
std::fmt::Debug::fmt(&self.name, f)
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
core::fmt::Debug::fmt(&self.name, f)
}
}
@ -130,12 +130,12 @@ pub struct NameOrEntity {
pub entity: Entity,
}
impl<'a> std::fmt::Display for NameOrEntityItem<'a> {
impl<'a> core::fmt::Display for NameOrEntityItem<'a> {
#[inline(always)]
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
match self.name {
Some(name) => std::fmt::Display::fmt(name, f),
None => std::fmt::Display::fmt(&self.entity, f),
Some(name) => core::fmt::Display::fmt(name, f),
None => core::fmt::Display::fmt(&self.entity, f),
}
}
}
@ -196,13 +196,13 @@ impl PartialEq for Name {
impl Eq for Name {}
impl PartialOrd for Name {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for Name {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
fn cmp(&self, other: &Self) -> core::cmp::Ordering {
self.name.cmp(&other.name)
}
}

View file

@ -1,4 +1,4 @@
use std::{
use core::{
any,
fmt::{self, Formatter},
};

View file

@ -10,7 +10,7 @@ use bevy_render::{
texture::Image,
view::ViewUniform,
};
use std::num::NonZero;
use core::num::NonZero;
#[derive(Resource)]
pub struct AutoExposurePipeline {

View file

@ -1,4 +1,4 @@
use std::ops::RangeInclusive;
use core::ops::RangeInclusive;
use super::compensation_curve::AutoExposureCompensationCurve;
use bevy_asset::Handle;

View file

@ -30,7 +30,7 @@ pub mod graph {
}
}
use std::ops::Range;
use core::ops::Range;
use bevy_asset::UntypedAssetId;
use bevy_utils::HashMap;

View file

@ -12,7 +12,7 @@ use bevy_render::{
use bevy_utils::tracing::error;
#[cfg(feature = "trace")]
use bevy_utils::tracing::info_span;
use std::ops::Range;
use core::ops::Range;
/// A [`bevy_render::render_graph::Node`] that runs the [`Transmissive3d`]
/// [`ViewSortedRenderPhases`].

View file

@ -63,7 +63,7 @@ pub const DEPTH_TEXTURE_SAMPLING_SUPPORTED: bool = false;
#[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))]
pub const DEPTH_TEXTURE_SAMPLING_SUPPORTED: bool = true;
use std::ops::Range;
use core::ops::Range;
use bevy_asset::{AssetId, UntypedAssetId};
use bevy_color::LinearRgba;

View file

@ -1,7 +1,7 @@
pub mod copy_lighting_id;
pub mod node;
use std::ops::Range;
use core::ops::Range;
use bevy_ecs::prelude::*;
use bevy_render::{

View file

@ -27,7 +27,7 @@
pub mod node;
use std::ops::Range;
use core::ops::Range;
use bevy_asset::UntypedAssetId;
use bevy_ecs::prelude::*;

View file

@ -1,5 +1,5 @@
use bevy_derive::DerefMut;
use std::ops::Deref;
use core::ops::Deref;
// Reason: `#[deref]` doesn't take any arguments

View file

@ -1,5 +1,5 @@
use bevy_derive::DerefMut;
use std::ops::Deref;
use core::ops::Deref;
#[derive(DerefMut)]
//~^ E0308

View file

@ -1,5 +1,5 @@
use bevy_derive::DerefMut;
use std::ops::Deref;
use core::ops::Deref;
#[derive(DerefMut)]
//~^ ERROR: requires one field to have

View file

@ -1,5 +1,5 @@
use bevy_derive::DerefMut;
use std::ops::Deref;
use core::ops::Deref;
#[derive(DerefMut)]
struct TupleStruct(

View file

@ -1,7 +1,7 @@
//@check-pass
use bevy_derive::DerefMut;
use std::ops::Deref;
use core::ops::Deref;
#[derive(DerefMut)]
// The first field is never read, but we want it there to check that the derive skips it.

View file

@ -1,6 +1,6 @@
//@check-pass
use bevy_derive::DerefMut;
use std::ops::Deref;
use core::ops::Deref;
#[derive(DerefMut)]
struct TupleStruct(#[deref] String);

View file

@ -19,7 +19,7 @@ pub fn derive_deref(input: TokenStream) -> TokenStream {
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
TokenStream::from(quote! {
impl #impl_generics ::std::ops::Deref for #ident #ty_generics #where_clause {
impl #impl_generics ::core::ops::Deref for #ident #ty_generics #where_clause {
type Target = #field_type;
fn deref(&self) -> &Self::Target {
@ -42,7 +42,7 @@ pub fn derive_deref_mut(input: TokenStream) -> TokenStream {
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
TokenStream::from(quote! {
impl #impl_generics ::std::ops::DerefMut for #ident #ty_generics #where_clause {
impl #impl_generics ::core::ops::DerefMut for #ident #ty_generics #where_clause {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.#field_member
}
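
In derive output, fully qualified paths matter more than in ordinary code: the expansion must compile in whatever crate invokes the derive, including `#![no_std]` ones, so emitting `::core::ops::*` avoids forcing a `std` dependency at the expansion site. A sketch of the hand-written equivalent of the emitted impls for a hypothetical wrapper type.

```rust
/// Hypothetical wrapper standing in for a user struct that derives
/// `Deref` and `DerefMut`.
struct Wrapper(String);

// Hand-written equivalent of what the derive now emits, with the same
// fully qualified `::core::ops` paths.
impl ::core::ops::Deref for Wrapper {
    type Target = String;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl ::core::ops::DerefMut for Wrapper {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

fn main() {
    let mut wrapped = Wrapper(String::from("bevy"));
    wrapped.push('!'); // resolves through `DerefMut` to `String::push`
    assert_eq!(wrapped.as_str(), "bevy!");
}
```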

View file

@ -9,7 +9,7 @@ use bevy_app::prelude::*;
use bevy_ecs::prelude::*;
use bevy_render::view::screenshot::trigger_screenshots;
use bevy_time::TimeUpdateStrategy;
use std::time::Duration;
use core::time::Duration;
/// A plugin that instruments continuous integration testing by automatically executing user-defined actions.
///

View file

@ -8,7 +8,7 @@ pub(crate) fn send_events(world: &mut World, mut current_frame: Local<u32>) {
let mut config = world.resource_mut::<CiTestingConfig>();
// Take all events for the current frame, leaving all the remaining alone.
let events = std::mem::take(&mut config.events);
let events = core::mem::take(&mut config.events);
let (to_run, remaining): (Vec<_>, _) = events
.into_iter()
.partition(|event| event.0 == *current_frame);

Some files were not shown because too many files have changed in this diff.