Merge branch 'main' into main
|
@ -1,40 +1,151 @@
|
|||
# Add the contents of this file to `config.toml` to enable "fast build" configuration. Please read the notes below.
|
||||
|
||||
# NOTE: For maximum performance, build using a nightly compiler
|
||||
# If you are using rust stable, remove the "-Zshare-generics=y" below.
|
||||
# Copy this file to `config.toml` to speed up your builds.
|
||||
#
|
||||
# # Faster linker
|
||||
#
|
||||
# One of the slowest aspects of compiling large Rust programs is the linking time. This file configures an
|
||||
# alternate linker that may improve build times. When choosing a new linker, you have two options:
|
||||
#
|
||||
# ## LLD
|
||||
#
|
||||
# LLD is a linker from the LLVM project that supports Linux, Windows, MacOS, and WASM. It has the greatest
|
||||
# platform support and the easiest installation process. It is enabled by default in this file for Linux
|
||||
# and Windows. On MacOS, the default linker yields higher performance than LLD and is used instead.
|
||||
#
|
||||
# To install, please scroll to the corresponding table for your target (e.g. `[target.x86_64-pc-windows-msvc]`
|
||||
# for Windows) and follow the steps under `LLD linker`.
|
||||
#
|
||||
# For more information, please see LLD's website at <https://lld.llvm.org>.
|
||||
#
|
||||
# ## Mold
|
||||
#
|
||||
# Mold is a newer linker written by one of the authors of LLD. It boasts even greater performance, specifically
|
||||
# through its high parallelism, though it only supports Linux.
|
||||
#
|
||||
# Mold is disabled by default in this file. If you wish to enable it, follow the installation instructions for
|
||||
# your corresponding target, disable LLD by commenting out its `-Clink-arg=...` line, and enable Mold by
|
||||
# *uncommenting* its `-Clink-arg=...` line.
|
||||
#
|
||||
# There is a fork of Mold named Sold that supports MacOS, but it is unmaintained and is about the same speed as
|
||||
# the default ld64 linker. For this reason, it is not included in this file.
|
||||
#
|
||||
# For more information, please see Mold's repository at <https://github.com/rui314/mold>.
|
||||
#
|
||||
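As a rough sketch (assuming Mold is installed at `/usr/bin/mold`, the path used in the Linux table later in this file), the swap described above amounts to commenting out the LLD `-Clink-arg` line and uncommenting the Mold one:

```toml
# Hypothetical end state after switching from LLD to Mold on Linux.
[target.x86_64-unknown-linux-gnu]
linker = "clang"
rustflags = [
    # LLD disabled in favour of Mold:
    # "-Clink-arg=-fuse-ld=lld",
    # Mold enabled (the path may differ on your distribution):
    "-Clink-arg=-fuse-ld=/usr/bin/mold",
]
```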
# # Nightly configuration
|
||||
#
|
||||
# Be warned that the following features require nightly Rust, which is experimental and may contain bugs. If you
|
||||
# are having issues, skip this section and use stable Rust instead.
|
||||
#
|
||||
# There are a few unstable features that can improve performance. To use them, first install nightly Rust
|
||||
# through Rustup:
|
||||
#
|
||||
# ```
|
||||
# rustup toolchain install nightly
|
||||
# ```
|
||||
#
|
||||
# Finally, uncomment the lines under the `Nightly` heading for your corresponding target table (e.g.
|
||||
# `[target.x86_64-unknown-linux-gnu]` for Linux) to enable the following features:
|
||||
#
|
||||
# ## `share-generics`
|
||||
#
|
||||
# Usually rustc builds each crate separately, then combines them all together at the end. `share-generics` forces
|
||||
# crates to share monomorphized generic code, so they do not duplicate work.
|
||||
#
|
||||
# In other words, instead of crate 1 generating `Foo<String>` and crate 2 generating `Foo<String>` separately,
|
||||
# only one crate generates `Foo<String>` and the other builds on the pre-existing work.
|
||||
#
|
||||
# Note that you may have some issues with this flag on Windows. If compiling fails due to the 65k symbol limit,
|
||||
# you may have to disable this setting. For more information and possible solutions to this error, see
|
||||
# <https://github.com/bevyengine/bevy/issues/1110>.
|
||||
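A hedged sketch of opting out for the Windows target only: on nightly, `-Zshare-generics=n` explicitly disables the feature, while on stable you would simply omit the flag altogether.

```toml
# Hypothetical Windows-only opt-out if the 65k symbol limit is hit (nightly toolchain).
[target.x86_64-pc-windows-msvc]
linker = "rust-lld.exe"
rustflags = ["-Zshare-generics=n"]
```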
#
|
||||
# ## `threads`
|
||||
#
|
||||
# This option enables rustc's parallel frontend, which improves performance when parsing, type checking, borrow
|
||||
# checking, and more. We currently set `threads=0`, which defaults to the number of cores in your CPU.
|
||||
#
|
||||
# For more information, see the blog post at <https://blog.rust-lang.org/2023/11/09/parallel-rustc.html>.
|
||||
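For reference, a minimal sketch of a stable-toolchain variant of the Linux table below: the `-Z...` flags require nightly, so a stable setup keeps only the faster linker.

```toml
# Hypothetical stable-only variant: keep LLD, drop the nightly-only flags.
[target.x86_64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-Clink-arg=-fuse-ld=lld"]
```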
|
||||
[target.x86_64-unknown-linux-gnu]
|
||||
linker = "clang"
|
||||
rustflags = [
|
||||
"-Clink-arg=-fuse-ld=lld", # Use LLD Linker
|
||||
"-Zshare-generics=y", # (Nightly) Make the current crate share its generic instantiations
|
||||
"-Zthreads=0", # (Nightly) Use improved multithreading with the recommended amount of threads.
|
||||
# LLD linker
|
||||
#
|
||||
# You may need to install it:
|
||||
#
|
||||
# - Ubuntu: `sudo apt-get install lld clang`
|
||||
# - Fedora: `sudo dnf install lld clang`
|
||||
# - Arch: `sudo pacman -S lld clang`
|
||||
"-Clink-arg=-fuse-ld=lld",
|
||||
|
||||
# Mold linker
|
||||
#
|
||||
# You may need to install it:
|
||||
#
|
||||
# - Ubuntu: `sudo apt-get install mold clang`
|
||||
# - Fedora: `sudo dnf install mold clang`
|
||||
# - Arch: `sudo pacman -S mold clang`
|
||||
# "-Clink-arg=-fuse-ld=/usr/bin/mold",
|
||||
|
||||
# Nightly
|
||||
# "-Zshare-generics=y",
|
||||
# "-Zthreads=0",
|
||||
]
|
||||
|
||||
# NOTE: You must install the [Mach-O LLD Port](https://lld.llvm.org/MachO/index.html) on macOS. You can easily do this by installing LLVM, which includes LLD, with the "brew" package manager:
|
||||
# `brew install llvm`
|
||||
[target.x86_64-apple-darwin]
|
||||
rustflags = [
|
||||
"-Clink-arg=-fuse-ld=/usr/local/opt/llvm/bin/ld64.lld", # Use LLD Linker
|
||||
"-Zshare-generics=y", # (Nightly) Make the current crate share its generic instantiations
|
||||
"-Zthreads=0", # (Nightly) Use improved multithreading with the recommended amount of threads.
|
||||
# LLD linker
|
||||
#
|
||||
# The default ld64 linker is faster; you should continue using it instead.
|
||||
#
|
||||
# You may need to install it:
|
||||
#
|
||||
# Brew: `brew install llvm`
|
||||
# Manually: <https://lld.llvm.org/MachO/index.html>
|
||||
# "-Clink-arg=-fuse-ld=/usr/local/opt/llvm/bin/ld64.lld",
|
||||
|
||||
# Nightly
|
||||
# "-Zshare-generics=y",
|
||||
# "-Zthreads=0",
|
||||
]
|
||||
|
||||
[target.aarch64-apple-darwin]
|
||||
rustflags = [
|
||||
"-Clink-arg=-fuse-ld=/opt/homebrew/opt/llvm/bin/ld64.lld", # Use LLD Linker
|
||||
"-Zshare-generics=y", # (Nightly) Make the current crate share its generic instantiations
|
||||
"-Zthreads=0", # (Nightly) Use improved multithreading with the recommended amount of threads.
|
||||
# LLD linker
|
||||
#
|
||||
# The default ld64 linker is faster; you should continue using it instead.
|
||||
#
|
||||
# You may need to install it:
|
||||
#
|
||||
# Brew: `brew install llvm`
|
||||
# Manually: <https://lld.llvm.org/MachO/index.html>
|
||||
# "-Clink-arg=-fuse-ld=/opt/homebrew/opt/llvm/bin/ld64.lld",
|
||||
|
||||
# Nightly
|
||||
# "-Zshare-generics=y",
|
||||
# "-Zthreads=0",
|
||||
]
|
||||
|
||||
[target.x86_64-pc-windows-msvc]
|
||||
linker = "rust-lld.exe" # Use LLD Linker
|
||||
# LLD linker
|
||||
#
|
||||
# You may need to install it:
|
||||
#
|
||||
# ```
|
||||
# cargo install -f cargo-binutils
|
||||
# rustup component add llvm-tools
|
||||
# ```
|
||||
linker = "rust-lld.exe"
|
||||
rustflags = [
|
||||
"-Zshare-generics=n",
|
||||
"-Zthreads=0", # (Nightly) Use improved multithreading with the recommended amount of threads.
|
||||
# Nightly
|
||||
# "-Zshare-generics=y",
|
||||
# "-Zthreads=0",
|
||||
]
|
||||
|
||||
# Optional: Uncommenting the following improves compile times, but reduces the amount of debug info to 'line number tables only'
|
||||
# In most cases the gains are negligible, but if you are on macOS and have slow compile times, you should see significant gains.
|
||||
#[profile.dev]
|
||||
#debug = 1
|
||||
# [profile.dev]
|
||||
# debug = 1
|
||||
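Spelled out, both commented variants above resolve to the same override (shown uncommented here for clarity):

```toml
[profile.dev]
debug = 1 # Reduces debug info to line number tables only, as described above.
```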
|
||||
# This enables you to run the CI tool using `cargo ci`.
|
||||
# This is not enabled by default; you need to copy this file to `config.toml`.
|
||||
[alias]
|
||||
ci = "run --package ci --"
|
||||
|
|
4
.github/FUNDING.yml
vendored
|
@ -1,3 +1 @@
|
|||
# These are supported funding model platforms
|
||||
|
||||
custom: https://bevyengine.org/community/donate/
|
||||
custom: https://bevyengine.org/donate/
|
||||
|
|
49
.github/actions/install-linux-deps/action.yml
vendored
Normal file
|
@ -0,0 +1,49 @@
|
|||
# This action installs a few dependencies necessary to build Bevy on Linux. By default it installs
|
||||
# alsa and udev, but can be configured depending on which libraries are needed:
|
||||
#
|
||||
# ```
|
||||
# - uses: ./.github/actions/install-linux-deps
|
||||
# with:
|
||||
# alsa: false
|
||||
# wayland: true
|
||||
# ```
|
||||
#
|
||||
# See the `inputs` section for all options and their defaults. Note that you must checkout the
|
||||
# repository before you can use this action.
|
||||
#
|
||||
# This action will only install dependencies when the current operating system is Linux. It will do
|
||||
# nothing on any other OS (MacOS, Windows).
|
||||
|
||||
name: Install Linux dependencies
|
||||
description: Installs the dependencies necessary to build Bevy on Linux.
|
||||
inputs:
|
||||
alsa:
|
||||
description: Install alsa (libasound2-dev)
|
||||
required: false
|
||||
default: true
|
||||
udev:
|
||||
description: Install udev (libudev-dev)
|
||||
required: false
|
||||
default: true
|
||||
wayland:
|
||||
description: Install Wayland (libwayland-dev)
|
||||
required: false
|
||||
default: false
|
||||
xkb:
|
||||
description: Install xkb (libxkbcommon-dev)
|
||||
required: false
|
||||
default: false
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Install Linux dependencies
|
||||
shell: bash
|
||||
if: ${{ runner.os == 'linux' }}
|
||||
run: >
|
||||
sudo apt-get update
|
||||
|
||||
sudo apt-get install --no-install-recommends
|
||||
${{ fromJSON(inputs.alsa) && 'libasound2-dev' || '' }}
|
||||
${{ fromJSON(inputs.udev) && 'libudev-dev' || '' }}
|
||||
${{ fromJSON(inputs.wayland) && 'libwayland-dev' || '' }}
|
||||
${{ fromJSON(inputs.xkb) && 'libxkbcommon-dev' || '' }}
|
1
.github/contributing/engine_style_guide.md
vendored
|
@ -14,6 +14,7 @@ For more advice on contributing to the engine, see the [relevant section](../../
|
|||
4. Use \`variable_name\` code blocks in comments to signify that you're referring to specific types and variables.
|
||||
5. Start comments with capital letters. End them with a period if they are sentence-like.
|
||||
3. Use comments to organize long and complex stretches of code that can't sensibly be refactored into separate functions.
|
||||
4. When using [Bevy error codes](https://bevyengine.org/learn/errors/) include a link to the relevant error on the Bevy website in the returned error message `... See: https://bevyengine.org/learn/errors/#b0003`.
|
||||
|
||||
## Rust API guidelines
|
||||
|
||||
|
|
4
.github/example-run/alien_cake_addict.ron
vendored
|
@ -1,3 +1,5 @@
|
|||
(
|
||||
exit_after: Some(300)
|
||||
events: [
|
||||
(300, AppExit),
|
||||
]
|
||||
)
|
||||
|
|
10
.github/example-run/breakout.ron
vendored
|
@ -1,5 +1,9 @@
|
|||
(
|
||||
exit_after: Some(900),
|
||||
frame_time: Some(0.03),
|
||||
screenshot_frames: [200],
|
||||
setup: (
|
||||
fixed_frame_time: Some(0.03),
|
||||
),
|
||||
events: [
|
||||
(200, Screenshot),
|
||||
(900, AppExit),
|
||||
]
|
||||
)
|
||||
|
|
4
.github/example-run/contributors.ron
vendored
|
@ -1,3 +1,5 @@
|
|||
(
|
||||
exit_after: Some(900)
|
||||
events: [
|
||||
(900, AppExit),
|
||||
]
|
||||
)
|
||||
|
|
10
.github/example-run/load_gltf.ron
vendored
|
@ -1,5 +1,9 @@
|
|||
(
|
||||
exit_after: Some(300),
|
||||
frame_time: Some(0.03),
|
||||
screenshot_frames: [100],
|
||||
setup: (
|
||||
frame_time: Some(0.03),
|
||||
),
|
||||
events: [
|
||||
(100, Screenshot),
|
||||
(300, AppExit),
|
||||
]
|
||||
)
|
||||
|
|
4
.github/example-run/no_renderer.ron
vendored
|
@ -1,3 +1,5 @@
|
|||
(
|
||||
exit_after: Some(100)
|
||||
events: [
|
||||
(100, AppExit),
|
||||
]
|
||||
)
|
||||
|
|
4
.github/example-run/scene.ron
vendored
|
@ -1,3 +1,5 @@
|
|||
(
|
||||
exit_after: Some(100)
|
||||
events: [
|
||||
(100, AppExit),
|
||||
]
|
||||
)
|
||||
|
|
7
.github/pull_request_template.md
vendored
|
@ -7,6 +7,13 @@
|
|||
|
||||
- Describe the solution used to achieve the objective above.
|
||||
|
||||
## Testing
|
||||
|
||||
- Did you test these changes? If so, how?
|
||||
- Are there any parts that need more testing?
|
||||
- How can other people (reviewers) test your changes? Is there anything specific they need to know?
|
||||
- If relevant, what platforms did you test these changes on, and are there any important ones you can't test?
|
||||
|
||||
---
|
||||
|
||||
## Changelog
|
||||
|
|
173
.github/workflows/ci.yml
vendored
|
@ -9,8 +9,13 @@ on:
|
|||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
# If nightly is breaking CI, modify this variable to target a specific nightly version.
|
||||
NIGHTLY_TOOLCHAIN: nightly
|
||||
|
||||
concurrency:
|
||||
group: ${{github.workflow}}-${{github.ref}}
|
||||
cancel-in-progress: ${{github.event_name == 'pull_request'}}
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
|
@ -30,9 +35,8 @@ jobs:
|
|||
target/
|
||||
key: ${{ runner.os }}-cargo-build-stable-${{ hashFiles('**/Cargo.toml') }}
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
- name: Install alsa and udev
|
||||
run: sudo apt-get update; sudo apt-get install --no-install-recommends libasound2-dev libudev-dev
|
||||
if: runner.os == 'linux'
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
- name: Build & run tests
|
||||
# See tools/ci/src/main.rs for the commands this runs
|
||||
run: cargo run -p ci -- test
|
||||
|
@ -57,14 +61,18 @@ jobs:
|
|||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: rustfmt, clippy
|
||||
- name: Install alsa and udev
|
||||
run: sudo apt-get update; sudo apt-get install --no-install-recommends libasound2-dev libudev-dev libwayland-dev libxkbcommon-dev
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
with:
|
||||
wayland: true
|
||||
xkb: true
|
||||
- name: CI job
|
||||
# See tools/ci/src/main.rs for the commands this runs
|
||||
run: cargo run -p ci -- lints
|
||||
|
||||
miri:
|
||||
runs-on: ubuntu-latest
|
||||
# Explicitly use MacOS 14 to take advantage of the M1 chip.
|
||||
runs-on: macos-14
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
@ -81,8 +89,6 @@ jobs:
|
|||
with:
|
||||
toolchain: ${{ env.NIGHTLY_TOOLCHAIN }}
|
||||
components: miri
|
||||
- name: Install alsa and udev
|
||||
run: sudo apt-get update; sudo apt-get install --no-install-recommends libasound2-dev libudev-dev libwayland-dev libxkbcommon-dev
|
||||
- name: CI job
|
||||
# To run the tests one item at a time for troubleshooting, use
|
||||
# cargo --quiet test --lib -- --list | sed 's/: test$//' | MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-permissive-provenance -Zmiri-disable-weak-memory-emulation" xargs -n1 cargo miri test -p bevy_ecs --lib -- --exact
|
||||
|
@ -116,8 +122,8 @@ jobs:
|
|||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
toolchain: stable
|
||||
- name: Install alsa and udev
|
||||
run: sudo apt-get update; sudo apt-get install --no-install-recommends libasound2-dev libudev-dev
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
- name: Check Compile
|
||||
# See tools/ci/src/main.rs for the commands this runs
|
||||
run: cargo run -p ci -- compile
|
||||
|
@ -142,8 +148,31 @@ jobs:
|
|||
target: wasm32-unknown-unknown
|
||||
- name: Check wasm
|
||||
run: cargo check --target wasm32-unknown-unknown
|
||||
|
||||
build-wasm-atomics:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
needs: build
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: ubuntu-assets-cargo-build-wasm-nightly-${{ hashFiles('**/Cargo.toml') }}
|
||||
- uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ env.NIGHTLY_TOOLCHAIN }}
|
||||
targets: wasm32-unknown-unknown
|
||||
components: rust-src
|
||||
- name: Check wasm
|
||||
run: cargo check --target wasm32-unknown-unknown -Z build-std=std,panic_abort
|
||||
env:
|
||||
RUSTFLAGS: --cfg=web_sys_unstable_apis
|
||||
RUSTFLAGS: "-C target-feature=+atomics,+bulk-memory"
|
||||
|
||||
markdownlint:
|
||||
runs-on: ubuntu-latest
|
||||
|
@ -168,11 +197,9 @@ jobs:
|
|||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
- name: Install taplo
|
||||
run: |
|
||||
curl -fsSL https://github.com/tamasfe/taplo/releases/latest/download/taplo-full-linux-x86_64.gz \
|
||||
| gzip -d - \
|
||||
| install -m 755 /dev/stdin /usr/local/bin/taplo
|
||||
run: cargo install taplo-cli --locked
|
||||
- name: Run Taplo
|
||||
id: taplo
|
||||
run: taplo fmt --check --diff
|
||||
|
@ -180,42 +207,72 @@ jobs:
|
|||
if: failure()
|
||||
run: |
|
||||
echo 'To fix toml fmt, please run `taplo fmt`'
|
||||
echo 'Or if you use VSCode, use the Even Better Toml extension'
|
||||
echo 'To check for a diff, run `taplo fmt --check --diff`'
|
||||
echo 'You can find taplo here: https://taplo.tamasfe.dev/'
|
||||
echo 'Or if you use VSCode, use the `Even Better Toml` extension with 2 spaces'
|
||||
echo 'You can find the extension here: https://marketplace.visualstudio.com/items?itemName=tamasfe.even-better-toml'
|
||||
|
||||
|
||||
run-examples-on-windows-dx12:
|
||||
runs-on: windows-latest
|
||||
timeout-minutes: 60
|
||||
typos:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Check for typos
|
||||
uses: crate-ci/typos@v1.21.0
|
||||
- name: Typos info
|
||||
if: failure()
|
||||
run: |
|
||||
echo 'To fix typos, please run `typos -w`'
|
||||
echo 'To check for a diff, run `typos`'
|
||||
echo 'You can find typos here: https://crates.io/crates/typos'
|
||||
echo 'If you use VSCode, you can also install `Typos Spell Checker`'
|
||||
echo 'You can find the extension here: https://marketplace.visualstudio.com/items?itemName=tekumara.typos-vscode'
|
||||
|
||||
|
||||
run-examples-macos-metal:
|
||||
# Explicitly use MacOS 14 to take advantage of the M1 chip.
|
||||
runs-on: macos-14
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: ${{ runner.os }}-windows-run-examples-${{ hashFiles('**/Cargo.toml') }}
|
||||
|
||||
- name: Disable audio
|
||||
# Disable audio through a patch. On GitHub M1 runners, audio times out after 15 minutes.
|
||||
run: git apply --ignore-whitespace tools/example-showcase/disable-audio.patch
|
||||
- name: Build bevy
|
||||
shell: bash
|
||||
# This uses the same command as when running the example to ensure the build is reused
|
||||
run: |
|
||||
WGPU_BACKEND=dx12 CI_TESTING_CONFIG=.github/example-run/alien_cake_addict.ron cargo build --example alien_cake_addict --features "bevy_ci_testing"
|
||||
|
||||
TRACE_CHROME=trace-alien_cake_addict.json CI_TESTING_CONFIG=.github/example-run/alien_cake_addict.ron cargo build --example alien_cake_addict --features "bevy_ci_testing,trace,trace_chrome"
|
||||
- name: Run examples
|
||||
shell: bash
|
||||
run: |
|
||||
for example in .github/example-run/*.ron; do
|
||||
example_name=`basename $example .ron`
|
||||
echo -n $example_name > last_example_run
|
||||
echo "running $example_name - "`date`
|
||||
time WGPU_BACKEND=dx12 CI_TESTING_CONFIG=$example cargo run --example $example_name --features "bevy_ci_testing"
|
||||
time TRACE_CHROME=trace-$example_name.json CI_TESTING_CONFIG=$example cargo run --example $example_name --features "bevy_ci_testing,trace,trace_chrome"
|
||||
sleep 10
|
||||
if [ `find ./ -maxdepth 1 -name 'screenshot-*.png' -print -quit` ]; then
|
||||
mkdir screenshots-$example_name
|
||||
mv screenshot-*.png screenshots-$example_name/
|
||||
fi
|
||||
done
|
||||
mkdir traces && mv trace*.json traces/
|
||||
mkdir screenshots && mv screenshots-* screenshots/
|
||||
- name: save traces
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: example-traces-macos
|
||||
path: traces
|
||||
- name: save screenshots
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: screenshots-macos
|
||||
path: screenshots
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: ${{ failure() && github.event_name == 'pull_request' }}
|
||||
with:
|
||||
name: example-run-macos
|
||||
path: example-run/
|
||||
|
||||
check-doc:
|
||||
runs-on: ubuntu-latest
|
||||
|
@ -232,9 +289,11 @@ jobs:
|
|||
target/
|
||||
key: ${{ runner.os }}-check-doc-${{ hashFiles('**/Cargo.toml') }}
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
- name: Install alsa and udev
|
||||
run: sudo apt-get update; sudo apt-get install --no-install-recommends libasound2-dev libudev-dev libwayland-dev libxkbcommon-dev
|
||||
if: runner.os == 'linux'
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
with:
|
||||
wayland: true
|
||||
xkb: true
|
||||
- name: Build and check doc
|
||||
# See tools/ci/src/main.rs for the commands this runs
|
||||
run: cargo run -p ci -- doc
|
||||
|
@ -332,14 +391,15 @@ jobs:
|
|||
target/
|
||||
key: ${{ runner.os }}-cargo-msrv-${{ hashFiles('**/Cargo.toml') }}
|
||||
- name: get MSRV
|
||||
id: msrv
|
||||
run: |
|
||||
msrv=`cargo metadata --no-deps --format-version 1 | jq --raw-output '.packages[] | select(.name=="bevy") | .rust_version'`
|
||||
echo "MSRV=$msrv" >> $GITHUB_ENV
|
||||
echo "msrv=$msrv" >> $GITHUB_OUTPUT
|
||||
- uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ env.MSRV }}
|
||||
- name: Install alsa and udev
|
||||
run: sudo apt-get update; sudo apt-get install --no-install-recommends libasound2-dev libudev-dev
|
||||
toolchain: ${{ steps.msrv.outputs.msrv }}
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
- name: Run cargo check
|
||||
id: check
|
||||
run: cargo check
|
||||
|
@ -374,4 +434,29 @@ jobs:
|
|||
echo " Fix the issue by replacing 'bevy_internal' with 'bevy'"
|
||||
echo " Example: 'use bevy::sprite::MaterialMesh2dBundle;' instead of 'bevy_internal::sprite::MaterialMesh2dBundle;'"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
check-cfg:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: ${{ runner.os }}-check-doc-${{ hashFiles('**/Cargo.toml') }}
|
||||
- uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ env.NIGHTLY_TOOLCHAIN }}
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
with:
|
||||
wayland: true
|
||||
xkb: true
|
||||
- name: Build and check cfg typos
|
||||
# See tools/ci/src/main.rs for the commands this runs
|
||||
run: cargo run -p ci -- cfg-check
|
||||
|
|
1
.github/workflows/daily.yml
vendored
|
@ -7,7 +7,6 @@ on:
|
|||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
NIGHTLY_TOOLCHAIN: nightly
|
||||
|
||||
jobs:
|
||||
build-for-iOS:
|
||||
|
|
4
.github/workflows/dependencies.yml
vendored
|
@ -12,6 +12,10 @@ on:
|
|||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{github.workflow}}-${{github.ref}}
|
||||
cancel-in-progress: ${{github.event_name == 'pull_request'}}
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
|
|
23
.github/workflows/docs.yml
vendored
|
@ -4,13 +4,14 @@ on:
|
|||
push:
|
||||
branches:
|
||||
- 'main'
|
||||
|
||||
# Allows running the action manually.
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTDOCFLAGS: --html-in-header header.html
|
||||
# If nightly is breaking CI, modify this variable to target a specific nightly version.
|
||||
NIGHTLY_TOOLCHAIN: nightly
|
||||
|
||||
# Sets the permissions to allow deploying to Github pages.
|
||||
permissions:
|
||||
|
@ -26,6 +27,9 @@ concurrency:
|
|||
jobs:
|
||||
build-and-deploy:
|
||||
runs-on: ubuntu-latest
|
||||
# Only run this job on the main Bevy repository. Without this, it would also run on forks
|
||||
# where developers work on the main branch but have not enabled Github Pages.
|
||||
if: ${{ github.repository == 'bevyengine/bevy' }}
|
||||
environment:
|
||||
name: github-pages
|
||||
url: ${{ steps.deployment.outputs.page_url }}
|
||||
|
@ -33,11 +37,15 @@ jobs:
|
|||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
- uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ env.NIGHTLY_TOOLCHAIN }}
|
||||
|
||||
- name: Install alsa and udev
|
||||
run: sudo apt-get update; sudo apt-get install --no-install-recommends libasound2-dev libudev-dev libwayland-dev libxkbcommon-dev
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
with:
|
||||
wayland: true
|
||||
xkb: true
|
||||
|
||||
# This does the following:
|
||||
# - Replaces the docs icon with one that clearly denotes it's not the released package on crates.io
|
||||
|
@ -48,7 +56,10 @@ jobs:
|
|||
echo "<meta name=\"robots\" content=\"noindex\">" > header.html
|
||||
|
||||
- name: Build docs
|
||||
run: cargo doc --all-features --no-deps -p bevy
|
||||
env:
|
||||
# needs to be in sync with [package.metadata.docs.rs]
|
||||
RUSTDOCFLAGS: -Zunstable-options --cfg=docsrs
|
||||
run: cargo doc --all-features --no-deps -p bevy -Zunstable-options -Zrustdoc-scrape-examples
|
||||
|
||||
# This adds the following:
|
||||
# - A top level redirect to the bevy crate documentation
|
||||
|
|
2
.github/workflows/post-release.yml
vendored
|
@ -49,7 +49,7 @@ jobs:
|
|||
--exclude build-wasm-example
|
||||
|
||||
- name: Create PR
|
||||
uses: peter-evans/create-pull-request@v5
|
||||
uses: peter-evans/create-pull-request@v6
|
||||
with:
|
||||
delete-branch: true
|
||||
base: "main"
|
||||
|
|
5
.github/workflows/release.yml
vendored
|
@ -46,11 +46,10 @@ jobs:
|
|||
--exclude build-wasm-example
|
||||
|
||||
- name: Create PR
|
||||
uses: peter-evans/create-pull-request@v5
|
||||
uses: peter-evans/create-pull-request@v6
|
||||
with:
|
||||
delete-branch: true
|
||||
base: "main"
|
||||
title: "Preparing Next Release"
|
||||
body: |
|
||||
Preparing next release
|
||||
This PR has been auto-generated
|
||||
Preparing next release. This PR has been auto-generated.
|
||||
|
|
98
.github/workflows/validation-jobs.yml
vendored
|
@ -7,8 +7,13 @@ on:
|
|||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{github.workflow}}-${{github.ref}}
|
||||
cancel-in-progress: ${{github.event_name == 'pull_request'}}
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
# If nightly is breaking CI, modify this variable to target a specific nightly version.
|
||||
NIGHTLY_TOOLCHAIN: nightly
|
||||
|
||||
jobs:
|
||||
|
@ -27,8 +32,9 @@ jobs:
|
|||
target
|
||||
key: ${{ runner.os }}-ios-install-${{ matrix.toolchain }}-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
# TODO: remove the x86 target once it always runs on ARM GitHub runners
|
||||
- name: Add iOS targets
|
||||
run: rustup target add aarch64-apple-ios x86_64-apple-ios
|
||||
run: rustup target add aarch64-apple-ios x86_64-apple-ios aarch64-apple-ios-sim
|
||||
|
||||
- name: Build and install iOS app in iOS Simulator.
|
||||
run: cd examples/mobile && make install
|
||||
|
@ -66,18 +72,14 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- name: Install Bevy dependencies
|
||||
run: |
|
||||
sudo apt-get update;
|
||||
DEBIAN_FRONTEND=noninteractive sudo apt-get install --no-install-recommends -yq \
|
||||
libasound2-dev libudev-dev libxkbcommon-x11-0;
|
||||
- name: install xvfb, llvmpipe and lavapipe
|
||||
run: |
|
||||
sudo apt-get update -y -qq
|
||||
sudo add-apt-repository ppa:kisak/turtle -y
|
||||
sudo apt-get update
|
||||
sudo apt install -y xvfb libegl1-mesa libgl1-mesa-dri libxcb-xfixes0-dev mesa-vulkan-drivers
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
# At some point this may be merged into `install-linux-deps`, but for now it is its own step.
|
||||
- name: Install additional Linux dependencies for Vulkan
|
||||
run: |
|
||||
sudo add-apt-repository ppa:kisak/turtle -y
|
||||
sudo apt-get install --no-install-recommends libxkbcommon-x11-0 xvfb libegl1-mesa libgl1-mesa-dri libxcb-xfixes0-dev mesa-vulkan-drivers
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
|
@ -105,67 +107,66 @@ jobs:
|
|||
mv screenshot-*.png screenshots-$example_name/
|
||||
fi
|
||||
done
|
||||
zip traces.zip trace*.json
|
||||
zip -r screenshots.zip screenshots-*
|
||||
mkdir traces && mv trace*.json traces/
|
||||
mkdir screenshots && mv screenshots-* screenshots/
|
||||
- name: save traces
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: example-traces-linux
|
||||
path: traces.zip
|
||||
path: traces
|
||||
- name: save screenshots
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: screenshots-linux
|
||||
path: screenshots.zip
|
||||
path: screenshots
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: ${{ failure() && github.event_name == 'pull_request' }}
|
||||
with:
|
||||
name: example-run-linux
|
||||
path: example-run/
|
||||
|
||||
run-examples-macos-metal:
|
||||
run-examples-on-windows-dx12:
|
||||
if: ${{ github.event_name == 'merge_group' }}
|
||||
runs-on: macos-14
|
||||
runs-on: windows-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
- name: Disable audio
|
||||
# Disable audio through a patch. On GitHub M1 runners, audio times out after 15 minutes.
|
||||
run: git apply --ignore-whitespace tools/example-showcase/disable-audio.patch
|
||||
- name: Build bevy
|
||||
shell: bash
|
||||
# This uses the same command as when running the example to ensure the build is reused
|
||||
run: |
|
||||
TRACE_CHROME=trace-alien_cake_addict.json CI_TESTING_CONFIG=.github/example-run/alien_cake_addict.ron cargo build --example alien_cake_addict --features "bevy_ci_testing,trace,trace_chrome"
|
||||
WGPU_BACKEND=dx12 TRACE_CHROME=trace-alien_cake_addict.json CI_TESTING_CONFIG=.github/example-run/alien_cake_addict.ron cargo build --example alien_cake_addict --features "bevy_ci_testing,trace,trace_chrome"
|
||||
- name: Run examples
|
||||
shell: bash
|
||||
run: |
|
||||
for example in .github/example-run/*.ron; do
|
||||
example_name=`basename $example .ron`
|
||||
echo -n $example_name > last_example_run
|
||||
echo "running $example_name - "`date`
|
||||
time TRACE_CHROME=trace-$example_name.json CI_TESTING_CONFIG=$example cargo run --example $example_name --features "bevy_ci_testing,trace,trace_chrome"
|
||||
time WGPU_BACKEND=dx12 TRACE_CHROME=trace-$example_name.json CI_TESTING_CONFIG=$example cargo run --example $example_name --features "bevy_ci_testing,trace,trace_chrome"
|
||||
sleep 10
|
||||
if [ `find ./ -maxdepth 1 -name 'screenshot-*.png' -print -quit` ]; then
|
||||
mkdir screenshots-$example_name
|
||||
mv screenshot-*.png screenshots-$example_name/
|
||||
fi
|
||||
done
|
||||
zip traces.zip trace*.json
|
||||
zip -r screenshots.zip screenshots-*
|
||||
mkdir traces && mv trace*.json traces/
|
||||
mkdir screenshots && mv screenshots-* screenshots/
|
||||
- name: save traces
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: example-traces-macos
|
||||
path: traces.zip
|
||||
name: example-traces-windows
|
||||
path: traces
|
||||
- name: save screenshots
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: screenshots-macos
|
||||
path: screenshots.zip
|
||||
name: screenshots-windows
|
||||
path: screenshots
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: ${{ failure() && github.event_name == 'pull_request' }}
|
||||
with:
|
||||
name: example-run-macos
|
||||
name: example-run-windows
|
||||
path: example-run/
|
||||
|
||||
run-examples-on-wasm:
|
||||
|
@ -236,8 +237,8 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
- name: Install alsa and udev
|
||||
run: sudo apt-get update; sudo apt-get install --no-install-recommends libasound2-dev libudev-dev
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
- name: Build
|
||||
run: cargo build -p ${{ matrix.crate }} --no-default-features
|
||||
env:
|
||||
|
@ -278,7 +279,36 @@ jobs:
|
|||
toolchain: ${{ env.NIGHTLY_TOOLCHAIN }}
|
||||
- name: Installs cargo-udeps
|
||||
run: cargo install --force cargo-udeps
|
||||
- name: Install alsa and udev
|
||||
run: sudo apt-get update; sudo apt-get install --no-install-recommends libasound2-dev libudev-dev
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
- name: Run cargo udeps
|
||||
run: cargo udeps
|
||||
|
||||
check-example-showcase-patches-still-work:
|
||||
if: ${{ github.event_name == 'merge_group' }}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: ${{ runner.os }}-cargo-check-showcase-patches-${{ hashFiles('**/Cargo.toml') }}
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
- name: Apply patches
|
||||
run: |
|
||||
CODE=0
|
||||
for patch in tools/example-showcase/*.patch; do
|
||||
# Try applying the patch, logging an error if it fails.
|
||||
git apply --ignore-whitespace $patch || { echo "::error::$patch failed to apply."; CODE=1; }
|
||||
done
|
||||
exit $CODE
|
||||
- name: Build with patches
|
||||
run: cargo build
|
||||
|
|
111
.github/workflows/weekly.yml
vendored
Normal file
|
@ -0,0 +1,111 @@
|
|||
name: Weekly beta compile test
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# New versions of Rust are released on Thursdays. We test on Mondays to get at least 3 days of warning before all our CI breaks again.
|
||||
# https://forge.rust-lang.org/release/process.html#release-day-thursday
|
||||
- cron: '0 12 * * 1'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
test:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [windows-latest, ubuntu-latest, macos-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@beta
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
- name: Build & run tests
|
||||
# See tools/ci/src/main.rs for the commands this runs
|
||||
run: cargo run -p ci -- test
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUSTFLAGS: "-C debuginfo=0 -D warnings"
|
||||
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@beta
|
||||
with:
|
||||
components: rustfmt, clippy
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
with:
|
||||
wayland: true
|
||||
xkb: true
|
||||
- name: Run lints
|
||||
# See tools/ci/src/main.rs for the commands this runs
|
||||
run: cargo run -p ci -- lints
|
||||
|
||||
check-compiles:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
needs: test
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@beta
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
- name: Check compile test
|
||||
# See tools/ci/src/main.rs for the commands this runs
|
||||
run: cargo run -p ci -- compile
|
||||
|
||||
check-doc:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@beta
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
with:
|
||||
wayland: true
|
||||
xkb: true
|
||||
- name: Build and check docs
|
||||
# See tools/ci/src/main.rs for the commands this runs
|
||||
run: cargo run -p ci -- doc
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUSTFLAGS: "-C debuginfo=0"
|
||||
|
||||
open-issue:
|
||||
name: Warn that weekly CI fails
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test, lint, check-compiles, check-doc]
|
||||
permissions:
|
||||
issues: write
|
||||
# Use always() so the job doesn't get canceled if any other jobs fail
|
||||
if: ${{ always() && contains(needs.*.result, 'failure') }}
|
||||
steps:
|
||||
- name: Create issue
|
||||
run: |
|
||||
previous_issue_number=$(gh issue list \
|
||||
--search "$TITLE in:title" \
|
||||
--json number \
|
||||
--jq '.[0].number')
|
||||
if [[ -n $previous_issue_number ]]; then
|
||||
gh issue comment $previous_issue_number \
|
||||
--body "Weekly pipeline still fails: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
|
||||
else
|
||||
gh issue create \
|
||||
--title "$TITLE" \
|
||||
--label "$LABELS" \
|
||||
--body "$BODY"
|
||||
fi
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GH_REPO: ${{ github.repository }}
|
||||
TITLE: Main branch fails to compile on Rust beta.
|
||||
LABELS: C-Bug,S-Needs-Triage
|
||||
BODY: |
|
||||
## Weekly CI run has failed.
|
||||
[The offending run.](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
4
.gitignore
vendored
|
@ -13,8 +13,12 @@ dxil.dll
|
|||
# Generated by "examples/scene/scene.rs"
|
||||
assets/scenes/load_scene_example-new.scn.ron
|
||||
|
||||
# Generated by "examples/window/screenshot.rs"
|
||||
**/screenshot-*.png
|
||||
|
||||
assets/**/*.meta
|
||||
crates/bevy_asset/imported_assets
|
||||
imported_assets
|
||||
|
||||
example_showcase_config.ron
|
||||
example-showcase-reports/
|
||||
|
|
1397
CHANGELOG.md
142
CONTRIBUTING.md
|
@ -76,25 +76,33 @@ Check out our dedicated [Bevy Organization document](/docs/the_bevy_organization
|
|||
|
||||
### Classifying PRs
|
||||
|
||||
Our merge strategy relies on the classification of PRs on two axes:
|
||||
[Labels](https://github.com/bevyengine/bevy/labels) are our primary tool to organize work.
|
||||
Each label has a prefix denoting its category:
|
||||
|
||||
* How controversial are the design decisions.
|
||||
* How complex is the implementation.
|
||||
* **D:** Difficulty. In order, these are:
|
||||
* `D-Trivial`: typos, obviously incorrect one-line bug fixes, code reorganization, renames
|
||||
* `D-Straightforward`: simple bug fixes and API improvements, docs, tests and examples
|
||||
* `D-Modest`: new features, refactors, challenging bug fixes
|
||||
* `D-Complex`: rewrites and unusually complex features
|
||||
* When applied to an issue, these labels reflect the estimated level of expertise (not time) required to fix the issue.
|
||||
* When applied to a PR, these labels reflect the estimated level of expertise required to *review* the PR.
|
||||
* The `D-Domain-Expert` and `D-Domain-Agnostic` labels are modifiers, which describe if unusually high or low degrees of domain-specific knowledge are required.
|
||||
* The `D-Unsafe` label is applied to any code that touches `unsafe` Rust, which requires special skills and scrutiny.
|
||||
* **X:** Controversiality. In order, these are:
|
||||
* `X-Uncontroversial`: everyone should agree that this is a good idea
|
||||
* `X-Contentious`: there's real design thought needed to ensure that this is the right path forward
|
||||
* `X-Controversial`: there's active disagreement and/or large-scale architectural implications involved
|
||||
* `X-Blessed`: work that was controversial, but whose controversial (but perhaps not technical) elements have been endorsed by the relevant decision makers.
|
||||
* **A:** Area (e.g. A-Animation, A-ECS, A-Rendering, ...).
|
||||
* **C:** Category (e.g. C-Breaking-Change, C-Code-Quality, C-Docs, ...).
|
||||
* **O:** Operating System (e.g. O-Linux, O-Web, O-Windows, ...).
|
||||
* **P:** Priority (e.g. P-Critical, P-High, ...)
|
||||
* Most work is not explicitly categorized by priority: volunteer work mostly occurs on an ad hoc basis depending on contributor interests
|
||||
* **S:** Status (e.g. S-Blocked, S-Needs-Review, S-Needs-Design, ...).
|
||||
|
||||
Each [label](https://github.com/bevyengine/bevy/labels) has a prefix denoting its category:
|
||||
|
||||
* A: Area (e.g. A-Animation, A-ECS, A-Rendering).
|
||||
* C: Category (e.g. C-Breaking-Change, C-Code-Quality, C-Docs).
|
||||
* D: Difficulty (e.g. D-Complex, D-Good-First-Issue).
|
||||
* O: Operating System (e.g. O-Linux, O-Web, O-Windows).
|
||||
* P: Priority (e.g. P-Critical, P-High).
|
||||
* S: Status (e.g. S-Blocked, S-Controversial, S-Needs-Design).
|
||||
|
||||
PRs with non-trivial design decisions are given the [`S-Controversial`] label. This indicates that
|
||||
the PR needs more thorough design review or an [RFC](https://github.com/bevyengine/rfcs), if complex enough.
|
||||
|
||||
PRs that are non-trivial to review are given the [`D-Complex`] label. This indicates that the PR
|
||||
should be reviewed more thoroughly and by people with experience in the area that the PR touches.
|
||||
The rules for how PRs get merged depend on their classification by controversy and difficulty.
|
||||
More difficult PRs will require more careful review from experts,
|
||||
while more controversial PRs will require rewrites to reduce the costs involved and/or sign-off from Subject Matter Experts and Maintainers.
|
||||
|
||||
When making PRs, try to split out more controversial changes from less controversial ones, in order to make your work easier to review and merge.
|
||||
It is also a good idea to try and split out simple changes from more complex changes if it is not helpful for them to be reviewed together.
|
||||
|
@ -155,6 +163,35 @@ We use [Milestones](https://github.com/bevyengine/bevy/milestones) to track issu
|
|||
|
||||
There are also two priority labels: [`P-Critical`](https://github.com/bevyengine/bevy/issues?q=is%3Aopen+is%3Aissue+label%3AP-Critical) and [`P-High`](https://github.com/bevyengine/bevy/issues?q=is%3Aopen+is%3Aissue+label%3AP-High) that can be used to find issues and PRs that need to be resolved urgently.
|
||||
|
||||
### Closing PRs and Issues
|
||||
|
||||
From time to time, PRs are unsuitable to be merged in a way that cannot be readily fixed.
|
||||
Rather than leaving these PRs open in limbo indefinitely, they should simply be closed.
|
||||
|
||||
This might happen if:
|
||||
|
||||
1. The PR is spam or malicious.
|
||||
2. The work has already been done elsewhere or is otherwise fully obsolete.
|
||||
3. The PR was successfully adopted.
|
||||
4. The work is particularly low quality, and the author is resistant to coaching.
|
||||
5. The work adds features or abstraction of limited value, especially in a way that could easily be recreated outside of the engine.
|
||||
6. The work has been sitting in review for so long and accumulated so many conflicts that it would be simpler to redo it from scratch.
|
||||
7. The PR is pointlessly large, and should be broken into multiple smaller PRs for easier review.
|
||||
|
||||
PRs that are `S-Adopt-Me` should be left open, but only if they're genuinely more useful to rebase rather than simply use as a reference.
|
||||
|
||||
There are several paths for PRs to be closed:
|
||||
|
||||
1. Obviously, authors may close their own PRs for any reason at any time.
|
||||
2. If a PR is clearly spam or malicious, anyone with triage rights is encouraged to close out the PR and report it to Github.
|
||||
3. If the work has already been done elsewhere, adopted or otherwise obsoleted, anyone with triage rights is encouraged to close out the PR with an explanatory comment.
|
||||
4. Anyone may nominate a PR for closure, by bringing it to the attention of the author and / or one of the SMEs / maintainers. Let them press the button, but this is generally well-received and helpful.
|
||||
5. SMEs or maintainers may and are encouraged to unilaterally close PRs that fall into one or more of the remaining categories.
|
||||
6. In the case of PRs where some members of the community (other than the author) are in favor and some are opposed, any two relevant SMEs or maintainers may act in concert to close the PR.
|
||||
|
||||
When closing a PR, check if it has an issue linked.
|
||||
If it does not, you should strongly consider creating an issue and linking the now-closed PR to help make sure the previous work can be discovered and credited.
|
||||
|
||||
## Making changes to Bevy
|
||||
|
||||
Most changes don't require much "process". If your change is relatively straightforward, just do the following:
|
||||
|
@ -189,6 +226,49 @@ Take a look at the sections that follow to pick a route (or five) that appeal to
|
|||
|
||||
If you ever find yourself at a loss for what to do, or in need of mentorship or advice on how to contribute to Bevy, feel free to ask in [Discord] and one of our more experienced community members will be happy to help.
|
||||
|
||||
### Join a working group
|
||||
|
||||
Active initiatives in Bevy are organized into temporary working groups: choosing one of those and asking how to help can be a fantastic way to get up to speed and be immediately useful.
|
||||
|
||||
Working groups are public, open-membership groups that work together to tackle a broad-but-scoped initiative.
|
||||
The work that they do is coordinated in a forum-channel on [Discord](https://discord.gg/bevy), although they also create issues and may use project boards for tangible work that needs to be done.
|
||||
|
||||
There are no special requirements to be a member, and no formal membership list or leadership.
|
||||
Anyone can help, and you should expect to compromise and work together with others to bring a shared vision to life.
|
||||
Working groups are *spaces*, not clubs.
|
||||
|
||||
### Start a working group
|
||||
|
||||
When tackling a complex initiative, friends and allies can make things go much more smoothly.
|
||||
|
||||
To start a working group:
|
||||
|
||||
1. Decide what the working group is going to focus on. This should be tightly focused and achievable!
|
||||
2. Gather at least 3 people including yourself who are willing to be in the working group.
|
||||
3. Ping the `@Maintainer` role on Discord in [#engine-dev](https://discord.com/channels/691052431525675048/692572690833473578) announcing your mutual intent and a one or two sentence description of your plans.
|
||||
|
||||
The maintainers will briefly evaluate the proposal in consultation with the relevant SMEs and give you a thumbs up or down on whether this is something Bevy can and wants to explore right now.
|
||||
You don't need a concrete plan at this stage, just a sensible argument for both "why is this something that could be useful to Bevy" and "why there aren't any serious barriers in implementing this in the near future".
|
||||
If they're in favor, a maintainer will create a forum channel for you and you're off to the races.
|
||||
|
||||
Your initial task is writing up a design doc: laying out the scope of work and general implementation strategy.
|
||||
Here's a [solid example of a design doc](https://github.com/bevyengine/bevy/issues/12365), although feel free to use whatever format works best for your team.
|
||||
|
||||
Once that's ready, get a sign-off on the broad vision and goals from the appropriate SMEs and maintainers.
|
||||
This is the primary review step: maintainers and SMEs should be broadly patient and supportive even if they're skeptical until a proper design doc is in hand to evaluate.
|
||||
|
||||
With a sign-off in hand, post the design doc to [Github Discussions](https://github.com/bevyengine/bevy/discussions) with the [`C-Design-Doc` label](https://github.com/bevyengine/bevy/discussions?discussions_q=is%3Aopen+label%3A%22C-Design+Doc%22) for archival purposes and begin work on implementation.
|
||||
Post PRs that you need review on in your group's forum thread, ask for advice, and share the load.
|
||||
Controversial PRs are still `S-Controversial`, but with a sign-off-in-principle, things should go more smoothly.
|
||||
|
||||
If work peters out and the initiative dies, maintainers can wind down working groups (in consultation with SMEs and the working group itself).
|
||||
This is normal and expected: projects fail for all sorts of reasons!
|
||||
However, it's important to both keep the number of working groups relatively small and ensure they're active:
|
||||
they serve a vital role in onboarding new contributors.
|
||||
|
||||
Once your implementation work laid out in your initial design doc is complete, it's time to wind down the working group.
|
||||
Feel free to make another one though to tackle the next step in your grand vision!
|
||||
|
||||
### Battle-testing Bevy
|
||||
|
||||
Ultimately, Bevy is a tool that's designed to help people make cool games.
|
||||
|
@ -277,6 +357,11 @@ With the sheer volume of activity in Bevy's community, reviewing others work wit
|
|||
You don't need to be an Elder Rustacean to be useful here: anyone can catch missing tests, unclear docs, logic errors, and so on.
|
||||
If you have specific skills (e.g. advanced familiarity with `unsafe` code, rendering knowledge or web development experience) or personal experience with a problem, try to prioritize those areas to ensure we can get appropriate expertise where we need it.
|
||||
|
||||
When you find (or make) a PR that you don't feel comfortable reviewing, but you *can* think of someone who does, consider using Github's "Request review" functionality (in the top-right of the PR screen) to bring the work to their attention.
|
||||
If they're not a Bevy Org member, you'll need to ping them in the thread directly: that's fine too!
|
||||
Almost everyone working on Bevy is a volunteer: this should be treated as a gentle nudge, rather than an assignment of work.
|
||||
Consider checking the Git history for appropriate reviewers, or ask on Discord for suggestions.
|
||||
|
||||
Focus on giving constructive, actionable feedback that results in real improvements to code quality or end-user experience.
|
||||
If you don't understand why an approach was taken, please ask!
|
||||
|
||||
|
@ -285,7 +370,16 @@ Larger changes deserve a comment in the main thread, or a pull request to the or
|
|||
When in doubt about a matter of architectural philosophy, refer back to [*What we're trying to build*](#what-were-trying-to-build) for guidance.
|
||||
|
||||
Once you're happy with the work and feel you're reasonably qualified to assess quality in this particular area, leave your `Approved` review on the PR.
|
||||
If you're new to GitHub, check out the [Pull Request Review documentation](https://docs.github.com/en/github/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews). Anyone can leave reviews ... no special permissions are required!
|
||||
If you're new to GitHub, check out the [Pull Request Review documentation](https://docs.github.com/en/github/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews).
|
||||
**Anyone** can and should leave reviews ... no special permissions are required!
|
||||
|
||||
It's okay to leave an approval even if you aren't 100% confident on all areas of the PR: just be sure to note your limitations.
|
||||
When maintainers are evaluating the PR to be merged, they'll make sure that there's good coverage on all of the critical areas.
|
||||
If you can only check that the math is correct, and another reviewer can check everything *but* the math, we're in good shape!
|
||||
|
||||
Similarly, if there are areas that would be *good* to fix but aren't severe, please consider leaving an approval.
|
||||
The author can address them immediately, or spin it out into follow-up issues or PRs.
|
||||
Large PRs are much more draining for both reviewers and authors, so try to push for a smaller scope with clearly tracked follow-ups.
|
||||
|
||||
There are three main places you can check for things to review:
|
||||
|
||||
|
@ -334,19 +428,19 @@ If you're new to Bevy, here's the workflow we use:
|
|||
* `cargo run -p ci -- doc` - to run doc tests and doc checks.
|
||||
* `cargo run -p ci -- compile` - to check that everything that must compile still does (examples and benches), and that some that shouldn't still don't ([`crates/bevy_ecs_compile_fail_tests`](./crates/bevy_ecs_compile_fail_tests)).
|
||||
* to get more information on commands available and what is run, check the [tools/ci crate](./tools/ci).
|
||||
|
||||
4. When working with Markdown (`.md`) files, Bevy's CI will check markdown files (like this one) using [markdownlint](https://github.com/DavidAnson/markdownlint).
|
||||
To locally lint your files using the same workflow as our CI:
|
||||
1. Install [markdownlint-cli](https://github.com/igorshubovych/markdownlint-cli).
|
||||
2. Run `markdownlint -f -c .github/linters/.markdown-lint.yml .` in the root directory of the Bevy project.
|
||||
|
||||
5. When working with Toml (`.toml`) files, Bevy's CI will check toml files using [taplo](https://taplo.tamasfe.dev/): `taplo fmt --check --diff`
|
||||
1. If you use VSCode, install [Even better toml](https://marketplace.visualstudio.com/items?itemName=tamasfe.even-better-toml) and format your files.
|
||||
2. If you want to use the cli tool, install [taplo-cli](https://taplo.tamasfe.dev/cli/installation/cargo.html) and run `taplo fmt --check --diff` to check for the formatting. Fix any issues by running `taplo fmt` in the root directory of the Bevy project.
|
||||
|
||||
6. Push your changes to your fork on Github and open a Pull Request.
|
||||
7. Respond to any CI failures or review feedback. While CI failures must be fixed before we can merge your PR, you do not need to *agree* with all feedback from your reviews, merely acknowledge that it was given. If you cannot come to an agreement, leave the thread open and defer to a Maintainer or Project Lead's final judgement.
|
||||
8. When your PR is ready to merge, a Maintainer or Project Lead will review it and suggest final changes. If those changes are minimal they may even apply them directly to speed up merging.
|
||||
6. Check for typos. Bevy's CI will check for them using [typos](https://github.com/crate-ci/typos).
|
||||
1. If you use VSCode, install [Typos Spell Checker](https://marketplace.visualstudio.com/items?itemName=tekumara.typos-vscode).
|
||||
2. You can also use the cli tool. Install [typos-cli](https://github.com/crate-ci/typos?tab=readme-ov-file#install) and run `typos` to check for typos, and fix them by running `typos -w`.
|
||||
7. Push your changes to your fork on Github and open a Pull Request.
|
||||
8. Respond to any CI failures or review feedback. While CI failures must be fixed before we can merge your PR, you do not need to *agree* with all feedback from your reviews, merely acknowledge that it was given. If you cannot come to an agreement, leave the thread open and defer to a Maintainer or Project Lead's final judgement.
|
||||
9. When your PR is ready to merge, a Maintainer or Project Lead will review it and suggest final changes. If those changes are minimal they may even apply them directly to speed up merging.
|
||||
|
||||
If you end up adding a new official Bevy crate to the `bevy` repo:
|
||||
|
||||
|
|
|
@ -21,12 +21,15 @@
|
|||
* Ground tile from [Kenney's Tower Defense Kit](https://www.kenney.nl/assets/tower-defense-kit) (CC0 1.0 Universal)
|
||||
* Game icons from [Kenney's Game Icons](https://www.kenney.nl/assets/game-icons) (CC0 1.0 Universal)
|
||||
* Space ships from [Kenney's Simple Space Kit](https://www.kenney.nl/assets/simple-space) (CC0 1.0 Universal)
|
||||
* UI borders from [Kenney's Fantasy UI Borders Kit](https://kenney.nl/assets/fantasy-ui-borders) (CC0 1.0 Universal)
|
||||
* glTF animated fox from [glTF Sample Models][fox]
|
||||
* Low poly fox [by PixelMannen] (CC0 1.0 Universal)
|
||||
* Rigging and animation [by @tomkranis on Sketchfab] ([CC-BY 4.0])
|
||||
* FiraMono by The Mozilla Foundation and Telefonica S.A (SIL Open Font License, Version 1.1: assets/fonts/FiraMono-LICENSE)
|
||||
* Barycentric from [mk_bary_gltf](https://github.com/komadori/mk_bary_gltf) (MIT OR Apache-2.0)
|
||||
* `MorphStressTest.gltf`, [MorphStressTest] ([CC-BY 4.0] by Analytical Graphics, Inc, Model and textures by Ed Mackey)
|
||||
* Mysterious acoustic guitar music sample from [florianreichelt](https://freesound.org/people/florianreichelt/sounds/412429/) (CC0 license)
|
||||
* Epic orchestra music sample, modified to loop, from [Migfus20](https://freesound.org/people/Migfus20/sounds/560449/) ([CC BY 4.0 DEED](https://creativecommons.org/licenses/by/4.0/))
|
||||
|
||||
[MorphStressTest]: https://github.com/KhronosGroup/glTF-Sample-Models/tree/master/2.0/MorphStressTest
|
||||
[fox]: https://github.com/KhronosGroup/glTF-Sample-Models/tree/master/2.0/Fox
|
||||
|
|
551
Cargo.toml
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "bevy"
|
||||
version = "0.12.0"
|
||||
version = "0.14.0-dev"
|
||||
edition = "2021"
|
||||
categories = ["game-engines", "graphics", "gui", "rendering"]
|
||||
description = "A refreshingly simple data-driven game engine and app framework"
|
||||
|
@ -8,17 +8,17 @@ exclude = ["assets/", "tools/", ".github/", "crates/", "examples/wasm/assets/"]
|
|||
homepage = "https://bevyengine.org"
|
||||
keywords = ["game", "engine", "gamedev", "graphics", "bevy"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/bevyengine/bevy"
|
||||
documentation = "https://docs.rs/bevy"
|
||||
rust-version = "1.74.0"
|
||||
rust-version = "1.77.0"
|
||||
|
||||
[workspace]
|
||||
exclude = [
|
||||
"benches",
|
||||
"crates/bevy_ecs_compile_fail_tests",
|
||||
"crates/bevy_macros_compile_fail_tests",
|
||||
"crates/bevy_reflect_compile_fail_tests",
|
||||
"crates/bevy_derive/compile_fail",
|
||||
"crates/bevy_ecs/compile_fail",
|
||||
"crates/bevy_reflect/compile_fail",
|
||||
"tools/compile_fail_utils",
|
||||
]
|
||||
members = [
|
||||
"crates/*",
|
||||
|
@ -38,11 +38,15 @@ undocumented_unsafe_blocks = "warn"
|
|||
redundant_else = "warn"
|
||||
match_same_arms = "warn"
|
||||
semicolon_if_nothing_returned = "warn"
|
||||
map_flatten = "warn"
|
||||
|
||||
ptr_as_ptr = "warn"
|
||||
ptr_cast_constness = "warn"
|
||||
ref_as_ptr = "warn"
|
||||
|
||||
[workspace.lints.rust]
|
||||
unsafe_op_in_unsafe_fn = "warn"
|
||||
missing_docs = "warn"
|
||||
unsafe_code = "deny"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
@ -51,7 +55,9 @@ workspace = true
|
|||
default = [
|
||||
"animation",
|
||||
"bevy_asset",
|
||||
"bevy_state",
|
||||
"bevy_audio",
|
||||
"bevy_color",
|
||||
"bevy_gilrs",
|
||||
"bevy_scene",
|
||||
"bevy_winit",
|
||||
|
@ -62,7 +68,7 @@ default = [
|
|||
"bevy_sprite",
|
||||
"bevy_text",
|
||||
"bevy_ui",
|
||||
"multi-threaded",
|
||||
"multi_threaded",
|
||||
"png",
|
||||
"hdr",
|
||||
"vorbis",
|
||||
|
@ -74,13 +80,17 @@ default = [
|
|||
"webgl2",
|
||||
"wayland",
|
||||
"bevy_debug_stepping",
|
||||
"sysinfo_plugin",
|
||||
]
|
||||
|
||||
# Force dynamic linking, which improves iterative compile times
|
||||
dynamic_linking = ["dep:bevy_dylib", "bevy_internal/dynamic_linking"]
|
||||
|
||||
# Enables system information diagnostic plugin
|
||||
sysinfo_plugin = ["bevy_internal/sysinfo_plugin"]
|
||||
|
||||
# Provides animation functionality
|
||||
bevy_animation = ["bevy_internal/bevy_animation"]
|
||||
bevy_animation = ["bevy_internal/bevy_animation", "bevy_color"]
|
||||
|
||||
# Provides asset functionality
|
||||
bevy_asset = ["bevy_internal/bevy_asset"]
|
||||
|
@ -88,6 +98,9 @@ bevy_asset = ["bevy_internal/bevy_asset"]
|
|||
# Provides audio functionality
|
||||
bevy_audio = ["bevy_internal/bevy_audio"]
|
||||
|
||||
# Provides shared color types and operations
|
||||
bevy_color = ["bevy_internal/bevy_color"]
|
||||
|
||||
# Provides cameras and other basic render pipeline features
|
||||
bevy_core_pipeline = [
|
||||
"bevy_internal/bevy_core_pipeline",
|
||||
|
@ -113,13 +126,18 @@ bevy_pbr = [
|
|||
]
|
||||
|
||||
# Provides rendering functionality
|
||||
bevy_render = ["bevy_internal/bevy_render"]
|
||||
bevy_render = ["bevy_internal/bevy_render", "bevy_color"]
|
||||
|
||||
# Provides scene functionality
|
||||
bevy_scene = ["bevy_internal/bevy_scene", "bevy_asset"]
|
||||
|
||||
# Provides sprite functionality
|
||||
bevy_sprite = ["bevy_internal/bevy_sprite", "bevy_render", "bevy_core_pipeline"]
|
||||
bevy_sprite = [
|
||||
"bevy_internal/bevy_sprite",
|
||||
"bevy_render",
|
||||
"bevy_core_pipeline",
|
||||
"bevy_color",
|
||||
]
|
||||
|
||||
# Provides text functionality
|
||||
bevy_text = ["bevy_internal/bevy_text", "bevy_asset", "bevy_sprite"]
|
||||
|
@ -130,13 +148,17 @@ bevy_ui = [
|
|||
"bevy_core_pipeline",
|
||||
"bevy_text",
|
||||
"bevy_sprite",
|
||||
"bevy_color",
|
||||
]
|
||||
|
||||
# winit window and input backend
|
||||
bevy_winit = ["bevy_internal/bevy_winit"]
|
||||
|
||||
# Adds support for rendering gizmos
|
||||
bevy_gizmos = ["bevy_internal/bevy_gizmos"]
|
||||
bevy_gizmos = ["bevy_internal/bevy_gizmos", "bevy_color"]
|
||||
|
||||
# Provides a collection of developer tools
|
||||
bevy_dev_tools = ["bevy_internal/bevy_dev_tools"]
|
||||
|
||||
# Tracing support, saving a file in Chrome Tracing format
|
||||
trace_chrome = ["trace", "bevy_internal/trace_chrome"]
|
||||
|
@ -233,7 +255,7 @@ symphonia-wav = ["bevy_internal/symphonia-wav"]
|
|||
serialize = ["bevy_internal/serialize"]
|
||||
|
||||
# Enables multithreaded parallelism in the engine. Disabling it forces all engine tasks to run on a single thread.
|
||||
multi-threaded = ["bevy_internal/multi-threaded"]
|
||||
multi_threaded = ["bevy_internal/multi_threaded"]
|
||||
|
||||
# Use async-io's implementation of block_on instead of futures-lite's implementation. This is preferred if your application uses async-io.
|
||||
async-io = ["bevy_internal/async-io"]
|
||||
|
@ -283,10 +305,15 @@ shader_format_spirv = ["bevy_internal/shader_format_spirv"]
|
|||
# Enable support for transmission-related textures in the `StandardMaterial`, at the risk of blowing past the global, per-shader texture limit on older/lower-end GPUs
|
||||
pbr_transmission_textures = ["bevy_internal/pbr_transmission_textures"]
|
||||
|
||||
# Enable support for multi-layer material textures in the `StandardMaterial`, at the risk of blowing past the global, per-shader texture limit on older/lower-end GPUs
|
||||
pbr_multi_layer_material_textures = [
|
||||
"bevy_internal/pbr_multi_layer_material_textures",
|
||||
]
|
||||
|
||||
# Enable some limitations to be able to use WebGL2. Please refer to the [WebGL2 and WebGPU](https://github.com/bevyengine/bevy/tree/latest/examples#webgl2-and-webgpu) section of the examples README for more information on how to run Wasm builds with WebGPU.
|
||||
webgl2 = ["bevy_internal/webgl"]
|
||||
|
||||
# Enable support for WebGPU in Wasm. When enabled, this feature will override the `webgl2` feature and you won't be able to run Wasm builds with WebGL2, only with WebGPU. Requires the `RUSTFLAGS` environment variable to be set to `--cfg=web_sys_unstable_apis` when building.
|
||||
# Enable support for WebGPU in Wasm. When enabled, this feature will override the `webgl2` feature and you won't be able to run Wasm builds with WebGL2, only with WebGPU.
|
||||
webgpu = ["bevy_internal/webgpu"]
|
||||
|
||||
# Enables the built-in asset processor for processed assets.
|
||||
|
@ -301,12 +328,28 @@ embedded_watcher = ["bevy_internal/embedded_watcher"]
|
|||
# Enable stepping-based debugging of Bevy systems
|
||||
bevy_debug_stepping = ["bevy_internal/bevy_debug_stepping"]
|
||||
|
||||
# Enables the meshlet renderer for dense high-poly scenes (experimental)
|
||||
meshlet = ["bevy_internal/meshlet"]
|
||||
|
||||
# Enables processing meshes into meshlet meshes for bevy_pbr
|
||||
meshlet_processor = ["bevy_internal/meshlet_processor"]
|
||||
|
||||
# Enable support for the ios_simulator by downgrading some rendering capabilities
|
||||
ios_simulator = ["bevy_internal/ios_simulator"]
|
||||
|
||||
# Enable built-in global state machines
|
||||
bevy_state = ["bevy_internal/bevy_state"]
|
||||
|
||||
[dependencies]
|
||||
bevy_dylib = { path = "crates/bevy_dylib", version = "0.12.0", default-features = false, optional = true }
|
||||
bevy_internal = { path = "crates/bevy_internal", version = "0.12.0", default-features = false }
|
||||
bevy_internal = { path = "crates/bevy_internal", version = "0.14.0-dev", default-features = false }
|
||||
|
||||
# WASM does not support dynamic linking.
|
||||
[target.'cfg(not(target_family = "wasm"))'.dependencies]
|
||||
bevy_dylib = { path = "crates/bevy_dylib", version = "0.14.0-dev", default-features = false, optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
rand = "0.8.0"
|
||||
rand_chacha = "0.3.1"
|
||||
ron = "0.8.0"
|
||||
flate2 = "1.0"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
|
@ -315,6 +358,7 @@ bytemuck = "1.7"
|
|||
futures-lite = "2.0.1"
|
||||
crossbeam-channel = "0.5.0"
|
||||
argh = "0.1.12"
|
||||
thiserror = "1.0"
|
||||
|
||||
[[example]]
|
||||
name = "hello_world"
|
||||
|
@ -350,6 +394,7 @@ wasm = true
|
|||
[[example]]
|
||||
name = "2d_viewport_to_world"
|
||||
path = "examples/2d/2d_viewport_to_world.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.2d_viewport_to_world]
|
||||
name = "2D Viewport To World"
|
||||
|
@ -423,17 +468,6 @@ description = "Renders a glTF mesh in 2D with a custom vertex attribute"
|
|||
category = "2D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "2d_gizmos"
|
||||
path = "examples/2d/2d_gizmos.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.2d_gizmos]
|
||||
name = "2D Gizmos"
|
||||
description = "A scene showcasing 2D gizmos"
|
||||
category = "2D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "sprite"
|
||||
path = "examples/2d/sprite.rs"
|
||||
|
@ -445,6 +479,17 @@ description = "Renders a sprite"
|
|||
category = "2D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "sprite_animation"
|
||||
path = "examples/2d/sprite_animation.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.sprite_animation]
|
||||
name = "Sprite Animation"
|
||||
description = "Animates a sprite in response to an event"
|
||||
category = "2D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "sprite_flipping"
|
||||
path = "examples/2d/sprite_flipping.rs"
|
||||
|
@ -470,6 +515,7 @@ wasm = true
|
|||
[[example]]
|
||||
name = "sprite_tile"
|
||||
path = "examples/2d/sprite_tile.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.sprite_tile]
|
||||
name = "Sprite Tile"
|
||||
|
@ -480,6 +526,7 @@ wasm = true
|
|||
[[example]]
|
||||
name = "sprite_slice"
|
||||
path = "examples/2d/sprite_slice.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.sprite_slice]
|
||||
name = "Sprite Slice"
|
||||
|
@ -523,6 +570,7 @@ wasm = true
|
|||
[[example]]
|
||||
name = "pixel_grid_snap"
|
||||
path = "examples/2d/pixel_grid_snap.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.pixel_grid_snap]
|
||||
name = "Pixel Grid Snapping"
|
||||
|
@ -541,6 +589,17 @@ description = "Showcases bounding volumes and intersection tests"
|
|||
category = "2D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "wireframe_2d"
|
||||
path = "examples/2d/wireframe_2d.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.wireframe_2d]
|
||||
name = "2D Wireframe"
|
||||
description = "Showcases wireframes for 2d meshes"
|
||||
category = "2D Rendering"
|
||||
wasm = false
|
||||
|
||||
# 3D Rendering
|
||||
[[example]]
|
||||
name = "3d_scene"
|
||||
|
@ -608,17 +667,6 @@ description = "Compares different anti-aliasing methods"
|
|||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "3d_gizmos"
|
||||
path = "examples/3d/3d_gizmos.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.3d_gizmos]
|
||||
name = "3D Gizmos"
|
||||
description = "A scene showcasing 3D gizmos"
|
||||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "atmospheric_fog"
|
||||
path = "examples/3d/atmospheric_fog.rs"
|
||||
|
@ -641,6 +689,17 @@ description = "A scene showcasing the distance fog effect"
|
|||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "auto_exposure"
|
||||
path = "examples/3d/auto_exposure.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.auto_exposure]
|
||||
name = "Auto Exposure"
|
||||
description = "A scene showcasing auto exposure"
|
||||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "blend_modes"
|
||||
path = "examples/3d/blend_modes.rs"
|
||||
|
@ -652,17 +711,6 @@ description = "Showcases different blend modes"
|
|||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "deterministic"
|
||||
path = "examples/3d/deterministic.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.deterministic]
|
||||
name = "Deterministic rendering"
|
||||
description = "Stop flickering from z-fighting at a performance cost"
|
||||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "lighting"
|
||||
path = "examples/3d/lighting.rs"
|
||||
|
@ -721,6 +769,7 @@ wasm = true
|
|||
[[example]]
|
||||
name = "deferred_rendering"
|
||||
path = "examples/3d/deferred_rendering.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.deferred_rendering]
|
||||
name = "Deferred Rendering"
|
||||
|
@ -739,6 +788,17 @@ description = "Loads and renders a glTF file as a scene"
|
|||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "motion_blur"
|
||||
path = "examples/3d/motion_blur.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.motion_blur]
|
||||
name = "Motion Blur"
|
||||
description = "Demonstrates per-pixel motion blur"
|
||||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "tonemapping"
|
||||
path = "examples/3d/tonemapping.rs"
|
||||
|
@ -885,6 +945,7 @@ wasm = true
|
|||
[[example]]
|
||||
name = "transmission"
|
||||
path = "examples/3d/transmission.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.transmission]
|
||||
name = "Transmission"
|
||||
|
@ -936,6 +997,37 @@ description = "Showcases wireframe rendering"
|
|||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "irradiance_volumes"
|
||||
path = "examples/3d/irradiance_volumes.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.irradiance_volumes]
|
||||
name = "Irradiance Volumes"
|
||||
description = "Demonstrates irradiance volumes"
|
||||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "meshlet"
|
||||
path = "examples/3d/meshlet.rs"
|
||||
doc-scrape-examples = true
|
||||
required-features = ["meshlet"]
|
||||
|
||||
[package.metadata.example.meshlet]
|
||||
name = "Meshlet"
|
||||
description = "Meshlet rendering for dense high-poly scenes (experimental)"
|
||||
category = "3D Rendering"
|
||||
wasm = false
|
||||
setup = [
|
||||
[
|
||||
"curl",
|
||||
"-o",
|
||||
"assets/models/bunny.meshlet_mesh",
|
||||
"https://raw.githubusercontent.com/JMS55/bevy_meshlet_asset/bd869887bc5c9c6e74e353f657d342bef84bacd8/bunny.meshlet_mesh",
|
||||
],
|
||||
]
|
||||
|
||||
[[example]]
|
||||
name = "lightmaps"
|
||||
path = "examples/3d/lightmaps.rs"
|
||||
|
@ -967,6 +1059,17 @@ description = "Plays an animation from a skinned glTF"
|
|||
category = "Animation"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "animation_graph"
|
||||
path = "examples/animation/animation_graph.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.animation_graph]
|
||||
name = "Animation Graph"
|
||||
description = "Blends multiple animations together with a graph"
|
||||
category = "Animation"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "morph_targets"
|
||||
path = "examples/animation/morph_targets.rs"
|
||||
|
@ -989,6 +1092,17 @@ description = "Create and play an animation defined by code that operates on the
|
|||
category = "Animation"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "color_animation"
|
||||
path = "examples/animation/color_animation.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.color_animation]
|
||||
name = "Color animation"
|
||||
description = "Demonstrates how to animate colors using mixing and splines in different color spaces"
|
||||
category = "Animation"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "cubic_curve"
|
||||
path = "examples/animation/cubic_curve.rs"
|
||||
|
@ -1092,6 +1206,7 @@ wasm = true
|
|||
[[example]]
|
||||
name = "log_layers"
|
||||
path = "examples/app/log_layers.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.log_layers]
|
||||
name = "Log layers"
|
||||
|
@ -1099,6 +1214,17 @@ description = "Illustrate how to add custom log layers"
|
|||
category = "Application"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "log_layers_ecs"
|
||||
path = "examples/app/log_layers_ecs.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.log_layers_ecs]
|
||||
name = "Advanced log layers"
|
||||
description = "Illustrate how to transfer data between log layers and Bevy's ECS"
|
||||
category = "Application"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "plugin"
|
||||
path = "examples/app/plugin.rs"
|
||||
|
@ -1154,6 +1280,17 @@ description = "An application that runs with default plugins and displays an emp
|
|||
category = "Application"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "headless_renderer"
|
||||
path = "examples/app/headless_renderer.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.headless_renderer]
|
||||
name = "Headless Renderer"
|
||||
description = "An application that runs with no window, but renders into image file"
|
||||
category = "Application"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "without_winit"
|
||||
path = "examples/app/without_winit.rs"
|
||||
|
@ -1177,6 +1314,17 @@ description = "Demonstrates various methods to load assets"
|
|||
category = "Assets"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "asset_settings"
|
||||
path = "examples/asset/asset_settings.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.asset_settings]
|
||||
name = "Asset Settings"
|
||||
description = "Demonstrates various methods of applying settings when loading an asset"
|
||||
category = "Assets"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "asset_decompression"
|
||||
path = "examples/asset/asset_decompression.rs"
|
||||
|
@ -1221,6 +1369,17 @@ description = "Embed an asset in the application binary and load it"
|
|||
category = "Assets"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "extra_asset_source"
|
||||
path = "examples/asset/extra_source.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.extra_asset_source]
|
||||
name = "Extra asset source"
|
||||
description = "Load an asset from a non-standard asset source"
|
||||
category = "Assets"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "hot_asset_reloading"
|
||||
path = "examples/asset/hot_asset_reloading.rs"
|
||||
|
@ -1245,6 +1404,17 @@ description = "Demonstrates how to process and load custom assets"
|
|||
category = "Assets"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "repeated_texture"
|
||||
path = "examples/asset/repeated_texture.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.repeated_texture]
|
||||
name = "Repeated texture configuration"
|
||||
description = "How to configure the texture to repeat instead of the default clamp to edges"
|
||||
category = "Assets"
|
||||
wasm = true
|
||||
|
||||
# Async Tasks
|
||||
[[example]]
|
||||
name = "async_compute"
|
||||
|
@ -1302,6 +1472,17 @@ description = "Shows how to create and register a custom audio source by impleme
|
|||
category = "Audio"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "soundtrack"
|
||||
path = "examples/audio/soundtrack.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.soundtrack]
|
||||
name = "Soundtrack"
|
||||
description = "Shows how to play different soundtracks based on game state"
|
||||
category = "Audio"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "spatial_audio_2d"
|
||||
path = "examples/audio/spatial_audio_2d.rs"
|
||||
|
@ -1327,6 +1508,7 @@ wasm = true
|
|||
[[example]]
|
||||
name = "pitch"
|
||||
path = "examples/audio/pitch.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.pitch]
|
||||
name = "Pitch"
|
||||
|
@ -1386,6 +1568,17 @@ description = "Change detection on components"
|
|||
category = "ECS (Entity Component System)"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "component_hooks"
|
||||
path = "examples/ecs/component_hooks.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.component_hooks]
|
||||
name = "Component Hooks"
|
||||
description = "Define component hooks to manage component lifecycle events"
|
||||
category = "ECS (Entity Component System)"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "custom_schedule"
|
||||
path = "examples/ecs/custom_schedule.rs"
|
||||
|
@ -1488,6 +1681,7 @@ wasm = true
|
|||
[[example]]
|
||||
name = "one_shot_systems"
|
||||
path = "examples/ecs/one_shot_systems.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.one_shot_systems]
|
||||
name = "One Shot Systems"
|
||||
|
@ -1541,13 +1735,35 @@ wasm = false
|
|||
|
||||
[[example]]
|
||||
name = "state"
|
||||
path = "examples/ecs/state.rs"
|
||||
path = "examples/state/state.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.state]
|
||||
name = "State"
|
||||
description = "Illustrates how to use States to control transitioning from a Menu state to an InGame state"
|
||||
category = "ECS (Entity Component System)"
|
||||
category = "State"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "sub_states"
|
||||
path = "examples/state/sub_states.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.sub_states]
|
||||
name = "Sub States"
|
||||
description = "Using Sub States for hierarchical state handling."
|
||||
category = "State"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "computed_states"
|
||||
path = "examples/state/computed_states.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.computed_states]
|
||||
name = "Computed States"
|
||||
description = "Advanced state patterns using Computed States"
|
||||
category = "State"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
|
@ -1587,10 +1803,11 @@ wasm = false
|
|||
name = "system_stepping"
|
||||
path = "examples/ecs/system_stepping.rs"
|
||||
doc-scrape-examples = true
|
||||
required-features = ["bevy_debug_stepping"]
|
||||
|
||||
[package.metadata.example.system_stepping]
|
||||
name = "System Stepping"
|
||||
description = "Demonstrate stepping through systems in order of execution"
|
||||
description = "Demonstrate stepping through systems in order of execution."
|
||||
category = "ECS (Entity Component System)"
|
||||
wasm = false
|
||||
|
||||
|
@ -1598,6 +1815,7 @@ wasm = false
|
|||
[[example]]
|
||||
name = "time"
|
||||
path = "examples/time/time.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.time]
|
||||
name = "Time handling"
|
||||
|
@ -1608,6 +1826,7 @@ wasm = false
|
|||
[[example]]
|
||||
name = "virtual_time"
|
||||
path = "examples/time/virtual_time.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.virtual_time]
|
||||
name = "Virtual time"
|
||||
|
@ -1646,7 +1865,7 @@ doc-scrape-examples = true
|
|||
|
||||
[package.metadata.example.breakout]
|
||||
name = "Breakout"
|
||||
description = "An implementation of the classic game \"Breakout\""
|
||||
description = "An implementation of the classic game \"Breakout\"."
|
||||
category = "Games"
|
||||
wasm = true
|
||||
|
||||
|
@ -1661,6 +1880,17 @@ description = "Displays each contributor as a bouncy bevy-ball!"
|
|||
category = "Games"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "desk_toy"
|
||||
path = "examples/games/desk_toy.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.desk_toy]
|
||||
name = "Desk Toy"
|
||||
description = "Bevy logo as a desk toy using transparent windows! Now with Googly Eyes!"
|
||||
category = "Games"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "game_menu"
|
||||
path = "examples/games/game_menu.rs"
|
||||
|
@ -1672,6 +1902,17 @@ description = "A simple game menu"
|
|||
category = "Games"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "loading_screen"
|
||||
path = "examples/games/loading_screen.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.loading_screen]
|
||||
name = "Loading Screen"
|
||||
description = "Demonstrates how to create a loading screen that waits for all assets to be loaded and render pipelines to be compiled."
|
||||
category = "Games"
|
||||
wasm = true
|
||||
|
||||
# Input
|
||||
[[example]]
|
||||
name = "char_input_events"
|
||||
|
@ -1828,6 +2069,28 @@ description = "Demonstrates how reflection in Bevy provides a way to dynamically
|
|||
category = "Reflection"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "custom_attributes"
|
||||
path = "examples/reflection/custom_attributes.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.custom_attributes]
|
||||
name = "Custom Attributes"
|
||||
description = "Registering and accessing custom attributes on reflected types"
|
||||
category = "Reflection"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "dynamic_types"
|
||||
path = "examples/reflection/dynamic_types.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.dynamic_types]
|
||||
name = "Dynamic Types"
|
||||
description = "How dynamic types are used with reflection"
|
||||
category = "Reflection"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "generic_reflection"
|
||||
path = "examples/reflection/generic_reflection.rs"
|
||||
|
@ -1942,6 +2205,7 @@ wasm = true
|
|||
[[example]]
|
||||
name = "extended_material"
|
||||
path = "examples/shader/extended_material.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.extended_material]
|
||||
name = "Extended Material"
|
||||
|
@ -2016,6 +2280,17 @@ description = "A compute shader that simulates Conway's Game of Life"
|
|||
category = "Shaders"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "gpu_readback"
|
||||
path = "examples/shader/gpu_readback.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.gpu_readback]
|
||||
name = "GPU readback"
|
||||
description = "A very simple compute shader that writes to a buffer that is read by the cpu"
|
||||
category = "Shaders"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "array_texture"
|
||||
path = "examples/shader/array_texture.rs"
|
||||
|
@ -2217,6 +2492,17 @@ description = "Illustrates how to (constantly) rotate an object around an axis"
|
|||
category = "Transforms"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "align"
|
||||
path = "examples/transforms/align.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.align]
|
||||
name = "Alignment"
|
||||
description = "A demonstration of Transform's axis-alignment feature"
|
||||
category = "Transforms"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "scale"
|
||||
path = "examples/transforms/scale.rs"
|
||||
|
@ -2262,6 +2548,17 @@ description = "Demonstrates how to create a node with a border"
|
|||
category = "UI (User Interface)"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "rounded_borders"
|
||||
path = "examples/ui/rounded_borders.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.rounded_borders]
|
||||
name = "Rounded Borders"
|
||||
description = "Demonstrates how to create a node with a rounded border"
|
||||
category = "UI (User Interface)"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "button"
|
||||
path = "examples/ui/button.rs"
|
||||
|
@ -2472,6 +2769,28 @@ description = "Illustrates how to use TextureAtlases in UI"
|
|||
category = "UI (User Interface)"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "ui_texture_slice"
|
||||
path = "examples/ui/ui_texture_slice.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.ui_texture_slice]
|
||||
name = "UI Texture Slice"
|
||||
description = "Illustrates how to use 9 Slicing in UI"
|
||||
category = "UI (User Interface)"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "ui_texture_atlas_slice"
|
||||
path = "examples/ui/ui_texture_atlas_slice.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.ui_texture_atlas_slice]
|
||||
name = "UI Texture Atlas Slice"
|
||||
description = "Illustrates how to use 9 Slicing for TextureAtlases in UI"
|
||||
category = "UI (User Interface)"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "viewport_debug"
|
||||
path = "examples/ui/viewport_debug.rs"
|
||||
|
@ -2618,6 +2937,130 @@ description = "Demonstrates creating and using custom Ui materials"
|
|||
category = "UI (User Interface)"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "render_primitives"
|
||||
path = "examples/math/render_primitives.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.render_primitives]
|
||||
name = "Rendering Primitives"
|
||||
description = "Shows off rendering for all math primitives as both Meshes and Gizmos"
|
||||
category = "Math"
|
||||
wasm = true
|
||||
|
||||
# Gizmos
|
||||
[[example]]
|
||||
name = "2d_gizmos"
|
||||
path = "examples/gizmos/2d_gizmos.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.2d_gizmos]
|
||||
name = "2D Gizmos"
|
||||
description = "A scene showcasing 2D gizmos"
|
||||
category = "Gizmos"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "3d_gizmos"
|
||||
path = "examples/gizmos/3d_gizmos.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.3d_gizmos]
|
||||
name = "3D Gizmos"
|
||||
description = "A scene showcasing 3D gizmos"
|
||||
category = "Gizmos"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "axes"
|
||||
path = "examples/gizmos/axes.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.axes]
|
||||
name = "Axes"
|
||||
description = "Demonstrates the function of axes gizmos"
|
||||
category = "Gizmos"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "light_gizmos"
|
||||
path = "examples/gizmos/light_gizmos.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.light_gizmos]
|
||||
name = "Light Gizmos"
|
||||
description = "A scene showcasing light gizmos"
|
||||
category = "Gizmos"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "fps_overlay"
|
||||
path = "examples/dev_tools/fps_overlay.rs"
|
||||
doc-scrape-examples = true
|
||||
required-features = ["bevy_dev_tools"]
|
||||
|
||||
[package.metadata.example.fps_overlay]
|
||||
name = "FPS overlay"
|
||||
description = "Demonstrates FPS overlay"
|
||||
category = "Dev tools"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "visibility_range"
|
||||
path = "examples/3d/visibility_range.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.visibility_range]
|
||||
name = "Visibility range"
|
||||
description = "Demonstrates visibility ranges"
|
||||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "color_grading"
|
||||
path = "examples/3d/color_grading.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.color_grading]
|
||||
name = "Color grading"
|
||||
description = "Demonstrates color grading"
|
||||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "clearcoat"
|
||||
path = "examples/3d/clearcoat.rs"
|
||||
doc-scrape-examples = true
|
||||
required-features = ["pbr_multi_layer_material_textures"]
|
||||
|
||||
[package.metadata.example.clearcoat]
|
||||
name = "Clearcoat"
|
||||
description = "Demonstrates the clearcoat PBR feature"
|
||||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "depth_of_field"
|
||||
path = "examples/3d/depth_of_field.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.depth_of_field]
|
||||
name = "Depth of field"
|
||||
description = "Demonstrates depth of field"
|
||||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "volumetric_fog"
|
||||
path = "examples/3d/volumetric_fog.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.volumetric_fog]
|
||||
name = "Volumetric fog"
|
||||
description = "Demonstrates volumetric fog and lighting"
|
||||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[profile.wasm-release]
|
||||
inherits = "release"
|
||||
opt-level = "z"
|
||||
|
@ -2630,4 +3073,6 @@ lto = "fat"
|
|||
panic = "abort"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
all-features = true
|
||||
cargo-args = ["-Zunstable-options", "-Zrustdoc-scrape-examples"]
|
||||
|
|
12
README.md
|
@ -13,7 +13,7 @@ Bevy is a refreshingly simple data-driven game engine built in Rust. It is free
|
|||
|
||||
## WARNING
|
||||
|
||||
Bevy is still in the early stages of development. Important features are missing. Documentation is sparse. A new version of Bevy containing breaking changes to the API is released [approximately once every 3 months](https://bevyengine.org/news/bevy-0-6/#the-train-release-schedule). We provide [migration guides](https://bevyengine.org/learn/book/migration-guides/), but we can't guarantee migrations will always be easy. Use only if you are willing to work in this environment.
|
||||
Bevy is still in the early stages of development. Important features are missing. Documentation is sparse. A new version of Bevy containing breaking changes to the API is released [approximately once every 3 months](https://bevyengine.org/news/bevy-0-6/#the-train-release-schedule). We provide [migration guides](https://bevyengine.org/learn/migration-guides/), but we can't guarantee migrations will always be easy. Use only if you are willing to work in this environment.
|
||||
|
||||
**MSRV:** Bevy relies heavily on improvements in the Rust language and compiler.
|
||||
As a result, the Minimum Supported Rust Version (MSRV) is generally close to "the latest stable release" of Rust.
|
||||
|
@ -34,7 +34,7 @@ As a result, the Minimum Supported Rust Version (MSRV) is generally close to "th
|
|||
|
||||
## Docs
|
||||
|
||||
* **[The Bevy Book](https://bevyengine.org/learn/book/introduction):** Bevy's official documentation. The best place to start learning Bevy.
|
||||
* **[Quick Start Guide](https://bevyengine.org/learn/quick-start/introduction):** Bevy's official Quick Start Guide. The best place to start learning Bevy.
|
||||
* **[Bevy Rust API Docs](https://docs.rs/bevy):** Bevy's Rust API docs, which are automatically generated from the doc comments in this repo.
|
||||
* **[Official Examples](https://github.com/bevyengine/bevy/tree/latest/examples):** Bevy's dedicated, runnable examples, which are great for digging into specific concepts.
|
||||
* **[Community-Made Learning Resources](https://bevyengine.org/assets/#learning)**: More tutorials, documentation, and examples made by the Bevy community.
|
||||
|
@ -58,9 +58,9 @@ For more complex architecture decisions and experimental mad science, please ope
|
|||
|
||||
## Getting Started
|
||||
|
||||
We recommend checking out [The Bevy Book](https://bevyengine.org/learn/book/introduction) for a full tutorial.
|
||||
We recommend checking out the [Quick Start Guide](https://bevyengine.org/learn/quick-start/introduction) for a brief introduction.
|
||||
|
||||
Follow the [Setup guide](https://bevyengine.org/learn/book/getting-started/setup/) to ensure your development environment is set up correctly.
|
||||
Follow the [Setup guide](https://bevyengine.org/learn/quick-start/getting-started/setup) to ensure your development environment is set up correctly.
|
||||
Once set up, you can quickly try out the [examples](https://github.com/bevyengine/bevy/tree/latest/examples) by cloning this repo and running the following commands:
|
||||
|
||||
```sh
|
||||
|
@ -84,7 +84,7 @@ fn main(){
|
|||
|
||||
### Fast Compiles
|
||||
|
||||
Bevy can be built just fine using default configuration on stable Rust. However for really fast iterative compiles, you should enable the "fast compiles" setup by [following the instructions here](http://bevyengine.org/learn/book/getting-started/setup/).
|
||||
Bevy can be built just fine using the default configuration on stable Rust. However, for really fast iterative compiles, you should enable the "fast compiles" setup by [following the instructions here](https://bevyengine.org/learn/quick-start/getting-started/setup).
|
||||
|
||||
## [Bevy Cargo Features][cargo_features]
|
||||
|
||||
|
@ -96,7 +96,7 @@ This [list][cargo_features] outlines the different cargo features supported by B
|
|||
|
||||
Bevy is the result of the hard work of many people. A huge thanks to all Bevy contributors, the many open source projects that have come before us, the [Rust gamedev ecosystem](https://arewegameyet.rs/), and the many libraries we build on.
|
||||
|
||||
A huge thanks to Bevy's [generous sponsors](https://bevyengine.org). Bevy will always be free and open source, but it isn't free to make. Please consider [sponsoring our work](https://bevyengine.org/community/donate/) if you like what we're building.
|
||||
A huge thanks to Bevy's [generous sponsors](https://bevyengine.org). Bevy will always be free and open source, but it isn't free to make. Please consider [sponsoring our work](https://bevyengine.org/donate/) if you like what we're building.
|
||||
|
||||
<!-- This next line needs to stay exactly as is. It is required for BrowserStack sponsorship. -->
|
||||
This project is tested with BrowserStack.
|
||||
|
|
35
assets/animation_graphs/Fox.animgraph.ron
Normal file
|
@ -0,0 +1,35 @@
|
|||
(
|
||||
graph: (
|
||||
nodes: [
|
||||
(
|
||||
clip: None,
|
||||
weight: 1.0,
|
||||
),
|
||||
(
|
||||
clip: None,
|
||||
weight: 0.5,
|
||||
),
|
||||
(
|
||||
clip: Some(AssetPath("models/animated/Fox.glb#Animation0")),
|
||||
weight: 1.0,
|
||||
),
|
||||
(
|
||||
clip: Some(AssetPath("models/animated/Fox.glb#Animation1")),
|
||||
weight: 1.0,
|
||||
),
|
||||
(
|
||||
clip: Some(AssetPath("models/animated/Fox.glb#Animation2")),
|
||||
weight: 1.0,
|
||||
),
|
||||
],
|
||||
node_holes: [],
|
||||
edge_property: directed,
|
||||
edges: [
|
||||
Some((0, 1, ())),
|
||||
Some((0, 2, ())),
|
||||
Some((1, 3, ())),
|
||||
Some((1, 4, ())),
|
||||
],
|
||||
),
|
||||
root: 0,
|
||||
)
|
BIN
assets/irradiance_volumes/Example.vxgi.ktx2
Normal file
BIN
assets/models/DepthOfFieldExample/CircuitBoardLightmap.hdr
Normal file
BIN
assets/models/DepthOfFieldExample/DepthOfFieldExample.glb
Normal file
BIN
assets/models/FlightHelmetLowPoly/FlightHelmetLowPoly.bin
Normal file
1739
assets/models/FlightHelmetLowPoly/FlightHelmetLowPoly.gltf
Normal file
BIN
assets/models/GolfBall/GolfBall.glb
Normal file
BIN
assets/models/TonemappingTest/TestPattern.png
Normal file
BIN
assets/models/TonemappingTest/TonemappingTest.bin
Normal file
679
assets/models/TonemappingTest/TonemappingTest.gltf
Normal file
|
@ -0,0 +1,679 @@
|
|||
{
|
||||
"asset":{
|
||||
"generator":"Khronos glTF Blender I/O v4.0.44",
|
||||
"version":"2.0"
|
||||
},
|
||||
"scene":0,
|
||||
"scenes":[
|
||||
{
|
||||
"name":"Scene",
|
||||
"nodes":[
|
||||
0,
|
||||
1,
|
||||
2
|
||||
]
|
||||
}
|
||||
],
|
||||
"nodes":[
|
||||
{
|
||||
"mesh":0,
|
||||
"name":"Plane",
|
||||
"scale":[
|
||||
50,
|
||||
1,
|
||||
50
|
||||
]
|
||||
},
|
||||
{
|
||||
"mesh":1,
|
||||
"name":"Cube",
|
||||
"translation":[
|
||||
-1,
|
||||
0.125,
|
||||
0
|
||||
]
|
||||
},
|
||||
{
|
||||
"mesh":2,
|
||||
"name":"Sphere",
|
||||
"translation":[
|
||||
0,
|
||||
0.125,
|
||||
0
|
||||
]
|
||||
}
|
||||
],
|
||||
"materials":[
|
||||
{
|
||||
"doubleSided":true,
|
||||
"name":"Material.001",
|
||||
"pbrMetallicRoughness":{
|
||||
"baseColorFactor":[
|
||||
0.10000000149011612,
|
||||
0.20000000298023224,
|
||||
0.10000000149011612,
|
||||
1
|
||||
],
|
||||
"metallicFactor":0,
|
||||
"roughnessFactor":0.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"doubleSided":true,
|
||||
"name":"Material.002",
|
||||
"pbrMetallicRoughness":{
|
||||
"baseColorTexture":{
|
||||
"index":0
|
||||
},
|
||||
"metallicFactor":0,
|
||||
"roughnessFactor":0.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"doubleSided":true,
|
||||
"name":"Sphere0",
|
||||
"pbrMetallicRoughness":{
|
||||
"baseColorFactor":[
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
1
|
||||
],
|
||||
"metallicFactor":0,
|
||||
"roughnessFactor":0.08900000154972076
|
||||
}
|
||||
},
|
||||
{
|
||||
"doubleSided":true,
|
||||
"name":"Sphere1",
|
||||
"pbrMetallicRoughness":{
|
||||
"baseColorFactor":[
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
1
|
||||
],
|
||||
"metallicFactor":0,
|
||||
"roughnessFactor":0.08900000154972076
|
||||
}
|
||||
},
|
||||
{
|
||||
"doubleSided":true,
|
||||
"name":"Sphere2",
|
||||
"pbrMetallicRoughness":{
|
||||
"baseColorFactor":[
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
1
|
||||
],
|
||||
"metallicFactor":0,
|
||||
"roughnessFactor":0.08900000154972076
|
||||
}
|
||||
},
|
||||
{
|
||||
"doubleSided":true,
|
||||
"name":"Sphere3",
|
||||
"pbrMetallicRoughness":{
|
||||
"baseColorFactor":[
|
||||
0.20000000298023224,
|
||||
0.20000000298023224,
|
||||
1,
|
||||
1
|
||||
],
|
||||
"metallicFactor":0,
|
||||
"roughnessFactor":0.08900000154972076
|
||||
}
|
||||
},
|
||||
{
|
||||
"doubleSided":true,
|
||||
"name":"Sphere4",
|
||||
"pbrMetallicRoughness":{
|
||||
"baseColorFactor":[
|
||||
0.20000000298023224,
|
||||
1,
|
||||
0.20000000298023224,
|
||||
1
|
||||
],
|
||||
"metallicFactor":0,
|
||||
"roughnessFactor":0.08900000154972076
|
||||
}
|
||||
},
|
||||
{
|
||||
"doubleSided":true,
|
||||
"name":"Sphere5",
|
||||
"pbrMetallicRoughness":{
|
||||
"baseColorFactor":[
|
||||
1,
|
||||
0.20000000298023224,
|
||||
0.20000000298023224,
|
||||
1
|
||||
],
|
||||
"metallicFactor":0,
|
||||
"roughnessFactor":0.08900000154972076
|
||||
}
|
||||
}
|
||||
],
|
||||
"meshes":[
|
||||
{
|
||||
"name":"Plane",
|
||||
"primitives":[
|
||||
{
|
||||
"attributes":{
|
||||
"POSITION":0,
|
||||
"NORMAL":1,
|
||||
"TEXCOORD_0":2
|
||||
},
|
||||
"indices":3,
|
||||
"material":0
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name":"Cube.001",
|
||||
"primitives":[
|
||||
{
|
||||
"attributes":{
|
||||
"POSITION":4,
|
||||
"NORMAL":5,
|
||||
"TEXCOORD_0":6
|
||||
},
|
||||
"indices":7,
|
||||
"material":1
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name":"Sphere",
|
||||
"primitives":[
|
||||
{
|
||||
"attributes":{
|
||||
"POSITION":8,
|
||||
"NORMAL":9,
|
||||
"TEXCOORD_0":10
|
||||
},
|
||||
"indices":11,
|
||||
"material":2
|
||||
},
|
||||
{
|
||||
"attributes":{
|
||||
"POSITION":12,
|
||||
"NORMAL":13,
|
||||
"TEXCOORD_0":14
|
||||
},
|
||||
"indices":11,
|
||||
"material":3
|
||||
},
|
||||
{
|
||||
"attributes":{
|
||||
"POSITION":15,
|
||||
"NORMAL":16,
|
||||
"TEXCOORD_0":17
|
||||
},
|
||||
"indices":11,
|
||||
"material":4
|
||||
},
|
||||
{
|
||||
"attributes":{
|
||||
"POSITION":18,
|
||||
"NORMAL":19,
|
||||
"TEXCOORD_0":20
|
||||
},
|
||||
"indices":11,
|
||||
"material":5
|
||||
},
|
||||
{
|
||||
"attributes":{
|
||||
"POSITION":21,
|
||||
"NORMAL":22,
|
||||
"TEXCOORD_0":23
|
||||
},
|
||||
"indices":11,
|
||||
"material":6
|
||||
},
|
||||
{
|
||||
"attributes":{
|
||||
"POSITION":24,
|
||||
"NORMAL":25,
|
||||
"TEXCOORD_0":26
|
||||
},
|
||||
"indices":11,
|
||||
"material":7
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"textures":[
|
||||
{
|
||||
"sampler":0,
|
||||
"source":0
|
||||
}
|
||||
],
|
||||
"images":[
|
||||
{
|
||||
"mimeType":"image/png",
|
||||
"name":"TestPattern",
|
||||
"uri":"TestPattern.png"
|
||||
}
|
||||
],
|
||||
"accessors":[
|
||||
{
|
||||
"bufferView":0,
|
||||
"componentType":5126,
|
||||
"count":4,
|
||||
"max":[
|
||||
1,
|
||||
0,
|
||||
1
|
||||
],
|
||||
"min":[
|
||||
-1,
|
||||
0,
|
||||
-1
|
||||
],
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":1,
|
||||
"componentType":5126,
|
||||
"count":4,
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":2,
|
||||
"componentType":5126,
|
||||
"count":4,
|
||||
"type":"VEC2"
|
||||
},
|
||||
{
|
||||
"bufferView":3,
|
||||
"componentType":5123,
|
||||
"count":6,
|
||||
"type":"SCALAR"
|
||||
},
|
||||
{
|
||||
"bufferView":4,
|
||||
"componentType":5126,
|
||||
"count":120,
|
||||
"max":[
|
||||
1.125,
|
||||
0.125,
|
||||
0.125
|
||||
],
|
||||
"min":[
|
||||
-0.125,
|
||||
-0.125,
|
||||
-2.125
|
||||
],
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":5,
|
||||
"componentType":5126,
|
||||
"count":120,
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":6,
|
||||
"componentType":5126,
|
||||
"count":120,
|
||||
"type":"VEC2"
|
||||
},
|
||||
{
|
||||
"bufferView":7,
|
||||
"componentType":5123,
|
||||
"count":180,
|
||||
"type":"SCALAR"
|
||||
},
|
||||
{
|
||||
"bufferView":8,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"max":[
|
||||
-0.42500004172325134,
|
||||
0.125,
|
||||
0.37499991059303284
|
||||
],
|
||||
"min":[
|
||||
-0.6749998927116394,
|
||||
-0.125,
|
||||
0.125
|
||||
],
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":9,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":10,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC2"
|
||||
},
|
||||
{
|
||||
"bufferView":11,
|
||||
"componentType":5123,
|
||||
"count":3264,
|
||||
"type":"SCALAR"
|
||||
},
|
||||
{
|
||||
"bufferView":12,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"max":[
|
||||
-0.17500004172325134,
|
||||
0.125,
|
||||
0.12499991804361343
|
||||
],
|
||||
"min":[
|
||||
-0.4249998927116394,
|
||||
-0.125,
|
||||
-0.125
|
||||
],
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":13,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":14,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC2"
|
||||
},
|
||||
{
|
||||
"bufferView":15,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"max":[
|
||||
0.07499995082616806,
|
||||
0.125,
|
||||
-0.12500005960464478
|
||||
],
|
||||
"min":[
|
||||
-0.1749998927116394,
|
||||
-0.125,
|
||||
-0.3749999701976776
|
||||
],
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":16,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":17,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC2"
|
||||
},
|
||||
{
|
||||
"bufferView":18,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"max":[
|
||||
-0.1250000298023224,
|
||||
0.125,
|
||||
0.6749999523162842
|
||||
],
|
||||
"min":[
|
||||
-0.37499988079071045,
|
||||
-0.125,
|
||||
0.42500001192092896
|
||||
],
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":19,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":20,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC2"
|
||||
},
|
||||
{
|
||||
"bufferView":21,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"max":[
|
||||
0.12499996274709702,
|
||||
0.125,
|
||||
0.4249999225139618
|
||||
],
|
||||
"min":[
|
||||
-0.12499988079071045,
|
||||
-0.125,
|
||||
0.17500001192092896
|
||||
],
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":22,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":23,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC2"
|
||||
},
|
||||
{
|
||||
"bufferView":24,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"max":[
|
||||
0.3749999403953552,
|
||||
0.125,
|
||||
0.1749999225139618
|
||||
],
|
||||
"min":[
|
||||
0.12500008940696716,
|
||||
-0.125,
|
||||
-0.07499998807907104
|
||||
],
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":25,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC3"
|
||||
},
|
||||
{
|
||||
"bufferView":26,
|
||||
"componentType":5126,
|
||||
"count":625,
|
||||
"type":"VEC2"
|
||||
}
|
||||
],
|
||||
"bufferViews":[
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":48,
|
||||
"byteOffset":0,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":48,
|
||||
"byteOffset":48,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":32,
|
||||
"byteOffset":96,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":12,
|
||||
"byteOffset":128,
|
||||
"target":34963
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":1440,
|
||||
"byteOffset":140,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":1440,
|
||||
"byteOffset":1580,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":960,
|
||||
"byteOffset":3020,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":360,
|
||||
"byteOffset":3980,
|
||||
"target":34963
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":4340,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":11840,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":5000,
|
||||
"byteOffset":19340,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":6528,
|
||||
"byteOffset":24340,
|
||||
"target":34963
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":30868,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":38368,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":5000,
|
||||
"byteOffset":45868,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":50868,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":58368,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":5000,
|
||||
"byteOffset":65868,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":70868,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":78368,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":5000,
|
||||
"byteOffset":85868,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":90868,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":98368,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":5000,
|
||||
"byteOffset":105868,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":110868,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":7500,
|
||||
"byteOffset":118368,
|
||||
"target":34962
|
||||
},
|
||||
{
|
||||
"buffer":0,
|
||||
"byteLength":5000,
|
||||
"byteOffset":125868,
|
||||
"target":34962
|
||||
}
|
||||
],
|
||||
"samplers":[
|
||||
{
|
||||
"magFilter":9728,
|
||||
"minFilter":9984
|
||||
}
|
||||
],
|
||||
"buffers":[
|
||||
{
|
||||
"byteLength":130868,
|
||||
"uri":"TonemappingTest.bin"
|
||||
}
|
||||
]
|
||||
}
|
BIN
assets/models/VolumetricFogExample/VolumetricFogExample.glb
Normal file
|
@ -3,6 +3,7 @@
|
|||
mesh_view_bindings::view,
|
||||
pbr_types::{STANDARD_MATERIAL_FLAGS_DOUBLE_SIDED_BIT, PbrInput, pbr_input_new},
|
||||
pbr_functions as fns,
|
||||
pbr_bindings,
|
||||
}
|
||||
#import bevy_core_pipeline::tonemapping::tone_mapping
|
||||
|
||||
|
@ -37,19 +38,21 @@ fn fragment(
|
|||
|
||||
pbr_input.is_orthographic = view.projection[3].w == 1.0;
|
||||
|
||||
pbr_input.N = normalize(pbr_input.world_normal);
|
||||
|
||||
#ifdef VERTEX_TANGENTS
|
||||
let Nt = textureSampleBias(pbr_bindings::normal_map_texture, pbr_bindings::normal_map_sampler, mesh.uv, view.mip_bias).rgb;
|
||||
pbr_input.N = fns::apply_normal_mapping(
|
||||
pbr_input.material.flags,
|
||||
mesh.world_normal,
|
||||
double_sided,
|
||||
is_front,
|
||||
#ifdef VERTEX_TANGENTS
|
||||
#ifdef STANDARD_MATERIAL_NORMAL_MAP
|
||||
mesh.world_tangent,
|
||||
#endif
|
||||
#endif
|
||||
mesh.uv,
|
||||
Nt,
|
||||
view.mip_bias,
|
||||
);
|
||||
#endif
|
||||
|
||||
pbr_input.V = fns::calculate_view(mesh.world_position, pbr_input.is_orthographic);
|
||||
|
||||
return tone_mapping(fns::apply_pbr_lighting(pbr_input), view.color_grading);
|
||||
|
|
|
@ -1,4 +1,12 @@
|
|||
@group(0) @binding(0) var texture: texture_storage_2d<rgba8unorm, read_write>;
|
||||
// The shader reads the previous frame's state from the `input` texture, and writes the new state of
|
||||
// each pixel to the `output` texture. The textures are flipped each step to progress the
|
||||
// simulation.
|
||||
// Two textures are needed for the game of life as each pixel of step N depends on the state of its
|
||||
// neighbors at step N-1.
|
||||
|
||||
@group(0) @binding(0) var input: texture_storage_2d<r32float, read>;
|
||||
|
||||
@group(0) @binding(1) var output: texture_storage_2d<r32float, write>;
|
||||
|
||||
fn hash(value: u32) -> u32 {
|
||||
var state = value;
|
||||
|
@ -19,15 +27,15 @@ fn randomFloat(value: u32) -> f32 {
|
|||
fn init(@builtin(global_invocation_id) invocation_id: vec3<u32>, @builtin(num_workgroups) num_workgroups: vec3<u32>) {
|
||||
let location = vec2<i32>(i32(invocation_id.x), i32(invocation_id.y));
|
||||
|
||||
let randomNumber = randomFloat(invocation_id.y * num_workgroups.x + invocation_id.x);
|
||||
let randomNumber = randomFloat(invocation_id.y << 16u | invocation_id.x);
|
||||
let alive = randomNumber > 0.9;
|
||||
let color = vec4<f32>(f32(alive));
|
||||
|
||||
textureStore(texture, location, color);
|
||||
textureStore(output, location, color);
|
||||
}
|
||||
|
||||
fn is_alive(location: vec2<i32>, offset_x: i32, offset_y: i32) -> i32 {
|
||||
let value: vec4<f32> = textureLoad(texture, location + vec2<i32>(offset_x, offset_y));
|
||||
let value: vec4<f32> = textureLoad(input, location + vec2<i32>(offset_x, offset_y));
|
||||
return i32(value.x);
|
||||
}
|
||||
|
||||
|
@ -59,7 +67,5 @@ fn update(@builtin(global_invocation_id) invocation_id: vec3<u32>) {
|
|||
}
|
||||
let color = vec4<f32>(f32(alive));
|
||||
|
||||
storageBarrier();
|
||||
|
||||
textureStore(texture, location, color);
|
||||
}
|
||||
textureStore(output, location, color);
|
||||
}
|
||||
|
|
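The comments added at the top of this shader describe a classic ping-pong (double-buffer) setup: each step reads the previous cell state from the `input` texture and writes the next state to the `output` texture, and the two textures swap roles every frame, which is also why the old `storageBarrier()` call could be removed. The CPU-side swap is not shown in this diff; the following is only a rough Rust sketch of that idea, with hypothetical names (`GameOfLifeImages`, `flip_textures`) rather than the example's actual plugin code.

```rust
use bevy::prelude::*;

// Hypothetical resource holding the two storage textures. Which one is bound
// as the shader's `input` and which as its `output` alternates every frame.
#[derive(Resource)]
struct GameOfLifeImages {
    texture_a: Handle<Image>,
    texture_b: Handle<Image>,
    swapped: bool,
}

impl GameOfLifeImages {
    // Returns (input, output) for the current frame.
    fn bindings(&self) -> (&Handle<Image>, &Handle<Image>) {
        if self.swapped {
            (&self.texture_b, &self.texture_a)
        } else {
            (&self.texture_a, &self.texture_b)
        }
    }
}

// Runs once per frame after the compute pass has been queued, so that the
// texture written this frame becomes next frame's `input`.
fn flip_textures(mut images: ResMut<GameOfLifeImages>) {
    images.swapped = !images.swapped;
}
```

Because every dispatch now reads and writes different textures, no intra-pass synchronization is needed, which is consistent with the `storageBarrier()` removal above.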
12
assets/shaders/gpu_readback.wgsl
Normal file
|
@ -0,0 +1,12 @@
|
|||
// This shader is used for the gpu_readback example
|
||||
// The actual work it does is not important for the example
|
||||
|
||||
// This is the data that lives in the gpu only buffer
|
||||
@group(0) @binding(0) var<storage, read_write> data: array<u32>;
|
||||
|
||||
@compute @workgroup_size(1)
|
||||
fn main(@builtin(global_invocation_id) global_id: vec3<u32>) {
|
||||
// We use the global_id to index the array to make sure we don't
|
||||
// access data used in another workgroup
|
||||
data[global_id.x] += 1u;
|
||||
}
|
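The new `gpu_readback.wgsl` above only increments each element of a storage buffer; the point of the example is reading that buffer back on the CPU, and that Rust side is not part of this diff. As an illustration only, the generic wgpu readback pattern (copy into a mappable staging buffer, map it, read it) looks roughly like the sketch below; `device`, `queue`, and `gpu_buffer` are assumed inputs, and none of this is the example's actual code.

```rust
// Generic GPU -> CPU readback sketch using wgpu and bytemuck directly
// (inside Bevy you would go through the render world's RenderDevice/RenderQueue).
fn read_back_u32s(
    device: &wgpu::Device,
    queue: &wgpu::Queue,
    gpu_buffer: &wgpu::Buffer, // the storage buffer the compute shader wrote to
    size_bytes: u64,
) -> Vec<u32> {
    // A staging buffer is needed because storage buffers are usually not mappable.
    let staging = device.create_buffer(&wgpu::BufferDescriptor {
        label: Some("readback staging buffer"),
        size: size_bytes,
        usage: wgpu::BufferUsages::MAP_READ | wgpu::BufferUsages::COPY_DST,
        mapped_at_creation: false,
    });

    // Copy the GPU-only buffer into the mappable staging buffer and submit.
    let mut encoder =
        device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
    encoder.copy_buffer_to_buffer(gpu_buffer, 0, &staging, 0, size_bytes);
    queue.submit(Some(encoder.finish()));

    // Map the staging buffer and block until the GPU has finished.
    let slice = staging.slice(..);
    slice.map_async(wgpu::MapMode::Read, |result| result.unwrap());
    device.poll(wgpu::Maintain::Wait);

    let data = slice.get_mapped_range();
    let values = bytemuck::cast_slice::<u8, u32>(&data).to_vec();
    drop(data);
    staging.unmap();
    values
}
```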
assets/shaders/irradiance_volume_voxel_visualization.wgsl (new file, 35 lines)
@@ -0,0 +1,35 @@
#import bevy_pbr::forward_io::VertexOutput
#import bevy_pbr::irradiance_volume
#import bevy_pbr::mesh_view_bindings

struct VoxelVisualizationIrradianceVolumeInfo {
    transform: mat4x4<f32>,
    inverse_transform: mat4x4<f32>,
    resolution: vec3<u32>,
    // A scale factor that's applied to the diffuse and specular light from the
    // light probe. This is in units of cd/m² (candela per square meter).
    intensity: f32,
}

@group(2) @binding(100)
var<uniform> irradiance_volume_info: VoxelVisualizationIrradianceVolumeInfo;

@fragment
fn fragment(mesh: VertexOutput) -> @location(0) vec4<f32> {
    // Snap the world position we provide to `irradiance_volume_light()` to the
    // middle of the nearest texel.
    var unit_pos = (irradiance_volume_info.inverse_transform *
        vec4(mesh.world_position.xyz, 1.0f)).xyz;
    let resolution = vec3<f32>(irradiance_volume_info.resolution);
    let stp = clamp((unit_pos + 0.5) * resolution, vec3(0.5f), resolution - vec3(0.5f));
    let stp_rounded = round(stp - 0.5f) + 0.5f;
    let rounded_world_pos = (irradiance_volume_info.transform * vec4(stp_rounded, 1.0f)).xyz;

    // `irradiance_volume_light()` multiplies by intensity, so cancel it out.
    // If we take intensity into account, the cubes will be way too bright.
    let rgb = irradiance_volume::irradiance_volume_light(
        mesh.world_position.xyz,
        mesh.world_normal) / irradiance_volume_info.intensity;

    return vec4<f32>(rgb, 1.0f);
}
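The texel-snapping math in `fragment()` above is self-contained and can be checked on the CPU. A minimal sketch using `glam` (the matrices and resolution are made-up values, not taken from the example):

```rust
use glam::{Mat4, Vec3};

/// Mirrors the WGSL above: transform into the volume's unit space, clamp to the
/// valid texel range, round to the nearest texel center, and transform back.
fn snap_to_texel_center(
    world_pos: Vec3,
    transform: Mat4,
    inverse_transform: Mat4,
    resolution: Vec3,
) -> Vec3 {
    let unit_pos = inverse_transform.transform_point3(world_pos);
    let stp = ((unit_pos + 0.5) * resolution).clamp(Vec3::splat(0.5), resolution - 0.5);
    let stp_rounded = (stp - 0.5).round() + 0.5;
    transform.transform_point3(stp_rounded)
}

fn main() {
    // Assumed values: an 8x8x8 volume that is just a uniform scale of world space.
    let transform = Mat4::from_scale(Vec3::splat(4.0));
    let inverse_transform = transform.inverse();
    let resolution = Vec3::splat(8.0);
    let snapped = snap_to_texel_center(Vec3::new(0.3, 1.2, -0.7), transform, inverse_transform, resolution);
    println!("snapped world position: {snapped}");
}
```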
@@ -1,9 +1,10 @@
#import bevy_pbr::{
    mesh_view_bindings,
    forward_io::VertexOutput,
    utils::PI,
}

#import bevy_render::maths::PI

#ifdef TONEMAP_IN_SHADER
#import bevy_core_pipeline::tonemapping::tone_mapping
#endif

BIN assets/sounds/Epic orchestra music.ogg (new file)
BIN assets/sounds/Mysterious acoustic guitar.ogg (new file)
BIN assets/textures/BlueNoise-Normal.png (new file, 551 KiB)
BIN assets/textures/ScratchedGold-Normal.png (new file, 1 MiB)
BIN assets/textures/basic_metering_mask.png (new file, 447 B)
assets/textures/fantasy_ui_borders/License.txt (new file, 30 lines)
@@ -0,0 +1,30 @@


Fantasy UI Borders (1.0)

Created/distributed by Kenney (www.kenney.nl)
Creation date: 03-12-2023

For the sample image the font 'Aoboshi One' was used, OPL (Open Font License)

------------------------------

License: (Creative Commons Zero, CC0)
http://creativecommons.org/publicdomain/zero/1.0/

You can use this content for personal, educational, and commercial purposes.

Support by crediting 'Kenney' or 'www.kenney.nl' (this is not a requirement)

------------------------------

• Website : www.kenney.nl
• Donate : www.kenney.nl/donate

• Patreon : patreon.com/kenney

Follow on social media for updates:

• Twitter: twitter.com/KenneyNL
• Instagram: instagram.com/kenney_nl
• Mastodon: mastodon.gamedev.place/@kenney
BIN assets/textures/fantasy_ui_borders/border_sheet.png (new file, 10 KiB)
BIN assets/textures/fantasy_ui_borders/panel-border-010-repeated.png (new file, 170 B)
BIN assets/textures/fantasy_ui_borders/panel-border-010.png (new file, 170 B)
BIN assets/textures/fantasy_ui_borders/panel-border-015.png (new file, 136 B)
@@ -1,22 +1,22 @@
[package]
name = "benches"
version = "0.1.0"
edition = "2021"
description = "Benchmarks for Bevy engine"
description = "Benchmarks that test Bevy's performance"
publish = false
license = "MIT OR Apache-2.0"

[dev-dependencies]
glam = "0.25"
glam = "0.27"
rand = "0.8"
rand_chacha = "0.3"
criterion = { version = "0.3", features = ["html_reports"] }
bevy_app = { path = "../crates/bevy_app" }
bevy_ecs = { path = "../crates/bevy_ecs", features = ["multi-threaded"] }
bevy_ecs = { path = "../crates/bevy_ecs", features = ["multi_threaded"] }
bevy_reflect = { path = "../crates/bevy_reflect" }
bevy_tasks = { path = "../crates/bevy_tasks" }
bevy_utils = { path = "../crates/bevy_utils" }
bevy_math = { path = "../crates/bevy_math" }
bevy_render = { path = "../crates/bevy_render" }

[profile.release]
opt-level = 3
@@ -63,6 +63,11 @@ path = "benches/bevy_math/bezier.rs"
harness = false

[[bench]]
name = "utils"
path = "benches/bevy_utils/entity_hash.rs"
name = "torus"
path = "benches/bevy_render/torus.rs"
harness = false

[[bench]]
name = "entity_hash"
path = "benches/bevy_ecs/world/entity_hash.rs"
harness = false

@@ -18,6 +18,7 @@ mod iter_simple_sparse_set;
mod iter_simple_system;
mod iter_simple_wide;
mod iter_simple_wide_sparse_set;
mod par_iter_simple;

use heavy_compute::*;

@@ -27,6 +28,7 @@ criterion_group!(
    iter_frag_sparse,
    iter_simple,
    heavy_compute,
    par_iter_simple,
);

fn iter_simple(c: &mut Criterion) {
@@ -117,3 +119,15 @@ fn iter_frag_sparse(c: &mut Criterion) {
    });
    group.finish();
}

fn par_iter_simple(c: &mut Criterion) {
    let mut group = c.benchmark_group("par_iter_simple");
    group.warm_up_time(std::time::Duration::from_millis(500));
    group.measurement_time(std::time::Duration::from_secs(4));
    for f in [0, 10, 100, 1000] {
        group.bench_function(format!("with_{}_fragment", f), |b| {
            let mut bench = par_iter_simple::Benchmark::new(f);
            b.iter(move || bench.run());
        });
    }
}

benches/benches/bevy_ecs/iteration/par_iter_simple.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
use bevy_ecs::prelude::*;
use bevy_tasks::{ComputeTaskPool, TaskPool};
use glam::*;

#[derive(Component, Copy, Clone)]
struct Transform(Mat4);

#[derive(Component, Copy, Clone)]
struct Position(Vec3);

#[derive(Component, Copy, Clone)]
struct Rotation(Vec3);

#[derive(Component, Copy, Clone)]
struct Velocity(Vec3);

#[derive(Component, Copy, Clone, Default)]
struct Data<const X: u16>(f32);
pub struct Benchmark<'w>(World, QueryState<(&'w Velocity, &'w mut Position)>);

fn insert_if_bit_enabled<const B: u16>(entity: &mut EntityWorldMut, i: u16) {
    if i & 1 << B != 0 {
        entity.insert(Data::<B>(1.0));
    }
}

impl<'w> Benchmark<'w> {
    pub fn new(fragment: u16) -> Self {
        ComputeTaskPool::get_or_init(TaskPool::default);

        let mut world = World::new();

        let iter = world.spawn_batch(
            std::iter::repeat((
                Transform(Mat4::from_scale(Vec3::ONE)),
                Position(Vec3::X),
                Rotation(Vec3::X),
                Velocity(Vec3::X),
            ))
            .take(100_000),
        );
        let entities = iter.into_iter().collect::<Vec<Entity>>();
        for i in 0..fragment {
            let mut e = world.entity_mut(entities[i as usize]);
            insert_if_bit_enabled::<0>(&mut e, i);
            insert_if_bit_enabled::<1>(&mut e, i);
            insert_if_bit_enabled::<2>(&mut e, i);
            insert_if_bit_enabled::<3>(&mut e, i);
            insert_if_bit_enabled::<4>(&mut e, i);
            insert_if_bit_enabled::<5>(&mut e, i);
            insert_if_bit_enabled::<6>(&mut e, i);
            insert_if_bit_enabled::<7>(&mut e, i);
            insert_if_bit_enabled::<8>(&mut e, i);
            insert_if_bit_enabled::<9>(&mut e, i);
            insert_if_bit_enabled::<10>(&mut e, i);
            insert_if_bit_enabled::<11>(&mut e, i);
            insert_if_bit_enabled::<12>(&mut e, i);
            insert_if_bit_enabled::<13>(&mut e, i);
            insert_if_bit_enabled::<14>(&mut e, i);
            insert_if_bit_enabled::<15>(&mut e, i);
        }

        let query = world.query::<(&Velocity, &mut Position)>();
        Self(world, query)
    }

    #[inline(never)]
    pub fn run(&mut self) {
        self.1
            .par_iter_mut(&mut self.0)
            .for_each(|(v, mut p)| p.0 += v.0);
    }
}
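The `Data<const X: u16>` markers and `insert_if_bit_enabled` fragment the first `fragment` entities into distinct archetypes: entity index `i` receives `Data<B>` exactly for the bits `B` set in `i`. A small standalone check of that mapping (illustrative sketch, not part of the benchmark):

```rust
/// Which `Data<B>` markers entity index `i` would receive, following the
/// `i & (1 << B) != 0` test used by `insert_if_bit_enabled` above.
fn enabled_bits(i: u16) -> Vec<u16> {
    (0u16..16).filter(|b| i & (1 << b) != 0).collect()
}

fn main() {
    // Index 5 = 0b101 gets Data::<0> and Data::<2>; index 6 = 0b110 gets
    // Data::<1> and Data::<2>, so the two entities land in different archetypes.
    assert_eq!(enabled_bits(5), vec![0, 2]);
    assert_eq!(enabled_bits(6), vec![1, 2]);
}
```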
@@ -19,4 +19,5 @@ criterion_group!(
    contrived,
    schedule,
    build_schedule,
    empty_schedule_run,
);

@@ -118,3 +118,28 @@ pub fn build_schedule(criterion: &mut Criterion) {

    group.finish();
}

pub fn empty_schedule_run(criterion: &mut Criterion) {
    let mut app = bevy_app::App::default();

    let mut group = criterion.benchmark_group("run_empty_schedule");

    let mut schedule = Schedule::default();
    schedule.set_executor_kind(bevy_ecs::schedule::ExecutorKind::SingleThreaded);
    group.bench_function("SingleThreaded", |bencher| {
        bencher.iter(|| schedule.run(app.world_mut()));
    });

    let mut schedule = Schedule::default();
    schedule.set_executor_kind(bevy_ecs::schedule::ExecutorKind::MultiThreaded);
    group.bench_function("MultiThreaded", |bencher| {
        bencher.iter(|| schedule.run(app.world_mut()));
    });

    let mut schedule = Schedule::default();
    schedule.set_executor_kind(bevy_ecs::schedule::ExecutorKind::Simple);
    group.bench_function("Simple", |bencher| {
        bencher.iter(|| schedule.run(app.world_mut()));
    });
    group.finish();
}

@@ -1,8 +1,8 @@
use bevy_ecs::{
    component::Component,
    entity::Entity,
    system::{Command, CommandQueue, Commands},
    world::World,
    system::Commands,
    world::{Command, CommandQueue, World},
};
use criterion::{black_box, Criterion};

@@ -1,5 +1,4 @@
use bevy_ecs::entity::Entity;
use bevy_utils::EntityHashSet;
use bevy_ecs::entity::{Entity, EntityHashSet};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
@@ -28,7 +27,7 @@ fn make_entity(rng: &mut impl Rng, size: usize) -> Entity {
    e
}

fn entity_set_build_and_lookup(c: &mut Criterion) {
pub fn entity_set_build_and_lookup(c: &mut Criterion) {
    let mut group = c.benchmark_group("entity_hash");
    for size in SIZES {
        // Get some random-but-consistent entities to use for all the benches below.
@@ -1,13 +1,17 @@
use criterion::criterion_group;

mod commands;
mod spawn;
mod world_get;

use commands::*;

mod spawn;
use spawn::*;

mod world_get;
use world_get::*;

mod entity_hash;
use entity_hash::*;

criterion_group!(
    world_benches,
    empty_commands,
@@ -24,10 +28,9 @@ criterion_group!(
    world_query_iter,
    world_query_for_each,
    world_spawn,
    query_get_component_simple,
    query_get_component,
    query_get,
    query_get_many::<2>,
    query_get_many::<5>,
    query_get_many::<10>,
    entity_set_build_and_lookup
);

@ -2,7 +2,6 @@ use bevy_ecs::{
|
|||
bundle::Bundle,
|
||||
component::Component,
|
||||
entity::Entity,
|
||||
prelude::*,
|
||||
system::{Query, SystemState},
|
||||
world::World,
|
||||
};
|
||||
|
@ -257,104 +256,6 @@ pub fn world_query_for_each(criterion: &mut Criterion) {
|
|||
group.finish();
|
||||
}
|
||||
|
||||
pub fn query_get_component_simple(criterion: &mut Criterion) {
|
||||
#[derive(Component)]
|
||||
struct A(f32);
|
||||
|
||||
let mut group = criterion.benchmark_group("query_get_component_simple");
|
||||
group.warm_up_time(std::time::Duration::from_millis(500));
|
||||
group.measurement_time(std::time::Duration::from_secs(4));
|
||||
|
||||
group.bench_function("unchecked", |bencher| {
|
||||
let mut world = World::new();
|
||||
|
||||
let entity = world.spawn(A(0.0)).id();
|
||||
let mut query = world.query::<&mut A>();
|
||||
|
||||
let world_cell = world.as_unsafe_world_cell();
|
||||
bencher.iter(|| {
|
||||
for _x in 0..100000 {
|
||||
let mut a = unsafe { query.get_unchecked(world_cell, entity).unwrap() };
|
||||
a.0 += 1.0;
|
||||
}
|
||||
});
|
||||
});
|
||||
group.bench_function("system", |bencher| {
|
||||
let mut world = World::new();
|
||||
|
||||
let entity = world.spawn(A(0.0)).id();
|
||||
fn query_system(In(entity): In<Entity>, mut query: Query<&mut A>) {
|
||||
for _ in 0..100_000 {
|
||||
let mut a = query.get_mut(entity).unwrap();
|
||||
a.0 += 1.0;
|
||||
}
|
||||
}
|
||||
|
||||
let mut system = IntoSystem::into_system(query_system);
|
||||
system.initialize(&mut world);
|
||||
system.update_archetype_component_access(world.as_unsafe_world_cell());
|
||||
|
||||
bencher.iter(|| system.run(entity, &mut world));
|
||||
});
|
||||
|
||||
group.finish();
|
||||
}
|
||||
|
||||
pub fn query_get_component(criterion: &mut Criterion) {
|
||||
let mut group = criterion.benchmark_group("query_get_component");
|
||||
group.warm_up_time(std::time::Duration::from_millis(500));
|
||||
group.measurement_time(std::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in RANGE.map(|i| i * 10_000) {
|
||||
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
|
||||
let mut world = World::default();
|
||||
let mut entities: Vec<_> = world
|
||||
.spawn_batch((0..entity_count).map(|_| Table::default()))
|
||||
.collect();
|
||||
entities.shuffle(&mut deterministic_rand());
|
||||
let mut query = SystemState::<Query<&Table>>::new(&mut world);
|
||||
let query = query.get(&world);
|
||||
|
||||
bencher.iter(|| {
|
||||
let mut count = 0;
|
||||
for comp in entities
|
||||
.iter()
|
||||
.flat_map(|&e| query.get_component::<Table>(e))
|
||||
{
|
||||
black_box(comp);
|
||||
count += 1;
|
||||
black_box(count);
|
||||
}
|
||||
assert_eq!(black_box(count), entity_count);
|
||||
});
|
||||
});
|
||||
group.bench_function(format!("{}_entities_sparse", entity_count), |bencher| {
|
||||
let mut world = World::default();
|
||||
let mut entities: Vec<_> = world
|
||||
.spawn_batch((0..entity_count).map(|_| Sparse::default()))
|
||||
.collect();
|
||||
entities.shuffle(&mut deterministic_rand());
|
||||
let mut query = SystemState::<Query<&Sparse>>::new(&mut world);
|
||||
let query = query.get(&world);
|
||||
|
||||
bencher.iter(|| {
|
||||
let mut count = 0;
|
||||
for comp in entities
|
||||
.iter()
|
||||
.flat_map(|&e| query.get_component::<Sparse>(e))
|
||||
{
|
||||
black_box(comp);
|
||||
count += 1;
|
||||
black_box(count);
|
||||
}
|
||||
assert_eq!(black_box(count), entity_count);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
group.finish();
|
||||
}
|
||||
|
||||
pub fn query_get(criterion: &mut Criterion) {
|
||||
let mut group = criterion.benchmark_group("query_get");
|
||||
group.warm_up_time(std::time::Duration::from_millis(500));
|
||||
|
|
benches/benches/bevy_render/render_layers.rs (new file, 19 lines)
@@ -0,0 +1,19 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};

use bevy_render::view::RenderLayers;

fn render_layers(c: &mut Criterion) {
    c.bench_function("layers_intersect", |b| {
        let layer_a = RenderLayers::layer(1).with(2);
        let layer_b = RenderLayers::layer(1);
        b.iter(|| {
            black_box(layer_a.intersects(&layer_b))
        });
    });
}

criterion_group!(
    benches,
    render_layers,
);
criterion_main!(benches);

benches/benches/bevy_render/torus.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};

use bevy_render::mesh::TorusMeshBuilder;

fn torus(c: &mut Criterion) {
    c.bench_function("build_torus", |b| {
        b.iter(|| black_box(TorusMeshBuilder::new(black_box(0.5), black_box(1.0))));
    });
}

criterion_group!(
    benches,
    torus,
);
criterion_main!(benches);
clippy.toml (13 lines changed)
@@ -1 +1,12 @@
doc-valid-idents = ["GilRs", "glTF", "sRGB", "VSync", "WebGL2", "WebGPU", ".."]
doc-valid-idents = [
    "GilRs",
    "glTF",
    "MacOS",
    "NVidia",
    "OpenXR",
    "sRGB",
    "VSync",
    "WebGL2",
    "WebGPU",
    "..",
]

@@ -1,6 +1,6 @@
[package]
name = "bevy_a11y"
version = "0.12.0"
version = "0.14.0-dev"
edition = "2021"
description = "Provides accessibility support for Bevy Engine"
homepage = "https://bevyengine.org"
@@ -10,11 +10,15 @@ keywords = ["bevy", "accessibility", "a11y"]

[dependencies]
# bevy
bevy_app = { path = "../bevy_app", version = "0.12.0" }
bevy_derive = { path = "../bevy_derive", version = "0.12.0" }
bevy_ecs = { path = "../bevy_ecs", version = "0.12.0" }
bevy_app = { path = "../bevy_app", version = "0.14.0-dev" }
bevy_derive = { path = "../bevy_derive", version = "0.14.0-dev" }
bevy_ecs = { path = "../bevy_ecs", version = "0.14.0-dev" }

accesskit = "0.12"

[lints]
workspace = true

[package.metadata.docs.rs]
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
all-features = true

crates/bevy_a11y/README.md (new file, 7 lines)
@@ -0,0 +1,7 @@
# Bevy A11Y (Accessibility)

[![License](https://img.shields.io/badge/license-MIT%2FApache-blue.svg)](https://github.com/bevyengine/bevy#license)
[![Crates.io](https://img.shields.io/crates/v/bevy_a11y.svg)](https://crates.io/crates/bevy_a11y)
[![Downloads](https://img.shields.io/crates/d/bevy_a11y.svg)](https://crates.io/crates/bevy_a11y)
[![Docs](https://docs.rs/bevy_a11y/badge.svg)](https://docs.rs/bevy_a11y/latest/bevy_a11y/)
[![Discord](https://img.shields.io/discord/691052431525675048.svg?label=&logo=discord&logoColor=ffffff&color=7389D8&labelColor=6A7EC2)](https://discord.gg/bevy)
@@ -1,6 +1,11 @@
//! Accessibility for Bevy

#![forbid(unsafe_code)]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(
    html_logo_url = "https://bevyengine.org/assets/icon.png",
    html_favicon_url = "https://bevyengine.org/assets/icon.png"
)]

//! Accessibility for Bevy

use std::sync::{
    atomic::{AtomicBool, Ordering},

@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "bevy_animation"
|
||||
version = "0.12.0"
|
||||
version = "0.14.0-dev"
|
||||
edition = "2021"
|
||||
description = "Provides animation functionality for Bevy Engine"
|
||||
homepage = "https://bevyengine.org"
|
||||
|
@ -10,19 +10,37 @@ keywords = ["bevy"]
|
|||
|
||||
[dependencies]
|
||||
# bevy
|
||||
bevy_app = { path = "../bevy_app", version = "0.12.0" }
|
||||
bevy_asset = { path = "../bevy_asset", version = "0.12.0" }
|
||||
bevy_core = { path = "../bevy_core", version = "0.12.0" }
|
||||
bevy_math = { path = "../bevy_math", version = "0.12.0" }
|
||||
bevy_reflect = { path = "../bevy_reflect", version = "0.12.0", features = [
|
||||
bevy_app = { path = "../bevy_app", version = "0.14.0-dev" }
|
||||
bevy_asset = { path = "../bevy_asset", version = "0.14.0-dev" }
|
||||
bevy_color = { path = "../bevy_color", version = "0.14.0-dev" }
|
||||
bevy_core = { path = "../bevy_core", version = "0.14.0-dev" }
|
||||
bevy_derive = { path = "../bevy_derive", version = "0.14.0-dev" }
|
||||
bevy_log = { path = "../bevy_log", version = "0.14.0-dev" }
|
||||
bevy_math = { path = "../bevy_math", version = "0.14.0-dev" }
|
||||
bevy_reflect = { path = "../bevy_reflect", version = "0.14.0-dev", features = [
|
||||
"bevy",
|
||||
"petgraph",
|
||||
] }
|
||||
bevy_render = { path = "../bevy_render", version = "0.12.0" }
|
||||
bevy_time = { path = "../bevy_time", version = "0.12.0" }
|
||||
bevy_utils = { path = "../bevy_utils", version = "0.12.0" }
|
||||
bevy_ecs = { path = "../bevy_ecs", version = "0.12.0" }
|
||||
bevy_transform = { path = "../bevy_transform", version = "0.12.0" }
|
||||
bevy_hierarchy = { path = "../bevy_hierarchy", version = "0.12.0" }
|
||||
bevy_render = { path = "../bevy_render", version = "0.14.0-dev" }
|
||||
bevy_time = { path = "../bevy_time", version = "0.14.0-dev" }
|
||||
bevy_utils = { path = "../bevy_utils", version = "0.14.0-dev" }
|
||||
bevy_ecs = { path = "../bevy_ecs", version = "0.14.0-dev" }
|
||||
bevy_transform = { path = "../bevy_transform", version = "0.14.0-dev" }
|
||||
bevy_hierarchy = { path = "../bevy_hierarchy", version = "0.14.0-dev" }
|
||||
|
||||
# other
|
||||
fixedbitset = "0.5"
|
||||
petgraph = { version = "0.6", features = ["serde-1"] }
|
||||
ron = "0.8"
|
||||
serde = "1"
|
||||
blake3 = { version = "1.0" }
|
||||
thiserror = "1"
|
||||
thread_local = "1"
|
||||
uuid = { version = "1.7", features = ["v4"] }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
all-features = true
|
||||
|
|
crates/bevy_animation/README.md (new file, 7 lines)
@@ -0,0 +1,7 @@
# Bevy Animation

[![License](https://img.shields.io/badge/license-MIT%2FApache-blue.svg)](https://github.com/bevyengine/bevy#license)
[![Crates.io](https://img.shields.io/crates/v/bevy_animation.svg)](https://crates.io/crates/bevy_animation)
[![Downloads](https://img.shields.io/crates/d/bevy_animation.svg)](https://crates.io/crates/bevy_animation)
[![Docs](https://docs.rs/bevy_animation/badge.svg)](https://docs.rs/bevy_animation/latest/bevy_animation/)
[![Discord](https://img.shields.io/discord/691052431525675048.svg?label=&logo=discord&logoColor=ffffff&color=7389D8&labelColor=6A7EC2)](https://discord.gg/bevy)
@@ -1,9 +1,9 @@
use crate::util;
use bevy_color::{Laba, LinearRgba, Oklaba, Srgba, Xyza};
use bevy_ecs::world::World;
use bevy_math::*;
use bevy_reflect::Reflect;
use bevy_transform::prelude::Transform;
use bevy_utils::FloatOrd;

/// An individual input for [`Animatable::blend`].
pub struct BlendInput<T> {
@@ -57,6 +57,31 @@ macro_rules! impl_float_animatable {
    };
}

macro_rules! impl_color_animatable {
    ($ty: ident) => {
        impl Animatable for $ty {
            #[inline]
            fn interpolate(a: &Self, b: &Self, t: f32) -> Self {
                let value = *a * (1. - t) + *b * t;
                value
            }

            #[inline]
            fn blend(inputs: impl Iterator<Item = BlendInput<Self>>) -> Self {
                let mut value = Default::default();
                for input in inputs {
                    if input.additive {
                        value += input.weight * input.value;
                    } else {
                        value = Self::interpolate(&value, &input.value, input.weight);
                    }
                }
                value
            }
        }
    };
}

impl_float_animatable!(f32, f32);
impl_float_animatable!(Vec2, f32);
impl_float_animatable!(Vec3A, f32);
@@ -67,6 +92,12 @@ impl_float_animatable!(DVec2, f64);
impl_float_animatable!(DVec3, f64);
impl_float_animatable!(DVec4, f64);

impl_color_animatable!(LinearRgba);
impl_color_animatable!(Laba);
impl_color_animatable!(Oklaba);
impl_color_animatable!(Srgba);
impl_color_animatable!(Xyza);

// Vec3 is special cased to use Vec3A internally for blending
impl Animatable for Vec3 {
    #[inline]
@@ -142,8 +173,7 @@ impl Animatable for Transform {
}

impl Animatable for Quat {
    /// Performs an nlerp, because it's cheaper and easier to combine with other animations,
    /// reference: <http://number-none.com/product/Understanding%20Slerp,%20Then%20Not%20Using%20It/>
    /// Performs a slerp to smoothly interpolate between quaternions.
    #[inline]
    fn interpolate(a: &Self, b: &Self, t: f32) -> Self {
        // We want to smoothly interpolate between the two quaternions by default,
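For reference, the new color impls and the `Quat` slerp are used like any other `Animatable`: everything goes through `interpolate(a, b, t)`. A hedged usage sketch (the public module path for `Animatable` is an assumption here):

```rust
use bevy_animation::animatable::Animatable; // assumed public path
use bevy_color::LinearRgba;
use bevy_math::Quat;

fn main() {
    // Halfway between red and blue in linear RGB.
    let red = LinearRgba::new(1.0, 0.0, 0.0, 1.0);
    let blue = LinearRgba::new(0.0, 0.0, 1.0, 1.0);
    let purple = Animatable::interpolate(&red, &blue, 0.5);

    // Quaternions are slerped, so the angular speed is constant in `t`.
    let a = Quat::IDENTITY;
    let b = Quat::from_rotation_y(std::f32::consts::FRAC_PI_2);
    let halfway = Animatable::interpolate(&a, &b, 0.5);

    println!("{purple:?} {halfway:?}");
}
```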
396
crates/bevy_animation/src/graph.rs
Normal file
|
@ -0,0 +1,396 @@
|
|||
//! The animation graph, which allows animations to be blended together.
|
||||
|
||||
use std::io::{self, Write};
|
||||
use std::ops::{Index, IndexMut};
|
||||
|
||||
use bevy_asset::io::Reader;
|
||||
use bevy_asset::{Asset, AssetId, AssetLoader, AssetPath, AsyncReadExt as _, Handle, LoadContext};
|
||||
use bevy_reflect::{Reflect, ReflectSerialize};
|
||||
use petgraph::graph::{DiGraph, NodeIndex};
|
||||
use ron::de::SpannedError;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::AnimationClip;
|
||||
|
||||
/// A graph structure that describes how animation clips are to be blended
|
||||
/// together.
|
||||
///
|
||||
/// Applications frequently want to be able to play multiple animations at once
|
||||
/// and to fine-tune the influence that animations have on a skinned mesh. Bevy
|
||||
/// uses an *animation graph* to store this information. Animation graphs are a
|
||||
/// directed acyclic graph (DAG) that describes how animations are to be
|
||||
/// weighted and combined together. Every frame, Bevy evaluates the graph from
|
||||
/// the root and blends the animations together in a bottom-up fashion to
|
||||
/// produce the final pose.
|
||||
///
|
||||
/// There are two types of nodes: *blend nodes* and *clip nodes*, both of which
|
||||
/// can have an associated weight. Blend nodes have no associated animation clip
|
||||
/// and simply affect the weights of all their descendant nodes. Clip nodes
|
||||
/// specify an animation clip to play. When a graph is created, it starts with
|
||||
/// only a single blend node, the root node.
|
||||
///
|
||||
/// For example, consider the following graph:
|
||||
///
|
||||
/// ```text
|
||||
/// ┌────────────┐
|
||||
/// │ │
|
||||
/// │ Idle ├─────────────────────┐
|
||||
/// │ │ │
|
||||
/// └────────────┘ │
|
||||
/// │
|
||||
/// ┌────────────┐ │ ┌────────────┐
|
||||
/// │ │ │ │ │
|
||||
/// │ Run ├──┐ ├──┤ Root │
|
||||
/// │ │ │ ┌────────────┐ │ │ │
|
||||
/// └────────────┘ │ │ Blend │ │ └────────────┘
|
||||
/// ├──┤ ├──┘
|
||||
/// ┌────────────┐ │ │ 0.5 │
|
||||
/// │ │ │ └────────────┘
|
||||
/// │ Walk ├──┘
|
||||
/// │ │
|
||||
/// └────────────┘
|
||||
/// ```
|
||||
///
|
||||
/// In this case, assuming that Idle, Run, and Walk are all playing with weight
|
||||
/// 1.0, the Run and Walk animations will be equally blended together, then
|
||||
/// their weights will be halved and finally blended with the Idle animation.
|
||||
/// Thus the weight of Run and Walk are effectively half of the weight of Idle.
|
||||
///
|
||||
/// Animation graphs are assets and can be serialized to and loaded from [RON]
|
||||
/// files. Canonically, such files have an `.animgraph.ron` extension.
|
||||
///
|
||||
/// The animation graph implements [RFC 51]. See that document for more
|
||||
/// information.
|
||||
///
|
||||
/// [RON]: https://github.com/ron-rs/ron
|
||||
///
|
||||
/// [RFC 51]: https://github.com/bevyengine/rfcs/blob/main/rfcs/51-animation-composition.md
|
||||
#[derive(Asset, Reflect, Clone, Debug, Serialize)]
|
||||
#[reflect(Serialize, Debug)]
|
||||
#[serde(into = "SerializedAnimationGraph")]
|
||||
pub struct AnimationGraph {
|
||||
/// The `petgraph` data structure that defines the animation graph.
|
||||
pub graph: AnimationDiGraph,
|
||||
/// The index of the root node in the animation graph.
|
||||
pub root: NodeIndex,
|
||||
}
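Not part of the file above: as a concrete illustration of the Idle/Run/Walk example from the doc comment, here is a hedged sketch of building that graph with the API introduced in this file (the clip handles are placeholders; in a real app they would come from the asset server):

```rust
use bevy_animation::graph::AnimationGraph; // module path assumed
use bevy_animation::AnimationClip;
use bevy_asset::Handle;

fn build_example_graph(
    idle: Handle<AnimationClip>,
    run: Handle<AnimationClip>,
    walk: Handle<AnimationClip>,
) -> AnimationGraph {
    let mut graph = AnimationGraph::new();

    // Idle hangs directly off the root with weight 1.0.
    let _idle_node = graph.add_clip(idle, 1.0, graph.root);

    // Run and Walk sit under a blend node with weight 0.5, so their
    // effective weight is half of Idle's.
    let blend = graph.add_blend(0.5, graph.root);
    let _run_node = graph.add_clip(run, 1.0, blend);
    let _walk_node = graph.add_clip(walk, 1.0, blend);

    graph
}
```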
|
||||
|
||||
/// A type alias for the `petgraph` data structure that defines the animation
|
||||
/// graph.
|
||||
pub type AnimationDiGraph = DiGraph<AnimationGraphNode, (), u32>;
|
||||
|
||||
/// The index of either an animation or blend node in the animation graph.
|
||||
///
|
||||
/// These indices are the way that [`crate::AnimationPlayer`]s identify
|
||||
/// particular animations.
|
||||
pub type AnimationNodeIndex = NodeIndex<u32>;
|
||||
|
||||
/// An individual node within an animation graph.
|
||||
///
|
||||
/// If `clip` is present, this is a *clip node*. Otherwise, it's a *blend node*.
|
||||
/// Both clip and blend nodes can have weights, and those weights are propagated
|
||||
/// down to descendants.
|
||||
#[derive(Clone, Reflect, Debug)]
|
||||
pub struct AnimationGraphNode {
|
||||
/// The animation clip associated with this node, if any.
|
||||
///
|
||||
/// If the clip is present, this node is an *animation clip node*.
|
||||
/// Otherwise, this node is a *blend node*.
|
||||
pub clip: Option<Handle<AnimationClip>>,
|
||||
|
||||
/// The weight of this node.
|
||||
///
|
||||
/// Weights are propagated down to descendants. Thus if an animation clip
|
||||
/// has weight 0.3 and its parent blend node has weight 0.6, the computed
|
||||
/// weight of the animation clip is 0.18.
|
||||
pub weight: f32,
|
||||
}
|
||||
|
||||
/// An [`AssetLoader`] that can load [`AnimationGraph`]s as assets.
|
||||
///
|
||||
/// The canonical extension for [`AnimationGraph`]s is `.animgraph.ron`. Plain
|
||||
/// `.animgraph` is supported as well.
|
||||
#[derive(Default)]
|
||||
pub struct AnimationGraphAssetLoader;
|
||||
|
||||
/// Various errors that can occur when serializing or deserializing animation
|
||||
/// graphs to and from RON, respectively.
|
||||
#[derive(Error, Debug)]
|
||||
pub enum AnimationGraphLoadError {
|
||||
/// An I/O error occurred.
|
||||
#[error("I/O")]
|
||||
Io(#[from] io::Error),
|
||||
/// An error occurred in RON serialization or deserialization.
|
||||
#[error("RON serialization")]
|
||||
Ron(#[from] ron::Error),
|
||||
/// An error occurred in RON deserialization, and the location of the error
|
||||
/// is supplied.
|
||||
#[error("RON serialization")]
|
||||
SpannedRon(#[from] SpannedError),
|
||||
}
|
||||
|
||||
/// A version of [`AnimationGraph`] suitable for serializing as an asset.
|
||||
///
|
||||
/// Animation nodes can refer to external animation clips, and the [`AssetId`]
|
||||
/// is typically not sufficient to identify the clips, since the
|
||||
/// [`bevy_asset::AssetServer`] assigns IDs in unpredictable ways. That fact
|
||||
/// motivates this type, which replaces the `Handle<AnimationClip>` with an
|
||||
/// asset path. Loading an animation graph via the [`bevy_asset::AssetServer`]
|
||||
/// actually loads a serialized instance of this type, as does serializing an
|
||||
/// [`AnimationGraph`] through `serde`.
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SerializedAnimationGraph {
|
||||
/// Corresponds to the `graph` field on [`AnimationGraph`].
|
||||
pub graph: DiGraph<SerializedAnimationGraphNode, (), u32>,
|
||||
/// Corresponds to the `root` field on [`AnimationGraph`].
|
||||
pub root: NodeIndex,
|
||||
}
|
||||
|
||||
/// A version of [`AnimationGraphNode`] suitable for serializing as an asset.
|
||||
///
|
||||
/// See the comments in [`SerializedAnimationGraph`] for more information.
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SerializedAnimationGraphNode {
|
||||
/// Corresponds to the `clip` field on [`AnimationGraphNode`].
|
||||
pub clip: Option<SerializedAnimationClip>,
|
||||
/// Corresponds to the `weight` field on [`AnimationGraphNode`].
|
||||
pub weight: f32,
|
||||
}
|
||||
|
||||
/// A version of `Handle<AnimationClip>` suitable for serializing as an asset.
|
||||
///
|
||||
/// This replaces any handle that has a path with an [`AssetPath`]. Failing
|
||||
/// that, the asset ID is serialized directly.
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub enum SerializedAnimationClip {
|
||||
/// Records an asset path.
|
||||
AssetPath(AssetPath<'static>),
|
||||
/// The fallback that records an asset ID.
|
||||
///
|
||||
/// Because asset IDs can change, this should not be relied upon. Prefer to
|
||||
/// use asset paths where possible.
|
||||
AssetId(AssetId<AnimationClip>),
|
||||
}
|
||||
|
||||
impl AnimationGraph {
|
||||
/// Creates a new animation graph with a root node and no other nodes.
|
||||
pub fn new() -> Self {
|
||||
let mut graph = DiGraph::default();
|
||||
let root = graph.add_node(AnimationGraphNode::default());
|
||||
Self { graph, root }
|
||||
}
|
||||
|
||||
/// A convenience function for creating an [`AnimationGraph`] from a single
|
||||
/// [`AnimationClip`].
|
||||
///
|
||||
/// The clip will be a direct child of the root with weight 1.0. Both the
|
||||
/// graph and the index of the added node are returned as a tuple.
|
||||
pub fn from_clip(clip: Handle<AnimationClip>) -> (Self, AnimationNodeIndex) {
|
||||
let mut graph = Self::new();
|
||||
let node_index = graph.add_clip(clip, 1.0, graph.root);
|
||||
(graph, node_index)
|
||||
}
|
||||
|
||||
/// Adds an [`AnimationClip`] to the animation graph with the given weight
|
||||
/// and returns its index.
|
||||
///
|
||||
/// The animation clip will be the child of the given parent.
|
||||
pub fn add_clip(
|
||||
&mut self,
|
||||
clip: Handle<AnimationClip>,
|
||||
weight: f32,
|
||||
parent: AnimationNodeIndex,
|
||||
) -> AnimationNodeIndex {
|
||||
let node_index = self.graph.add_node(AnimationGraphNode {
|
||||
clip: Some(clip),
|
||||
weight,
|
||||
});
|
||||
self.graph.add_edge(parent, node_index, ());
|
||||
node_index
|
||||
}
|
||||
|
||||
/// A convenience method to add multiple [`AnimationClip`]s to the animation
|
||||
/// graph.
|
||||
///
|
||||
/// All of the animation clips will have the same weight and will be
|
||||
/// parented to the same node.
|
||||
///
|
||||
/// Returns the indices of the new nodes.
|
||||
pub fn add_clips<'a, I>(
|
||||
&'a mut self,
|
||||
clips: I,
|
||||
weight: f32,
|
||||
parent: AnimationNodeIndex,
|
||||
) -> impl Iterator<Item = AnimationNodeIndex> + 'a
|
||||
where
|
||||
I: IntoIterator<Item = Handle<AnimationClip>>,
|
||||
<I as std::iter::IntoIterator>::IntoIter: 'a,
|
||||
{
|
||||
clips
|
||||
.into_iter()
|
||||
.map(move |clip| self.add_clip(clip, weight, parent))
|
||||
}
|
||||
|
||||
/// Adds a blend node to the animation graph with the given weight and
|
||||
/// returns its index.
|
||||
///
|
||||
/// The blend node will be placed under the supplied `parent` node. During
|
||||
/// animation evaluation, the descendants of this blend node will have their
|
||||
/// weights multiplied by the weight of the blend.
|
||||
pub fn add_blend(&mut self, weight: f32, parent: AnimationNodeIndex) -> AnimationNodeIndex {
|
||||
let node_index = self
|
||||
.graph
|
||||
.add_node(AnimationGraphNode { clip: None, weight });
|
||||
self.graph.add_edge(parent, node_index, ());
|
||||
node_index
|
||||
}
|
||||
|
||||
/// Adds an edge from the node `from` to `to`, making `to` a child of
|
||||
/// `from`.
|
||||
///
|
||||
/// The behavior is unspecified if adding this produces a cycle in the
|
||||
/// graph.
|
||||
pub fn add_edge(&mut self, from: NodeIndex, to: NodeIndex) {
|
||||
self.graph.add_edge(from, to, ());
|
||||
}
|
||||
|
||||
/// Removes an edge between `from` and `to` if it exists.
|
||||
///
|
||||
/// Returns true if the edge was successfully removed or false if no such
|
||||
/// edge existed.
|
||||
pub fn remove_edge(&mut self, from: NodeIndex, to: NodeIndex) -> bool {
|
||||
self.graph
|
||||
.find_edge(from, to)
|
||||
.map(|edge| self.graph.remove_edge(edge))
|
||||
.is_some()
|
||||
}
|
||||
|
||||
/// Returns the [`AnimationGraphNode`] associated with the given index.
|
||||
///
|
||||
/// If no node with the given index exists, returns `None`.
|
||||
pub fn get(&self, animation: AnimationNodeIndex) -> Option<&AnimationGraphNode> {
|
||||
self.graph.node_weight(animation)
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the [`AnimationGraphNode`] associated
|
||||
/// with the given index.
|
||||
///
|
||||
/// If no node with the given index exists, returns `None`.
|
||||
pub fn get_mut(&mut self, animation: AnimationNodeIndex) -> Option<&mut AnimationGraphNode> {
|
||||
self.graph.node_weight_mut(animation)
|
||||
}
|
||||
|
||||
/// Returns an iterator over the [`AnimationGraphNode`]s in this graph.
|
||||
pub fn nodes(&self) -> impl Iterator<Item = AnimationNodeIndex> {
|
||||
self.graph.node_indices()
|
||||
}
|
||||
|
||||
/// Serializes the animation graph to the given [`Write`]r in RON format.
|
||||
///
|
||||
/// If writing to a file, it can later be loaded with the
|
||||
/// [`AnimationGraphAssetLoader`] to reconstruct the graph.
|
||||
pub fn save<W>(&self, writer: &mut W) -> Result<(), AnimationGraphLoadError>
|
||||
where
|
||||
W: Write,
|
||||
{
|
||||
let mut ron_serializer = ron::ser::Serializer::new(writer, None)?;
|
||||
Ok(self.serialize(&mut ron_serializer)?)
|
||||
}
|
||||
}
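Not part of the file above: since `save` accepts any `Write`r, persisting a graph as a RON asset is a one-liner over a file. A hedged sketch (the file name is arbitrary; the canonical extension mentioned earlier is `.animgraph.ron`):

```rust
use std::fs::File;
use std::io::BufWriter;

use bevy_animation::graph::AnimationGraph; // module path assumed

fn save_graph(graph: &AnimationGraph) -> Result<(), Box<dyn std::error::Error>> {
    // Write the graph as RON; it can later be loaded back through the asset
    // server via `AnimationGraphAssetLoader`.
    let mut writer = BufWriter::new(File::create("assets/example.animgraph.ron")?);
    graph.save(&mut writer)?;
    Ok(())
}
```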
|
||||
|
||||
impl Index<AnimationNodeIndex> for AnimationGraph {
|
||||
type Output = AnimationGraphNode;
|
||||
|
||||
fn index(&self, index: AnimationNodeIndex) -> &Self::Output {
|
||||
&self.graph[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl IndexMut<AnimationNodeIndex> for AnimationGraph {
|
||||
fn index_mut(&mut self, index: AnimationNodeIndex) -> &mut Self::Output {
|
||||
&mut self.graph[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for AnimationGraphNode {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
clip: None,
|
||||
weight: 1.0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for AnimationGraph {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl AssetLoader for AnimationGraphAssetLoader {
|
||||
type Asset = AnimationGraph;
|
||||
|
||||
type Settings = ();
|
||||
|
||||
type Error = AnimationGraphLoadError;
|
||||
|
||||
async fn load<'a>(
|
||||
&'a self,
|
||||
reader: &'a mut Reader<'_>,
|
||||
_: &'a Self::Settings,
|
||||
load_context: &'a mut LoadContext<'_>,
|
||||
) -> Result<Self::Asset, Self::Error> {
|
||||
let mut bytes = Vec::new();
|
||||
reader.read_to_end(&mut bytes).await?;
|
||||
|
||||
// Deserialize a `SerializedAnimationGraph` directly, so that we can
|
||||
// get the list of the animation clips it refers to and load them.
|
||||
let mut deserializer = ron::de::Deserializer::from_bytes(&bytes)?;
|
||||
let serialized_animation_graph = SerializedAnimationGraph::deserialize(&mut deserializer)
|
||||
.map_err(|err| deserializer.span_error(err))?;
|
||||
|
||||
// Load all `AssetPath`s to convert from a
|
||||
// `SerializedAnimationGraph` to a real `AnimationGraph`.
|
||||
Ok(AnimationGraph {
|
||||
graph: serialized_animation_graph.graph.map(
|
||||
|_, serialized_node| AnimationGraphNode {
|
||||
clip: serialized_node.clip.as_ref().map(|clip| match clip {
|
||||
SerializedAnimationClip::AssetId(asset_id) => Handle::Weak(*asset_id),
|
||||
SerializedAnimationClip::AssetPath(asset_path) => {
|
||||
load_context.load(asset_path)
|
||||
}
|
||||
}),
|
||||
weight: serialized_node.weight,
|
||||
},
|
||||
|_, _| (),
|
||||
),
|
||||
root: serialized_animation_graph.root,
|
||||
})
|
||||
}
|
||||
|
||||
fn extensions(&self) -> &[&str] {
|
||||
&["animgraph", "animgraph.ron"]
|
||||
}
|
||||
}
|
||||
|
||||
impl From<AnimationGraph> for SerializedAnimationGraph {
|
||||
fn from(animation_graph: AnimationGraph) -> Self {
|
||||
// If any of the animation clips have paths, then serialize them as
|
||||
// `SerializedAnimationClip::AssetPath` so that the
|
||||
// `AnimationGraphAssetLoader` can load them.
|
||||
Self {
|
||||
graph: animation_graph.graph.map(
|
||||
|_, node| SerializedAnimationGraphNode {
|
||||
weight: node.weight,
|
||||
clip: node.clip.as_ref().map(|clip| match clip.path() {
|
||||
Some(path) => SerializedAnimationClip::AssetPath(path.clone()),
|
||||
None => SerializedAnimationClip::AssetId(clip.id()),
|
||||
}),
|
||||
},
|
||||
|_, _| (),
|
||||
),
|
||||
root: animation_graph.root,
|
||||
}
|
||||
}
|
||||
}
|
132
crates/bevy_animation/src/transition.rs
Normal file
|
@ -0,0 +1,132 @@
|
|||
//! Animation transitions.
|
||||
//!
|
||||
//! Please note that this is an unstable temporary API. It may be replaced by a
|
||||
//! state machine in the future.
|
||||
|
||||
use bevy_ecs::{
|
||||
component::Component,
|
||||
system::{Query, Res},
|
||||
};
|
||||
use bevy_reflect::Reflect;
|
||||
use bevy_time::Time;
|
||||
use bevy_utils::Duration;
|
||||
|
||||
use crate::{graph::AnimationNodeIndex, ActiveAnimation, AnimationPlayer};
|
||||
|
||||
/// Manages fade-out of animation blend factors, allowing for smooth transitions
|
||||
/// between animations.
|
||||
///
|
||||
/// To use this component, place it on the same entity as the
|
||||
/// [`AnimationPlayer`] and [`bevy_asset::Handle<AnimationGraph>`]. It'll take
|
||||
/// responsibility for adjusting the weight on the [`ActiveAnimation`] in order
|
||||
/// to fade out animations smoothly.
|
||||
///
|
||||
/// When using an [`AnimationTransitions`] component, you should play all
|
||||
/// animations through the [`AnimationTransitions::play`] method, rather than by
|
||||
/// directly manipulating the [`AnimationPlayer`]. Playing animations through
|
||||
/// the [`AnimationPlayer`] directly will cause the [`AnimationTransitions`]
|
||||
/// component to get confused about which animation is the "main" animation, and
|
||||
/// transitions will usually be incorrect as a result.
|
||||
#[derive(Component, Default, Reflect)]
|
||||
pub struct AnimationTransitions {
|
||||
main_animation: Option<AnimationNodeIndex>,
|
||||
transitions: Vec<AnimationTransition>,
|
||||
}
|
||||
|
||||
/// An animation that is being faded out as part of a transition
|
||||
#[derive(Debug, Reflect)]
|
||||
pub struct AnimationTransition {
|
||||
/// The current weight. Starts at 1.0 and goes to 0.0 during the fade-out.
|
||||
current_weight: f32,
|
||||
/// How much to decrease `current_weight` per second
|
||||
weight_decline_per_sec: f32,
|
||||
/// The animation that is being faded out
|
||||
animation: AnimationNodeIndex,
|
||||
}
|
||||
|
||||
impl AnimationTransitions {
|
||||
/// Creates a new [`AnimationTransitions`] component, ready to be added to
|
||||
/// an entity with an [`AnimationPlayer`].
|
||||
pub fn new() -> AnimationTransitions {
|
||||
AnimationTransitions::default()
|
||||
}
|
||||
|
||||
/// Plays a new animation on the given [`AnimationPlayer`], fading out any
|
||||
/// existing animations that were already playing over the
|
||||
/// `transition_duration`.
|
||||
///
|
||||
/// Pass [`Duration::ZERO`] to instantly switch to a new animation, avoiding
|
||||
/// any transition.
|
||||
pub fn play<'p>(
|
||||
&mut self,
|
||||
player: &'p mut AnimationPlayer,
|
||||
new_animation: AnimationNodeIndex,
|
||||
transition_duration: Duration,
|
||||
) -> &'p mut ActiveAnimation {
|
||||
if let Some(old_animation_index) = self.main_animation.replace(new_animation) {
|
||||
if let Some(old_animation) = player.animation_mut(old_animation_index) {
|
||||
if !old_animation.is_paused() {
|
||||
self.transitions.push(AnimationTransition {
|
||||
current_weight: old_animation.weight,
|
||||
weight_decline_per_sec: 1.0 / transition_duration.as_secs_f32(),
|
||||
animation: old_animation_index,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.main_animation = Some(new_animation);
|
||||
player.start(new_animation)
|
||||
}
|
||||
}
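Not part of the file above: a hedged sketch of the usage pattern the doc comment describes, playing everything through `AnimationTransitions::play` so the previous main animation is faded out (the system, resource, and module path are assumptions for illustration):

```rust
use bevy_animation::transition::AnimationTransitions; // module path assumed
use bevy_animation::{graph::AnimationNodeIndex, AnimationPlayer};
use bevy_ecs::prelude::*;
use bevy_utils::Duration;

// Hypothetical resource holding the node index of the "run" animation.
#[derive(Resource)]
struct RunAnimation(AnimationNodeIndex);

fn switch_to_run(
    run: Res<RunAnimation>,
    mut players: Query<(&mut AnimationTransitions, &mut AnimationPlayer)>,
) {
    for (mut transitions, mut player) in &mut players {
        // Fade from whatever was playing into the run clip over 250 ms.
        transitions.play(&mut player, run.0, Duration::from_millis(250));
    }
}
```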
|
||||
|
||||
/// A system that alters the weight of currently-playing transitions based on
|
||||
/// the current time and decline amount.
|
||||
pub fn advance_transitions(
|
||||
mut query: Query<(&mut AnimationTransitions, &mut AnimationPlayer)>,
|
||||
time: Res<Time>,
|
||||
) {
|
||||
// We use a "greedy layer" system here. The top layer (most recent
|
||||
// transition) gets as much weight as it wants, and the remaining amount
|
||||
// is divided between all the other layers, eventually culminating in the
|
||||
// currently-playing animation receiving whatever's left. This results in a
|
||||
// nicely normalized weight.
|
||||
let mut remaining_weight = 1.0;
|
||||
for (mut animation_transitions, mut player) in query.iter_mut() {
|
||||
for transition in &mut animation_transitions.transitions.iter_mut().rev() {
|
||||
// Decrease weight.
|
||||
transition.current_weight = (transition.current_weight
|
||||
- transition.weight_decline_per_sec * time.delta_seconds())
|
||||
.max(0.0);
|
||||
|
||||
// Update weight.
|
||||
let Some(ref mut animation) = player.animation_mut(transition.animation) else {
|
||||
continue;
|
||||
};
|
||||
animation.weight = transition.current_weight * remaining_weight;
|
||||
remaining_weight -= animation.weight;
|
||||
}
|
||||
|
||||
if let Some(main_animation_index) = animation_transitions.main_animation {
|
||||
if let Some(ref mut animation) = player.animation_mut(main_animation_index) {
|
||||
animation.weight = remaining_weight;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
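To make the "greedy layer" weighting above concrete, here is a small standalone sketch of the same arithmetic with made-up weights (two fading transitions plus the main animation):

```rust
fn main() {
    // `current_weight` of each fade-out, most recent transition last,
    // matching the reverse iteration in `advance_transitions`.
    let transitions = vec![0.3_f32, 0.5];
    let mut remaining_weight = 1.0_f32;

    for current_weight in transitions.iter().rev() {
        let weight = current_weight * remaining_weight;
        remaining_weight -= weight;
        println!("transition gets weight {weight}");
    }

    // The main animation receives whatever is left, so the total stays ~1.0:
    // 0.5, then 0.3 * 0.5 = 0.15, then main = 0.35.
    println!("main animation gets weight {remaining_weight}");
}
```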
|
||||
|
||||
/// A system that removes transitions that have completed from the
|
||||
/// [`AnimationTransitions`] object.
|
||||
pub fn expire_completed_transitions(
|
||||
mut query: Query<(&mut AnimationTransitions, &mut AnimationPlayer)>,
|
||||
) {
|
||||
for (mut animation_transitions, mut player) in query.iter_mut() {
|
||||
animation_transitions.transitions.retain(|transition| {
|
||||
let expire = transition.current_weight <= 0.0;
|
||||
if expire {
|
||||
player.stop(transition.animation);
|
||||
}
|
||||
!expire
|
||||
});
|
||||
}
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "bevy_app"
|
||||
version = "0.12.0"
|
||||
version = "0.14.0-dev"
|
||||
edition = "2021"
|
||||
description = "Provides core App functionality for Bevy Engine"
|
||||
homepage = "https://bevyengine.org"
|
||||
|
@ -10,28 +10,34 @@ keywords = ["bevy"]
|
|||
|
||||
[features]
|
||||
trace = []
|
||||
bevy_ci_testing = ["serde", "ron"]
|
||||
bevy_debug_stepping = []
|
||||
default = ["bevy_reflect", "bevy_debug_stepping"]
|
||||
default = ["bevy_reflect", "bevy_state"]
|
||||
bevy_reflect = ["dep:bevy_reflect", "bevy_ecs/bevy_reflect"]
|
||||
serialize = ["bevy_ecs/serde"]
|
||||
bevy_state = ["dep:bevy_state"]
|
||||
|
||||
[dependencies]
|
||||
# bevy
|
||||
bevy_derive = { path = "../bevy_derive", version = "0.12.0" }
|
||||
bevy_ecs = { path = "../bevy_ecs", version = "0.12.0", default-features = false }
|
||||
bevy_reflect = { path = "../bevy_reflect", version = "0.12.0", optional = true }
|
||||
bevy_utils = { path = "../bevy_utils", version = "0.12.0" }
|
||||
bevy_tasks = { path = "../bevy_tasks", version = "0.12.0" }
|
||||
bevy_derive = { path = "../bevy_derive", version = "0.14.0-dev" }
|
||||
bevy_ecs = { path = "../bevy_ecs", version = "0.14.0-dev", default-features = false }
|
||||
bevy_reflect = { path = "../bevy_reflect", version = "0.14.0-dev", optional = true }
|
||||
bevy_utils = { path = "../bevy_utils", version = "0.14.0-dev" }
|
||||
bevy_tasks = { path = "../bevy_tasks", version = "0.14.0-dev" }
|
||||
bevy_state = { path = "../bevy_state", optional = true, version = "0.14.0-dev" }
|
||||
|
||||
# other
|
||||
serde = { version = "1.0", features = ["derive"], optional = true }
|
||||
ron = { version = "0.8.0", optional = true }
|
||||
downcast-rs = "1.2.0"
|
||||
|
||||
thiserror = "1.0"
|
||||
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
||||
wasm-bindgen = { version = "0.2" }
|
||||
web-sys = { version = "0.3", features = ["Window"] }
|
||||
console_error_panic_hook = "0.1.6"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
all-features = true
|
||||
|
|
|
@ -1,60 +0,0 @@
|
|||
//! Utilities for testing in CI environments.
|
||||
|
||||
use crate::{app::AppExit, App, Update};
|
||||
use serde::Deserialize;
|
||||
|
||||
use bevy_ecs::prelude::Resource;
|
||||
use bevy_utils::tracing::info;
|
||||
|
||||
/// A configuration struct for automated CI testing.
|
||||
///
|
||||
/// It gets used when the `bevy_ci_testing` feature is enabled to automatically
|
||||
/// exit a Bevy app when run through the CI. This is needed because otherwise
|
||||
/// Bevy apps would be stuck in the game loop and wouldn't allow the CI to progress.
|
||||
#[derive(Deserialize, Resource)]
|
||||
pub struct CiTestingConfig {
|
||||
/// The number of frames after which Bevy should exit.
|
||||
pub exit_after: Option<u32>,
|
||||
/// The time in seconds to update for each frame.
|
||||
pub frame_time: Option<f32>,
|
||||
/// Frames at which to capture a screenshot.
|
||||
#[serde(default)]
|
||||
pub screenshot_frames: Vec<u32>,
|
||||
}
|
||||
|
||||
fn ci_testing_exit_after(
|
||||
mut current_frame: bevy_ecs::prelude::Local<u32>,
|
||||
ci_testing_config: bevy_ecs::prelude::Res<CiTestingConfig>,
|
||||
mut app_exit_events: bevy_ecs::event::EventWriter<AppExit>,
|
||||
) {
|
||||
if let Some(exit_after) = ci_testing_config.exit_after {
|
||||
if *current_frame > exit_after {
|
||||
app_exit_events.send(AppExit);
|
||||
info!("Exiting after {} frames. Test successful!", exit_after);
|
||||
}
|
||||
}
|
||||
*current_frame += 1;
|
||||
}
|
||||
|
||||
pub(crate) fn setup_app(app: &mut App) -> &mut App {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let config: CiTestingConfig = {
|
||||
let filename = std::env::var("CI_TESTING_CONFIG")
|
||||
.unwrap_or_else(|_| "ci_testing_config.ron".to_string());
|
||||
ron::from_str(
|
||||
&std::fs::read_to_string(filename)
|
||||
.expect("error reading CI testing configuration file"),
|
||||
)
|
||||
.expect("error deserializing CI testing configuration file")
|
||||
};
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let config: CiTestingConfig = {
|
||||
let config = include_str!("../../../ci_testing_config.ron");
|
||||
ron::from_str(config).expect("error deserializing CI testing configuration file")
|
||||
};
|
||||
|
||||
app.insert_resource(config)
|
||||
.add_systems(Update, ci_testing_exit_after);
|
||||
|
||||
app
|
||||
}
|
|
@ -1,31 +1,39 @@
|
|||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
#![forbid(unsafe_code)]
|
||||
#![doc(
|
||||
html_logo_url = "https://bevyengine.org/assets/icon.png",
|
||||
html_favicon_url = "https://bevyengine.org/assets/icon.png"
|
||||
)]
|
||||
|
||||
//! This crate is about everything concerning the highest-level, application layer of a Bevy app.
|
||||
|
||||
mod app;
|
||||
mod main_schedule;
|
||||
mod panic_handler;
|
||||
mod plugin;
|
||||
mod plugin_group;
|
||||
mod schedule_runner;
|
||||
|
||||
#[cfg(feature = "bevy_ci_testing")]
|
||||
pub mod ci_testing;
|
||||
mod sub_app;
|
||||
|
||||
pub use app::*;
|
||||
pub use bevy_derive::DynamicPlugin;
|
||||
pub use main_schedule::*;
|
||||
pub use panic_handler::*;
|
||||
pub use plugin::*;
|
||||
pub use plugin_group::*;
|
||||
pub use schedule_runner::*;
|
||||
pub use sub_app::*;
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub mod prelude {
|
||||
#[doc(hidden)]
|
||||
pub use crate::{
|
||||
app::App,
|
||||
app::{App, AppExit},
|
||||
main_schedule::{
|
||||
First, FixedFirst, FixedLast, FixedPostUpdate, FixedPreUpdate, FixedUpdate, Last, Main,
|
||||
PostStartup, PostUpdate, PreStartup, PreUpdate, SpawnScene, Startup, StateTransition,
|
||||
Update,
|
||||
PostStartup, PostUpdate, PreStartup, PreUpdate, SpawnScene, Startup, Update,
|
||||
},
|
||||
sub_app::SubApp,
|
||||
DynamicPlugin, Plugin, PluginGroup,
|
||||
};
|
||||
}
|
||||
|
|
|
@ -4,6 +4,8 @@ use bevy_ecs::{
|
|||
system::{Local, Resource},
|
||||
world::{Mut, World},
|
||||
};
|
||||
#[cfg(feature = "bevy_state")]
|
||||
use bevy_state::state::StateTransition;
|
||||
|
||||
/// The schedule that contains the app logic that is evaluated each tick of [`App::update()`].
|
||||
///
|
||||
|
@ -27,13 +29,14 @@ use bevy_ecs::{
|
|||
/// # Rendering
|
||||
///
|
||||
/// Note rendering is not executed in the main schedule by default.
|
||||
/// Instead, rendering is performed in a separate [`SubApp`](crate::app::SubApp)
|
||||
/// Instead, rendering is performed in a separate [`SubApp`]
|
||||
/// which exchanges data with the main app in between the main schedule runs.
|
||||
///
|
||||
/// See [`RenderPlugin`] and [`PipelinedRenderingPlugin`] for more details.
|
||||
///
|
||||
/// [`RenderPlugin`]: https://docs.rs/bevy/latest/bevy/render/struct.RenderPlugin.html
|
||||
/// [`PipelinedRenderingPlugin`]: https://docs.rs/bevy/latest/bevy/render/pipelined_rendering/struct.PipelinedRenderingPlugin.html
|
||||
/// [`SubApp`]: crate::SubApp
|
||||
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct Main;
|
||||
|
||||
|
@ -72,12 +75,6 @@ pub struct First;
|
|||
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct PreUpdate;
|
||||
|
||||
/// Runs [state transitions](bevy_ecs::schedule::States).
|
||||
///
|
||||
/// See the [`Main`] schedule for some details about how schedules are run.
|
||||
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct StateTransition;
|
||||
|
||||
/// Runs the [`FixedMain`] schedule in a loop until all relevant elapsed time has been "consumed".
|
||||
///
|
||||
/// See the [`Main`] schedule for some details about how schedules are run.
|
||||
|
|
52
crates/bevy_app/src/panic_handler.rs
Normal file
|
@ -0,0 +1,52 @@
|
|||
//! This module provides panic handlers for [Bevy](https://bevyengine.org)
|
||||
//! apps, and automatically configures platform specifics (i.e. WASM or Android).
|
||||
//!
|
||||
//! By default, the [`PanicHandlerPlugin`] from this crate is included in Bevy's `DefaultPlugins`.
|
||||
//!
|
||||
//! For more fine-tuned control over panic behavior, disable the [`PanicHandlerPlugin`] or
|
||||
//! `DefaultPlugins` during app initialization.
|
||||
|
||||
use crate::App;
|
||||
use crate::Plugin;
|
||||
|
||||
/// Adds sensible panic handlers to Apps. This plugin is part of the `DefaultPlugins`. Adding
|
||||
/// this plugin will set up a panic hook appropriate to your target platform:
|
||||
/// * On WASM, uses [`console_error_panic_hook`](https://crates.io/crates/console_error_panic_hook), logging
|
||||
/// to the browser console.
|
||||
/// * Other platforms are currently not setup.
|
||||
///
|
||||
/// ```no_run
|
||||
/// # use bevy_app::{App, NoopPluginGroup as MinimalPlugins, PluginGroup, PanicHandlerPlugin};
|
||||
/// fn main() {
|
||||
/// App::new()
|
||||
/// .add_plugins(MinimalPlugins)
|
||||
/// .add_plugins(PanicHandlerPlugin)
|
||||
/// .run();
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// If you want to set up your own panic handler, you should disable this
|
||||
/// plugin from `DefaultPlugins`:
|
||||
/// ```no_run
|
||||
/// # use bevy_app::{App, NoopPluginGroup as DefaultPlugins, PluginGroup, PanicHandlerPlugin};
|
||||
/// fn main() {
|
||||
/// App::new()
|
||||
/// .add_plugins(DefaultPlugins.build().disable::<PanicHandlerPlugin>())
|
||||
/// .run();
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Default)]
|
||||
pub struct PanicHandlerPlugin;
|
||||
|
||||
impl Plugin for PanicHandlerPlugin {
|
||||
fn build(&self, _app: &mut App) {
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
{
|
||||
console_error_panic_hook::set_once();
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
// Use the default target panic hook - Do nothing.
|
||||
}
|
||||
}
|
||||
}
|
|
@ -100,10 +100,34 @@ impl<T: Fn(&mut App) + Send + Sync + 'static> Plugin for T {
|
|||
}
|
||||
}
|
||||
|
||||
/// Plugins state in the application
|
||||
#[derive(PartialEq, Eq, Debug, Clone, Copy, PartialOrd, Ord)]
|
||||
pub enum PluginsState {
|
||||
/// Plugins are being added.
|
||||
Adding,
|
||||
/// All plugins already added are ready.
|
||||
Ready,
|
||||
/// Finish has been executed for all plugins added.
|
||||
Finished,
|
||||
/// Cleanup has been executed for all plugins added.
|
||||
Cleaned,
|
||||
}
|
||||
|
||||
/// A dummy plugin that temporarily occupies an entry in an app's plugin registry.
|
||||
pub(crate) struct PlaceholderPlugin;
|
||||
|
||||
impl Plugin for PlaceholderPlugin {
|
||||
fn build(&self, _app: &mut App) {}
|
||||
}
|
||||
|
||||
/// A type representing an unsafe function that returns a mutable pointer to a [`Plugin`].
|
||||
/// It is used for dynamically loading plugins.
|
||||
///
|
||||
/// See `bevy_dynamic_plugin/src/loader.rs#dynamically_load_plugin`.
|
||||
#[deprecated(
|
||||
since = "0.14.0",
|
||||
note = "The current dynamic plugin system is unsound and will be removed in 0.15."
|
||||
)]
|
||||
pub type CreatePlugin = unsafe fn() -> *mut dyn Plugin;
|
||||
|
||||
/// Types that represent a set of [`Plugin`]s.
|
||||
|
@ -115,8 +139,7 @@ pub trait Plugins<Marker>: sealed::Plugins<Marker> {}
|
|||
impl<Marker, T> Plugins<Marker> for T where T: sealed::Plugins<Marker> {}
|
||||
|
||||
mod sealed {
|
||||
|
||||
use bevy_ecs::all_tuples;
|
||||
use bevy_utils::all_tuples;
|
||||
|
||||
use crate::{App, AppError, Plugin, PluginGroup};
|
||||
|
||||
|
|
|
@ -66,13 +66,25 @@ impl PluginGroupBuilder {
|
|||
// Inserts the new plugin as enabled, and removes its previous ordering if it was
|
||||
// already present
|
||||
fn upsert_plugin_state<T: Plugin>(&mut self, plugin: T, added_at_index: usize) {
|
||||
if let Some(entry) = self.plugins.insert(
|
||||
self.upsert_plugin_entry_state(
|
||||
TypeId::of::<T>(),
|
||||
PluginEntry {
|
||||
plugin: Box::new(plugin),
|
||||
enabled: true,
|
||||
},
|
||||
) {
|
||||
added_at_index,
|
||||
);
|
||||
}
|
||||
|
||||
// Inserts the new plugin entry as enabled, and removes its previous ordering if it was
|
||||
// already present
|
||||
fn upsert_plugin_entry_state(
|
||||
&mut self,
|
||||
key: TypeId,
|
||||
plugin: PluginEntry,
|
||||
added_at_index: usize,
|
||||
) {
|
||||
if let Some(entry) = self.plugins.insert(key, plugin) {
|
||||
if entry.enabled {
|
||||
warn!(
|
||||
"You are replacing plugin '{}' that was not disabled.",
|
||||
|
@ -83,7 +95,7 @@ impl PluginGroupBuilder {
|
|||
.order
|
||||
.iter()
|
||||
.enumerate()
|
||||
.find(|(i, ty)| *i != added_at_index && **ty == TypeId::of::<T>())
|
||||
.find(|(i, ty)| *i != added_at_index && **ty == key)
|
||||
.map(|(i, _)| i)
|
||||
{
|
||||
self.order.remove(to_remove);
|
||||
|
@ -118,6 +130,26 @@ impl PluginGroupBuilder {
|
|||
self
|
||||
}
|
||||
|
||||
/// Adds a [`PluginGroup`] at the end of this [`PluginGroupBuilder`]. If the plugin was
|
||||
/// already in the group, it is removed from its previous place.
|
||||
pub fn add_group(mut self, group: impl PluginGroup) -> Self {
|
||||
let Self {
|
||||
mut plugins, order, ..
|
||||
} = group.build();
|
||||
|
||||
for plugin_id in order {
|
||||
self.upsert_plugin_entry_state(
|
||||
plugin_id,
|
||||
plugins.remove(&plugin_id).unwrap(),
|
||||
self.order.len(),
|
||||
);
|
||||
|
||||
self.order.push(plugin_id);
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
/// Adds a [`Plugin`] in this [`PluginGroupBuilder`] before the plugin of type `Target`.
|
||||
/// If the plugin was already in the group, it is removed from its previous place. There must
|
||||
/// be a plugin of type `Target` in the group or it will panic.
|
||||
|
@ -335,4 +367,48 @@ mod tests {
|
|||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_basic_subgroup() {
|
||||
let group_a = PluginGroupBuilder::start::<NoopPluginGroup>()
|
||||
.add(PluginA)
|
||||
.add(PluginB);
|
||||
|
||||
let group_b = PluginGroupBuilder::start::<NoopPluginGroup>()
|
||||
.add_group(group_a)
|
||||
.add(PluginC);
|
||||
|
||||
assert_eq!(
|
||||
group_b.order,
|
||||
vec![
|
||||
std::any::TypeId::of::<PluginA>(),
|
||||
std::any::TypeId::of::<PluginB>(),
|
||||
std::any::TypeId::of::<PluginC>(),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_conflicting_subgroup() {
|
||||
let group_a = PluginGroupBuilder::start::<NoopPluginGroup>()
|
||||
.add(PluginA)
|
||||
.add(PluginC);
|
||||
|
||||
let group_b = PluginGroupBuilder::start::<NoopPluginGroup>()
|
||||
.add(PluginB)
|
||||
.add(PluginC);
|
||||
|
||||
let group = PluginGroupBuilder::start::<NoopPluginGroup>()
|
||||
.add_group(group_a)
|
||||
.add_group(group_b);
|
||||
|
||||
assert_eq!(
|
||||
group.order,
|
||||
vec![
|
||||
std::any::TypeId::of::<PluginA>(),
|
||||
std::any::TypeId::of::<PluginB>(),
|
||||
std::any::TypeId::of::<PluginC>(),
|
||||
]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,7 +3,6 @@ use crate::{
|
|||
plugin::Plugin,
|
||||
PluginsState,
|
||||
};
|
||||
use bevy_ecs::event::{Events, ManualEventReader};
|
||||
use bevy_utils::{Duration, Instant};
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
|
@ -82,25 +81,27 @@ impl Plugin for ScheduleRunnerPlugin {
|
|||
app.cleanup();
|
||||
}
|
||||
|
||||
let mut app_exit_event_reader = ManualEventReader::<AppExit>::default();
|
||||
match run_mode {
|
||||
RunMode::Once => app.update(),
|
||||
RunMode::Once => {
|
||||
app.update();
|
||||
|
||||
if let Some(exit) = app.should_exit() {
|
||||
return exit;
|
||||
}
|
||||
|
||||
AppExit::Success
|
||||
}
|
||||
RunMode::Loop { wait } => {
|
||||
let mut tick = move |app: &mut App,
|
||||
wait: Option<Duration>|
|
||||
let tick = move |app: &mut App,
|
||||
wait: Option<Duration>|
|
||||
-> Result<Option<Duration>, AppExit> {
|
||||
let start_time = Instant::now();
|
||||
|
||||
app.update();
|
||||
|
||||
if let Some(app_exit_events) =
|
||||
app.world.get_resource_mut::<Events<AppExit>>()
|
||||
{
|
||||
if let Some(exit) = app_exit_event_reader.read(&app_exit_events).last()
|
||||
{
|
||||
return Err(exit.clone());
|
||||
}
|
||||
}
|
||||
if let Some(exit) = app.should_exit() {
|
||||
return Err(exit);
|
||||
};
|
||||
|
||||
let end_time = Instant::now();
|
||||
|
||||
|
@ -116,43 +117,54 @@ impl Plugin for ScheduleRunnerPlugin {
|
|||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
while let Ok(delay) = tick(&mut app, wait) {
|
||||
if let Some(delay) = delay {
|
||||
std::thread::sleep(delay);
|
||||
loop {
|
||||
match tick(&mut app, wait) {
|
||||
Ok(Some(delay)) => std::thread::sleep(delay),
|
||||
Ok(None) => continue,
|
||||
Err(exit) => return exit,
|
||||
}
|
||||
}
|
||||
}
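The native loop above treats each tick as a small state machine: sleep on `Ok(Some(delay))`, immediately tick again on `Ok(None)`, and return the exit code on `Err`. A stripped-down, std-only sketch of the same control flow (the types here are stand-ins, not the Bevy ones):

```rust
use std::time::Duration;

#[derive(Debug)]
enum Exit {
    Success,
}

// Stand-in for the real tick closure: run one frame and report how long to wait.
fn tick(frame: &mut u32, wait: Option<Duration>) -> Result<Option<Duration>, Exit> {
    *frame += 1;
    if *frame >= 3 {
        return Err(Exit::Success); // the app asked to exit
    }
    Ok(wait) // `None` means "run the next frame immediately"
}

fn run_loop(wait: Option<Duration>) -> Exit {
    let mut frame = 0;
    loop {
        match tick(&mut frame, wait) {
            Ok(Some(delay)) => std::thread::sleep(delay),
            Ok(None) => continue,
            Err(exit) => return exit,
        }
    }
}

fn main() {
    println!("loop ended with {:?}", run_loop(Some(Duration::from_millis(1))));
}
```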
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
{
|
||||
fn set_timeout(f: &Closure<dyn FnMut()>, dur: Duration) {
|
||||
fn set_timeout(callback: &Closure<dyn FnMut()>, dur: Duration) {
|
||||
web_sys::window()
|
||||
.unwrap()
|
||||
.set_timeout_with_callback_and_timeout_and_arguments_0(
|
||||
f.as_ref().unchecked_ref(),
|
||||
callback.as_ref().unchecked_ref(),
|
||||
dur.as_millis() as i32,
|
||||
)
|
||||
.expect("Should register `setTimeout`.");
|
||||
}
|
||||
let asap = Duration::from_millis(1);
|
||||
|
||||
let mut rc = Rc::new(app);
|
||||
let f = Rc::new(RefCell::new(None));
|
||||
let g = f.clone();
|
||||
let exit = Rc::new(RefCell::new(AppExit::Success));
|
||||
let closure_exit = exit.clone();
|
||||
|
||||
let c = move || {
|
||||
let mut app = Rc::get_mut(&mut rc).unwrap();
|
||||
let delay = tick(&mut app, wait);
|
||||
let mut app = Rc::new(app);
|
||||
let moved_tick_closure = Rc::new(RefCell::new(None));
|
||||
let base_tick_closure = moved_tick_closure.clone();
|
||||
|
||||
let tick_app = move || {
|
||||
let app = Rc::get_mut(&mut app).unwrap();
|
||||
let delay = tick(app, wait);
|
||||
match delay {
|
||||
Ok(delay) => {
|
||||
set_timeout(f.borrow().as_ref().unwrap(), delay.unwrap_or(asap))
|
||||
Ok(delay) => set_timeout(
|
||||
moved_tick_closure.borrow().as_ref().unwrap(),
|
||||
delay.unwrap_or(asap),
|
||||
),
|
||||
Err(code) => {
|
||||
closure_exit.replace(code);
|
||||
}
|
||||
Err(_) => {}
|
||||
}
|
||||
};
|
||||
*g.borrow_mut() = Some(Closure::wrap(Box::new(c) as Box<dyn FnMut()>));
|
||||
set_timeout(g.borrow().as_ref().unwrap(), asap);
|
||||
};
|
||||
*base_tick_closure.borrow_mut() =
|
||||
Some(Closure::wrap(Box::new(tick_app) as Box<dyn FnMut()>));
|
||||
set_timeout(base_tick_closure.borrow().as_ref().unwrap(), asap);
|
||||
|
||||
exit.take()
|
||||
}
|
||||
}
|
||||
}
|
||||
});
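On wasm there is no blocking loop, so the tick closure reschedules itself through `setTimeout`, holding itself in an `Rc<RefCell<Option<Closure>>>`. A condensed sketch of that pattern, mirroring the `web-sys` calls used in the hunk above (wasm32 only; assumes `wasm-bindgen` and `web-sys` with the `Window` feature; the per-frame work is omitted):

```rust
use std::cell::RefCell;
use std::rc::Rc;
use std::time::Duration;
use wasm_bindgen::{closure::Closure, JsCast};

fn schedule(callback: &Closure<dyn FnMut()>, dur: Duration) {
    web_sys::window()
        .unwrap()
        .set_timeout_with_callback_and_timeout_and_arguments_0(
            callback.as_ref().unchecked_ref(),
            dur.as_millis() as i32,
        )
        .expect("Should register `setTimeout`.");
}

fn start_ticking() {
    // The closure must be able to reschedule *itself*, so it lives in a shared
    // `Rc<RefCell<Option<..>>>` slot that both the outer code and the closure hold.
    let slot: Rc<RefCell<Option<Closure<dyn FnMut()>>>> = Rc::new(RefCell::new(None));
    let slot_for_closure = slot.clone();

    let tick = move || {
        // ... run one frame here, then schedule the next one ...
        schedule(
            slot_for_closure.borrow().as_ref().unwrap(),
            Duration::from_millis(16),
        );
    };

    *slot.borrow_mut() = Some(Closure::wrap(Box::new(tick) as Box<dyn FnMut()>));
    schedule(slot.borrow().as_ref().unwrap(), Duration::from_millis(1));
}
```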
|
||||
|
|
crates/bevy_app/src/sub_app.rs (new file, 514 lines)
@ -0,0 +1,514 @@
|
|||
use crate::{App, InternedAppLabel, Plugin, Plugins, PluginsState, Startup};
|
||||
use bevy_ecs::{
|
||||
event::EventRegistry,
|
||||
prelude::*,
|
||||
schedule::{InternedScheduleLabel, ScheduleBuildSettings, ScheduleLabel},
|
||||
system::SystemId,
|
||||
};
|
||||
#[cfg(feature = "bevy_state")]
|
||||
use bevy_state::{
|
||||
prelude::*,
|
||||
state::{setup_state_transitions_in_world, FreelyMutableState},
|
||||
};
|
||||
|
||||
#[cfg(feature = "trace")]
|
||||
use bevy_utils::tracing::info_span;
|
||||
use bevy_utils::{HashMap, HashSet};
|
||||
use std::fmt::Debug;
|
||||
|
||||
type ExtractFn = Box<dyn Fn(&mut World, &mut World) + Send>;
|
||||
|
||||
/// A secondary application with its own [`World`]. These can run independently of each other.
|
||||
///
|
||||
/// These are useful for situations where certain processes (e.g. a render thread) need to be kept
|
||||
/// separate from the main application.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// # use bevy_app::{App, AppLabel, SubApp, Main};
|
||||
/// # use bevy_ecs::prelude::*;
|
||||
/// # use bevy_ecs::schedule::ScheduleLabel;
|
||||
///
|
||||
/// #[derive(Resource, Default)]
|
||||
/// struct Val(pub i32);
|
||||
///
|
||||
/// #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, AppLabel)]
|
||||
/// struct ExampleApp;
|
||||
///
|
||||
/// // Create an app with a certain resource.
|
||||
/// let mut app = App::new();
|
||||
/// app.insert_resource(Val(10));
|
||||
///
|
||||
/// // Create a sub-app with the same resource and a single schedule.
|
||||
/// let mut sub_app = SubApp::new();
|
||||
/// sub_app.insert_resource(Val(100));
|
||||
///
|
||||
/// // Setup an extract function to copy the resource's value in the main world.
|
||||
/// sub_app.set_extract(|main_world, sub_world| {
|
||||
/// sub_world.resource_mut::<Val>().0 = main_world.resource::<Val>().0;
|
||||
/// });
|
||||
///
|
||||
/// // Schedule a system that will verify extraction is working.
|
||||
/// sub_app.add_systems(Main, |counter: Res<Val>| {
|
||||
/// // The value will be copied during extraction, so we should see 10 instead of 100.
|
||||
/// assert_eq!(counter.0, 10);
|
||||
/// });
|
||||
///
|
||||
/// // Add the sub-app to the main app.
|
||||
/// app.insert_sub_app(ExampleApp, sub_app);
|
||||
///
|
||||
/// // Update the application once (using the default runner).
|
||||
/// app.run();
|
||||
/// ```
|
||||
pub struct SubApp {
|
||||
/// The data of this application.
|
||||
world: World,
|
||||
/// List of plugins that have been added.
|
||||
pub(crate) plugin_registry: Vec<Box<dyn Plugin>>,
|
||||
/// The names of plugins that have been added to this app. (used to track duplicates and
|
||||
/// already-registered plugins)
|
||||
pub(crate) plugin_names: HashSet<String>,
|
||||
/// The depth of nested plugin builds; [`update`](Self::update) panics while this is non-zero.
|
||||
pub(crate) plugin_build_depth: usize,
|
||||
pub(crate) plugins_state: PluginsState,
|
||||
/// The schedule that will be run by [`update`](Self::update).
|
||||
pub update_schedule: Option<InternedScheduleLabel>,
|
||||
/// A function that gives mutable access to two app worlds. This is primarily
|
||||
/// intended for copying data from the main world to secondary worlds.
|
||||
extract: Option<ExtractFn>,
|
||||
}
|
||||
|
||||
impl Debug for SubApp {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "SubApp")
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for SubApp {
|
||||
fn default() -> Self {
|
||||
let mut world = World::new();
|
||||
world.init_resource::<Schedules>();
|
||||
Self {
|
||||
world,
|
||||
plugin_registry: Vec::default(),
|
||||
plugin_names: HashSet::default(),
|
||||
plugin_build_depth: 0,
|
||||
plugins_state: PluginsState::Adding,
|
||||
update_schedule: None,
|
||||
extract: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SubApp {
|
||||
/// Returns a default, empty [`SubApp`].
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// This method is a workaround. Each [`SubApp`] can have its own plugins, but [`Plugin`]
|
||||
/// works on an [`App`] as a whole.
|
||||
fn run_as_app<F>(&mut self, f: F)
|
||||
where
|
||||
F: FnOnce(&mut App),
|
||||
{
|
||||
let mut app = App::empty();
|
||||
std::mem::swap(self, &mut app.sub_apps.main);
|
||||
f(&mut app);
|
||||
std::mem::swap(self, &mut app.sub_apps.main);
|
||||
}
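`run_as_app` leans on `std::mem::swap`: the sub-app is lent to a throwaway `App`, the closure runs against it, and the sub-app is swapped back. The same trick in miniature, with stand-in types:

```rust
use std::mem;

#[derive(Default)]
struct Inner {
    value: i32,
}

#[derive(Default)]
struct Outer {
    main: Inner,
}

// Run a closure that wants `&mut Outer` against a bare `Inner`: lend the Inner
// to a temporary Outer, call the closure, then take the Inner back.
fn run_as_outer<F: FnOnce(&mut Outer)>(inner: &mut Inner, f: F) {
    let mut outer = Outer::default();
    mem::swap(inner, &mut outer.main);
    f(&mut outer);
    mem::swap(inner, &mut outer.main);
}

fn main() {
    let mut inner = Inner { value: 1 };
    run_as_outer(&mut inner, |outer| outer.main.value += 10);
    assert_eq!(inner.value, 11);
}
```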
|
||||
|
||||
/// Returns a reference to the [`World`].
|
||||
pub fn world(&self) -> &World {
|
||||
&self.world
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the [`World`].
|
||||
pub fn world_mut(&mut self) -> &mut World {
|
||||
&mut self.world
|
||||
}
|
||||
|
||||
/// Runs the default schedule.
|
||||
pub fn update(&mut self) {
|
||||
if self.is_building_plugins() {
|
||||
panic!("SubApp::update() was called while a plugin was building.");
|
||||
}
|
||||
|
||||
if let Some(label) = self.update_schedule {
|
||||
self.world.run_schedule(label);
|
||||
}
|
||||
self.world.clear_trackers();
|
||||
}
|
||||
|
||||
/// Extracts data from `world` into the app's world using the registered extract method.
|
||||
///
|
||||
/// **Note:** There is no default extract method. Calling `extract` does nothing if
|
||||
/// [`set_extract`](Self::set_extract) has not been called.
|
||||
pub fn extract(&mut self, world: &mut World) {
|
||||
if let Some(f) = self.extract.as_mut() {
|
||||
f(world, &mut self.world);
|
||||
}
|
||||
}
|
||||
|
||||
/// Sets the method that will be called by [`extract`](Self::extract).
|
||||
///
|
||||
/// The first argument is the `World` to extract data from, the second argument is the app `World`.
|
||||
pub fn set_extract<F>(&mut self, extract: F) -> &mut Self
|
||||
where
|
||||
F: Fn(&mut World, &mut World) + Send + 'static,
|
||||
{
|
||||
self.extract = Some(Box::new(extract));
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::insert_resource`].
|
||||
pub fn insert_resource<R: Resource>(&mut self, resource: R) -> &mut Self {
|
||||
self.world.insert_resource(resource);
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::init_resource`].
|
||||
pub fn init_resource<R: Resource + FromWorld>(&mut self) -> &mut Self {
|
||||
self.world.init_resource::<R>();
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::add_systems`].
|
||||
pub fn add_systems<M>(
|
||||
&mut self,
|
||||
schedule: impl ScheduleLabel,
|
||||
systems: impl IntoSystemConfigs<M>,
|
||||
) -> &mut Self {
|
||||
let mut schedules = self.world.resource_mut::<Schedules>();
|
||||
schedules.add_systems(schedule, systems);
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::register_system`].
|
||||
pub fn register_system<I: 'static, O: 'static, M, S: IntoSystem<I, O, M> + 'static>(
|
||||
&mut self,
|
||||
system: S,
|
||||
) -> SystemId<I, O> {
|
||||
self.world.register_system(system)
|
||||
}
|
||||
|
||||
/// See [`App::configure_sets`].
|
||||
#[track_caller]
|
||||
pub fn configure_sets(
|
||||
&mut self,
|
||||
schedule: impl ScheduleLabel,
|
||||
sets: impl IntoSystemSetConfigs,
|
||||
) -> &mut Self {
|
||||
let mut schedules = self.world.resource_mut::<Schedules>();
|
||||
schedules.configure_sets(schedule, sets);
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::add_schedule`].
|
||||
pub fn add_schedule(&mut self, schedule: Schedule) -> &mut Self {
|
||||
let mut schedules = self.world.resource_mut::<Schedules>();
|
||||
schedules.insert(schedule);
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::init_schedule`].
|
||||
pub fn init_schedule(&mut self, label: impl ScheduleLabel) -> &mut Self {
|
||||
let label = label.intern();
|
||||
let mut schedules = self.world.resource_mut::<Schedules>();
|
||||
if !schedules.contains(label) {
|
||||
schedules.insert(Schedule::new(label));
|
||||
}
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::get_schedule`].
|
||||
pub fn get_schedule(&self, label: impl ScheduleLabel) -> Option<&Schedule> {
|
||||
let schedules = self.world.get_resource::<Schedules>()?;
|
||||
schedules.get(label)
|
||||
}
|
||||
|
||||
/// See [`App::get_schedule_mut`].
|
||||
pub fn get_schedule_mut(&mut self, label: impl ScheduleLabel) -> Option<&mut Schedule> {
|
||||
let schedules = self.world.get_resource_mut::<Schedules>()?;
|
||||
// We must call `.into_inner` here because the borrow checker only understands reborrows
|
||||
// using ordinary references, not our `Mut` smart pointers.
|
||||
schedules.into_inner().get_mut(label)
|
||||
}
|
||||
|
||||
/// See [`App::edit_schedule`].
|
||||
pub fn edit_schedule(
|
||||
&mut self,
|
||||
label: impl ScheduleLabel,
|
||||
mut f: impl FnMut(&mut Schedule),
|
||||
) -> &mut Self {
|
||||
let label = label.intern();
|
||||
let mut schedules = self.world.resource_mut::<Schedules>();
|
||||
if !schedules.contains(label) {
|
||||
schedules.insert(Schedule::new(label));
|
||||
}
|
||||
|
||||
let schedule = schedules.get_mut(label).unwrap();
|
||||
f(schedule);
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::configure_schedules`].
|
||||
pub fn configure_schedules(
|
||||
&mut self,
|
||||
schedule_build_settings: ScheduleBuildSettings,
|
||||
) -> &mut Self {
|
||||
self.world_mut()
|
||||
.resource_mut::<Schedules>()
|
||||
.configure_schedules(schedule_build_settings);
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::allow_ambiguous_component`].
|
||||
pub fn allow_ambiguous_component<T: Component>(&mut self) -> &mut Self {
|
||||
self.world_mut().allow_ambiguous_component::<T>();
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::allow_ambiguous_resource`].
|
||||
pub fn allow_ambiguous_resource<T: Resource>(&mut self) -> &mut Self {
|
||||
self.world_mut().allow_ambiguous_resource::<T>();
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::ignore_ambiguity`].
|
||||
#[track_caller]
|
||||
pub fn ignore_ambiguity<M1, M2, S1, S2>(
|
||||
&mut self,
|
||||
schedule: impl ScheduleLabel,
|
||||
a: S1,
|
||||
b: S2,
|
||||
) -> &mut Self
|
||||
where
|
||||
S1: IntoSystemSet<M1>,
|
||||
S2: IntoSystemSet<M2>,
|
||||
{
|
||||
let schedule = schedule.intern();
|
||||
let mut schedules = self.world.resource_mut::<Schedules>();
|
||||
|
||||
schedules.ignore_ambiguity(schedule, a, b);
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(feature = "bevy_state")]
|
||||
/// See [`App::init_state`].
|
||||
pub fn init_state<S: FreelyMutableState + FromWorld>(&mut self) -> &mut Self {
|
||||
if !self.world.contains_resource::<State<S>>() {
|
||||
setup_state_transitions_in_world(&mut self.world, Some(Startup.intern()));
|
||||
self.init_resource::<State<S>>()
|
||||
.init_resource::<NextState<S>>()
|
||||
.add_event::<StateTransitionEvent<S>>();
|
||||
let schedule = self.get_schedule_mut(StateTransition).unwrap();
|
||||
S::register_state(schedule);
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(feature = "bevy_state")]
|
||||
/// See [`App::insert_state`].
|
||||
pub fn insert_state<S: FreelyMutableState>(&mut self, state: S) -> &mut Self {
|
||||
if !self.world.contains_resource::<State<S>>() {
|
||||
setup_state_transitions_in_world(&mut self.world, Some(Startup.intern()));
|
||||
self.insert_resource::<State<S>>(State::new(state))
|
||||
.init_resource::<NextState<S>>()
|
||||
.add_event::<StateTransitionEvent<S>>();
|
||||
|
||||
let schedule = self.get_schedule_mut(StateTransition).unwrap();
|
||||
S::register_state(schedule);
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(feature = "bevy_state")]
|
||||
/// See [`App::add_computed_state`].
|
||||
pub fn add_computed_state<S: ComputedStates>(&mut self) -> &mut Self {
|
||||
if !self
|
||||
.world
|
||||
.contains_resource::<Events<StateTransitionEvent<S>>>()
|
||||
{
|
||||
setup_state_transitions_in_world(&mut self.world, Some(Startup.intern()));
|
||||
self.add_event::<StateTransitionEvent<S>>();
|
||||
let schedule = self.get_schedule_mut(StateTransition).unwrap();
|
||||
S::register_computed_state_systems(schedule);
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(feature = "bevy_state")]
|
||||
/// See [`App::add_sub_state`].
|
||||
pub fn add_sub_state<S: SubStates>(&mut self) -> &mut Self {
|
||||
if !self
|
||||
.world
|
||||
.contains_resource::<Events<StateTransitionEvent<S>>>()
|
||||
{
|
||||
setup_state_transitions_in_world(&mut self.world, Some(Startup.intern()));
|
||||
self.init_resource::<NextState<S>>();
|
||||
self.add_event::<StateTransitionEvent<S>>();
|
||||
let schedule = self.get_schedule_mut(StateTransition).unwrap();
|
||||
S::register_sub_state_systems(schedule);
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::add_event`].
|
||||
pub fn add_event<T>(&mut self) -> &mut Self
|
||||
where
|
||||
T: Event,
|
||||
{
|
||||
if !self.world.contains_resource::<Events<T>>() {
|
||||
EventRegistry::register_event::<T>(self.world_mut());
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::add_plugins`].
|
||||
pub fn add_plugins<M>(&mut self, plugins: impl Plugins<M>) -> &mut Self {
|
||||
self.run_as_app(|app| plugins.add_to_app(app));
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::is_plugin_added`].
|
||||
pub fn is_plugin_added<T>(&self) -> bool
|
||||
where
|
||||
T: Plugin,
|
||||
{
|
||||
self.plugin_names.contains(std::any::type_name::<T>())
|
||||
}
|
||||
|
||||
/// See [`App::get_added_plugins`].
|
||||
pub fn get_added_plugins<T>(&self) -> Vec<&T>
|
||||
where
|
||||
T: Plugin,
|
||||
{
|
||||
self.plugin_registry
|
||||
.iter()
|
||||
.filter_map(|p| p.downcast_ref())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Returns `true` if a plugin is currently in the middle of being built.
|
||||
pub(crate) fn is_building_plugins(&self) -> bool {
|
||||
self.plugin_build_depth > 0
|
||||
}
|
||||
|
||||
/// Return the state of plugins.
|
||||
#[inline]
|
||||
pub fn plugins_state(&mut self) -> PluginsState {
|
||||
match self.plugins_state {
|
||||
PluginsState::Adding => {
|
||||
let mut state = PluginsState::Ready;
|
||||
let plugins = std::mem::take(&mut self.plugin_registry);
|
||||
self.run_as_app(|app| {
|
||||
for plugin in &plugins {
|
||||
if !plugin.ready(app) {
|
||||
state = PluginsState::Adding;
|
||||
return;
|
||||
}
|
||||
}
|
||||
});
|
||||
self.plugin_registry = plugins;
|
||||
state
|
||||
}
|
||||
state => state,
|
||||
}
|
||||
}
|
||||
|
||||
/// Runs [`Plugin::finish`] for each plugin.
|
||||
pub fn finish(&mut self) {
|
||||
let plugins = std::mem::take(&mut self.plugin_registry);
|
||||
self.run_as_app(|app| {
|
||||
for plugin in &plugins {
|
||||
plugin.finish(app);
|
||||
}
|
||||
});
|
||||
self.plugin_registry = plugins;
|
||||
self.plugins_state = PluginsState::Finished;
|
||||
}
|
||||
|
||||
/// Runs [`Plugin::cleanup`] for each plugin.
|
||||
pub fn cleanup(&mut self) {
|
||||
let plugins = std::mem::take(&mut self.plugin_registry);
|
||||
self.run_as_app(|app| {
|
||||
for plugin in &plugins {
|
||||
plugin.cleanup(app);
|
||||
}
|
||||
});
|
||||
self.plugin_registry = plugins;
|
||||
self.plugins_state = PluginsState::Cleaned;
|
||||
}
|
||||
|
||||
/// See [`App::register_type`].
|
||||
#[cfg(feature = "bevy_reflect")]
|
||||
pub fn register_type<T: bevy_reflect::GetTypeRegistration>(&mut self) -> &mut Self {
|
||||
let registry = self.world.resource_mut::<AppTypeRegistry>();
|
||||
registry.write().register::<T>();
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::register_type_data`].
|
||||
#[cfg(feature = "bevy_reflect")]
|
||||
pub fn register_type_data<
|
||||
T: bevy_reflect::Reflect + bevy_reflect::TypePath,
|
||||
D: bevy_reflect::TypeData + bevy_reflect::FromType<T>,
|
||||
>(
|
||||
&mut self,
|
||||
) -> &mut Self {
|
||||
let registry = self.world.resource_mut::<AppTypeRegistry>();
|
||||
registry.write().register_type_data::<T, D>();
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// The collection of sub-apps that belong to an [`App`].
|
||||
#[derive(Default)]
|
||||
pub struct SubApps {
|
||||
/// The primary sub-app that contains the "main" world.
|
||||
pub main: SubApp,
|
||||
/// Other, labeled sub-apps.
|
||||
pub sub_apps: HashMap<InternedAppLabel, SubApp>,
|
||||
}
|
||||
|
||||
impl SubApps {
|
||||
/// Calls [`update`](SubApp::update) for the main sub-app, and then calls
|
||||
/// [`extract`](SubApp::extract) and [`update`](SubApp::update) for the rest.
|
||||
pub fn update(&mut self) {
|
||||
#[cfg(feature = "trace")]
|
||||
let _bevy_update_span = info_span!("update").entered();
|
||||
{
|
||||
#[cfg(feature = "trace")]
|
||||
let _bevy_frame_update_span = info_span!("main app").entered();
|
||||
self.main.update();
|
||||
}
|
||||
for (_label, sub_app) in self.sub_apps.iter_mut() {
|
||||
#[cfg(feature = "trace")]
|
||||
let _sub_app_span = info_span!("sub app", name = ?_label).entered();
|
||||
sub_app.extract(&mut self.main.world);
|
||||
sub_app.update();
|
||||
}
|
||||
|
||||
self.main.world.clear_trackers();
|
||||
}
|
||||
|
||||
/// Returns an iterator over the sub-apps (starting with the main one).
|
||||
pub fn iter(&self) -> impl Iterator<Item = &SubApp> + '_ {
|
||||
std::iter::once(&self.main).chain(self.sub_apps.values())
|
||||
}
|
||||
|
||||
/// Returns a mutable iterator over the sub-apps (starting with the main one).
|
||||
pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut SubApp> + '_ {
|
||||
std::iter::once(&mut self.main).chain(self.sub_apps.values_mut())
|
||||
}
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "bevy_asset"
|
||||
version = "0.12.0"
|
||||
version = "0.14.0-dev"
|
||||
edition = "2021"
|
||||
description = "Provides asset functionality for Bevy Engine"
|
||||
homepage = "https://bevyengine.org"
|
||||
|
@ -13,18 +13,20 @@ keywords = ["bevy"]
|
|||
[features]
|
||||
file_watcher = ["notify-debouncer-full", "watch"]
|
||||
embedded_watcher = ["file_watcher"]
|
||||
multi-threaded = ["bevy_tasks/multi-threaded"]
|
||||
multi_threaded = ["bevy_tasks/multi_threaded"]
|
||||
asset_processor = []
|
||||
watch = []
|
||||
trace = []
|
||||
|
||||
[dependencies]
|
||||
bevy_app = { path = "../bevy_app", version = "0.12.0" }
|
||||
bevy_asset_macros = { path = "macros", version = "0.12.0" }
|
||||
bevy_ecs = { path = "../bevy_ecs", version = "0.12.0" }
|
||||
bevy_log = { path = "../bevy_log", version = "0.12.0" }
|
||||
bevy_reflect = { path = "../bevy_reflect", version = "0.12.0" }
|
||||
bevy_tasks = { path = "../bevy_tasks", version = "0.12.0" }
|
||||
bevy_utils = { path = "../bevy_utils", version = "0.12.0" }
|
||||
bevy_app = { path = "../bevy_app", version = "0.14.0-dev" }
|
||||
bevy_asset_macros = { path = "macros", version = "0.14.0-dev" }
|
||||
bevy_ecs = { path = "../bevy_ecs", version = "0.14.0-dev" }
|
||||
bevy_reflect = { path = "../bevy_reflect", version = "0.14.0-dev", features = [
|
||||
"uuid",
|
||||
] }
|
||||
bevy_tasks = { path = "../bevy_tasks", version = "0.14.0-dev" }
|
||||
bevy_utils = { path = "../bevy_utils", version = "0.14.0-dev" }
|
||||
|
||||
async-broadcast = "0.5"
|
||||
async-fs = "2.0"
|
||||
|
@ -38,13 +40,18 @@ parking_lot = { version = "0.12", features = ["arc_lock", "send_guard"] }
|
|||
ron = "0.8"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
thiserror = "1.0"
|
||||
uuid = { version = "1.0", features = ["v4"] }
|
||||
|
||||
[target.'cfg(target_os = "android")'.dependencies]
|
||||
bevy_winit = { path = "../bevy_winit", version = "0.12.0" }
|
||||
bevy_winit = { path = "../bevy_winit", version = "0.14.0-dev" }
|
||||
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
||||
wasm-bindgen = { version = "0.2" }
|
||||
web-sys = { version = "0.3", features = ["Request", "Window", "Response"] }
|
||||
web-sys = { version = "0.3", features = [
|
||||
"Window",
|
||||
"Response",
|
||||
"WorkerGlobalScope",
|
||||
] }
|
||||
wasm-bindgen-futures = "0.4"
|
||||
js-sys = "0.3"
|
||||
|
||||
|
@ -52,7 +59,12 @@ js-sys = "0.3"
|
|||
notify-debouncer-full = { version = "0.3.1", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
bevy_core = { path = "../bevy_core", version = "0.12.0" }
|
||||
bevy_core = { path = "../bevy_core", version = "0.14.0-dev" }
|
||||
bevy_log = { path = "../bevy_log", version = "0.14.0-dev" }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
all-features = true
|
||||
|
|
crates/bevy_asset/README.md (new file, 7 lines)
@ -0,0 +1,7 @@
|
|||
# Bevy Asset
|
||||
|
||||
[![License](https://img.shields.io/badge/license-MIT%2FApache-blue.svg)](https://github.com/bevyengine/bevy#license)
|
||||
[![Crates.io](https://img.shields.io/crates/v/bevy_asset.svg)](https://crates.io/crates/bevy_asset)
|
||||
[![Downloads](https://img.shields.io/crates/d/bevy_asset.svg)](https://crates.io/crates/bevy_asset)
|
||||
[![Docs](https://docs.rs/bevy_asset/badge.svg)](https://docs.rs/bevy_asset/latest/bevy_asset/)
|
||||
[![Discord](https://img.shields.io/discord/691052431525675048.svg?label=&logo=discord&logoColor=ffffff&color=7389D8&labelColor=6A7EC2)](https://discord.gg/bevy)
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "bevy_asset_macros"
|
||||
version = "0.12.0"
|
||||
version = "0.14.0-dev"
|
||||
edition = "2021"
|
||||
description = "Derive implementations for bevy_asset"
|
||||
homepage = "https://bevyengine.org"
|
||||
|
@ -12,7 +12,7 @@ keywords = ["bevy"]
|
|||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
bevy_macro_utils = { path = "../../bevy_macro_utils", version = "0.12.0" }
|
||||
bevy_macro_utils = { path = "../../bevy_macro_utils", version = "0.14.0-dev" }
|
||||
|
||||
syn = "2.0"
|
||||
proc-macro2 = "1.0"
|
||||
|
@ -20,3 +20,7 @@ quote = "1.0"
|
|||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
all-features = true
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
// FIXME(3492): remove once docs are ready
|
||||
#![allow(missing_docs)]
|
||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
|
||||
use bevy_macro_utils::BevyManifest;
|
||||
use proc_macro::{Span, TokenStream};
|
||||
|
|
|
@ -7,7 +7,7 @@ use bevy_ecs::{
|
|||
system::{Res, ResMut, Resource},
|
||||
};
|
||||
use bevy_reflect::{Reflect, TypePath};
|
||||
use bevy_utils::{HashMap, Uuid};
|
||||
use bevy_utils::HashMap;
|
||||
use crossbeam_channel::{Receiver, Sender};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
|
@ -17,6 +17,7 @@ use std::{
|
|||
sync::{atomic::AtomicU32, Arc},
|
||||
};
|
||||
use thiserror::Error;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// A generational runtime-only identifier for a specific [`Asset`] stored in [`Assets`]. This is optimized for efficient runtime
|
||||
/// usage and is not suitable for identifying assets across app runs.
|
||||
|
@ -28,12 +29,29 @@ pub struct AssetIndex {
|
|||
pub(crate) index: u32,
|
||||
}
|
||||
|
||||
impl AssetIndex {
|
||||
/// Convert the [`AssetIndex`] into an opaque blob of bits to transport it in circumstances where carrying a strongly typed index isn't possible.
|
||||
///
|
||||
/// The result of this function should not be relied upon for anything except putting it back into [`AssetIndex::from_bits`] to recover the index.
|
||||
pub fn to_bits(self) -> u64 {
|
||||
let Self { generation, index } = self;
|
||||
((generation as u64) << 32) | index as u64
|
||||
}
|
||||
/// Convert an opaque `u64` acquired from [`AssetIndex::to_bits`] back into an [`AssetIndex`]. This should not be used with any inputs other than those
|
||||
/// derived from [`AssetIndex::to_bits`], as there are no guarantees for what will happen with such inputs.
|
||||
pub fn from_bits(bits: u64) -> Self {
|
||||
let index = ((bits << 32) >> 32) as u32;
|
||||
let generation = (bits >> 32) as u32;
|
||||
Self { generation, index }
|
||||
}
|
||||
}
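The packing places the generation in the high 32 bits and the index in the low 32 bits (the `(bits << 32) >> 32` above is the same low-32-bit mask as a plain `as u32` truncation). A std-only sketch of the same round trip on plain integers, mirroring `to_bits`/`from_bits`:

```rust
fn pack(generation: u32, index: u32) -> u64 {
    ((generation as u64) << 32) | index as u64
}

fn unpack(bits: u64) -> (u32, u32) {
    let generation = (bits >> 32) as u32;
    let index = bits as u32; // truncation keeps the low 32 bits
    (generation, index)
}

fn main() {
    let bits = pack(42, 1337);
    assert_eq!(bits, 0x0000_002A_0000_0539);
    assert_eq!(unpack(bits), (42, 1337));
}
```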
|
||||
|
||||
/// Allocates generational [`AssetIndex`] values and facilitates their reuse.
|
||||
pub(crate) struct AssetIndexAllocator {
|
||||
/// A monotonically increasing index.
|
||||
next_index: AtomicU32,
|
||||
recycled_queue_sender: Sender<AssetIndex>,
|
||||
/// This receives every recycled AssetIndex. It serves as a buffer/queue to store indices ready for reuse.
|
||||
/// This receives every recycled [`AssetIndex`]. It serves as a buffer/queue to store indices ready for reuse.
|
||||
recycled_queue_receiver: Receiver<AssetIndex>,
|
||||
recycled_sender: Sender<AssetIndex>,
|
||||
recycled_receiver: Receiver<AssetIndex>,
|
||||
|
@ -276,6 +294,9 @@ pub struct Assets<A: Asset> {
|
|||
hash_map: HashMap<Uuid, A>,
|
||||
handle_provider: AssetHandleProvider,
|
||||
queued_events: Vec<AssetEvent<A>>,
|
||||
/// Assets managed by the `Assets` struct with live strong `Handle`s
|
||||
/// originating from `get_strong_handle`.
|
||||
duplicate_handles: HashMap<AssetId<A>, u16>,
|
||||
}
|
||||
|
||||
impl<A: Asset> Default for Assets<A> {
|
||||
|
@ -288,6 +309,7 @@ impl<A: Asset> Default for Assets<A> {
|
|||
handle_provider,
|
||||
hash_map: Default::default(),
|
||||
queued_events: Default::default(),
|
||||
duplicate_handles: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -306,8 +328,7 @@ impl<A: Asset> Assets<A> {
|
|||
|
||||
/// Inserts the given `asset`, identified by the given `id`. If an asset already exists for `id`, it will be replaced.
|
||||
pub fn insert(&mut self, id: impl Into<AssetId<A>>, asset: A) {
|
||||
let id: AssetId<A> = id.into();
|
||||
match id {
|
||||
match id.into() {
|
||||
AssetId::Index { index, .. } => {
|
||||
self.insert_with_index(index, asset).unwrap();
|
||||
}
|
||||
|
@ -332,9 +353,11 @@ impl<A: Asset> Assets<A> {
|
|||
}
|
||||
|
||||
/// Returns `true` if the `id` exists in this collection. Otherwise it returns `false`.
|
||||
// PERF: Optimize this or remove it
|
||||
pub fn contains(&self, id: impl Into<AssetId<A>>) -> bool {
|
||||
self.get(id).is_some()
|
||||
match id.into() {
|
||||
AssetId::Index { index, .. } => self.dense_storage.get(index).is_some(),
|
||||
AssetId::Uuid { uuid } => self.hash_map.contains_key(&uuid),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn insert_with_uuid(&mut self, uuid: Uuid, asset: A) -> Option<A> {
|
||||
|
@ -375,18 +398,36 @@ impl<A: Asset> Assets<A> {
|
|||
)
|
||||
}
|
||||
|
||||
/// Retrieves a reference to the [`Asset`] with the given `id`, if its exists.
|
||||
/// Upgrade an `AssetId` into a strong `Handle` that will prevent asset drop.
|
||||
///
|
||||
/// Returns `None` if the provided `id` is not part of this `Assets` collection.
|
||||
/// For example, it may have been dropped earlier.
|
||||
#[inline]
|
||||
pub fn get_strong_handle(&mut self, id: AssetId<A>) -> Option<Handle<A>> {
|
||||
if !self.contains(id) {
|
||||
return None;
|
||||
}
|
||||
*self.duplicate_handles.entry(id).or_insert(0) += 1;
|
||||
let index = match id {
|
||||
AssetId::Index { index, .. } => index.into(),
|
||||
AssetId::Uuid { uuid } => uuid.into(),
|
||||
};
|
||||
Some(Handle::Strong(
|
||||
self.handle_provider.get_handle(index, false, None, None),
|
||||
))
|
||||
}
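A hedged usage sketch of the new method, assuming the `Assets`/`Handle` API as shown in this diff; the asset type is a stand-in and `Assets<MyAsset>` must already be registered with the app:

```rust
use bevy_asset::{Asset, Assets, Handle};
use bevy_reflect::TypePath;

// Hypothetical asset type, for illustration only.
#[derive(Asset, TypePath)]
struct MyAsset;

// Re-acquire a strong handle from a bare id (e.g. one received over a channel),
// so the asset is kept alive even after the original handle is dropped.
fn keep_alive(assets: &mut Assets<MyAsset>, original: &Handle<MyAsset>) -> Option<Handle<MyAsset>> {
    // Returns `None` if the id no longer refers to a live asset in this collection.
    assets.get_strong_handle(original.id())
}
```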
|
||||
|
||||
/// Retrieves a reference to the [`Asset`] with the given `id`, if it exists.
|
||||
/// Note that this supports anything that implements `Into<AssetId<A>>`, which includes [`Handle`] and [`AssetId`].
|
||||
#[inline]
|
||||
pub fn get(&self, id: impl Into<AssetId<A>>) -> Option<&A> {
|
||||
let id: AssetId<A> = id.into();
|
||||
match id {
|
||||
match id.into() {
|
||||
AssetId::Index { index, .. } => self.dense_storage.get(index),
|
||||
AssetId::Uuid { uuid } => self.hash_map.get(&uuid),
|
||||
}
|
||||
}
|
||||
|
||||
/// Retrieves a mutable reference to the [`Asset`] with the given `id`, if its exists.
|
||||
/// Retrieves a mutable reference to the [`Asset`] with the given `id`, if it exists.
|
||||
/// Note that this supports anything that implements `Into<AssetId<A>>`, which includes [`Handle`] and [`AssetId`].
|
||||
#[inline]
|
||||
pub fn get_mut(&mut self, id: impl Into<AssetId<A>>) -> Option<&mut A> {
|
||||
|
@ -401,7 +442,7 @@ impl<A: Asset> Assets<A> {
|
|||
result
|
||||
}
|
||||
|
||||
/// Removes (and returns) the [`Asset`] with the given `id`, if its exists.
|
||||
/// Removes (and returns) the [`Asset`] with the given `id`, if it exists.
|
||||
/// Note that this supports anything that implements `Into<AssetId<A>>`, which includes [`Handle`] and [`AssetId`].
|
||||
pub fn remove(&mut self, id: impl Into<AssetId<A>>) -> Option<A> {
|
||||
let id: AssetId<A> = id.into();
|
||||
|
@ -412,28 +453,33 @@ impl<A: Asset> Assets<A> {
|
|||
result
|
||||
}
|
||||
|
||||
/// Removes (and returns) the [`Asset`] with the given `id`, if its exists. This skips emitting [`AssetEvent::Removed`].
|
||||
/// Removes (and returns) the [`Asset`] with the given `id`, if it exists. This skips emitting [`AssetEvent::Removed`].
|
||||
/// Note that this supports anything that implements `Into<AssetId<A>>`, which includes [`Handle`] and [`AssetId`].
|
||||
pub fn remove_untracked(&mut self, id: impl Into<AssetId<A>>) -> Option<A> {
|
||||
let id: AssetId<A> = id.into();
|
||||
self.duplicate_handles.remove(&id);
|
||||
match id {
|
||||
AssetId::Index { index, .. } => self.dense_storage.remove_still_alive(index),
|
||||
AssetId::Uuid { uuid } => self.hash_map.remove(&uuid),
|
||||
}
|
||||
}
|
||||
|
||||
/// Removes (and returns) the [`Asset`] with the given `id`, if its exists.
|
||||
/// Note that this supports anything that implements `Into<AssetId<A>>`, which includes [`Handle`] and [`AssetId`].
|
||||
pub(crate) fn remove_dropped(&mut self, id: impl Into<AssetId<A>>) -> Option<A> {
|
||||
let id: AssetId<A> = id.into();
|
||||
let result = match id {
|
||||
AssetId::Index { index, .. } => self.dense_storage.remove_dropped(index),
|
||||
AssetId::Uuid { uuid } => self.hash_map.remove(&uuid),
|
||||
/// Removes the [`Asset`] with the given `id`.
|
||||
pub(crate) fn remove_dropped(&mut self, id: AssetId<A>) {
|
||||
match self.duplicate_handles.get_mut(&id) {
|
||||
None | Some(0) => {}
|
||||
Some(value) => {
|
||||
*value -= 1;
|
||||
return;
|
||||
}
|
||||
}
|
||||
let existed = match id {
|
||||
AssetId::Index { index, .. } => self.dense_storage.remove_dropped(index).is_some(),
|
||||
AssetId::Uuid { uuid } => self.hash_map.remove(&uuid).is_some(),
|
||||
};
|
||||
if result.is_some() {
|
||||
if existed {
|
||||
self.queued_events.push(AssetEvent::Removed { id });
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
/// Returns `true` if there are no assets in this collection.
|
||||
|
@ -503,24 +549,24 @@ impl<A: Asset> Assets<A> {
|
|||
while let Ok(drop_event) = assets.handle_provider.drop_receiver.try_recv() {
|
||||
let id = drop_event.id.typed();
|
||||
|
||||
assets.queued_events.push(AssetEvent::Unused { id });
|
||||
|
||||
if drop_event.asset_server_managed {
|
||||
let untyped_id = drop_event.id.untyped(TypeId::of::<A>());
|
||||
let untyped_id = id.untyped();
|
||||
if let Some(info) = infos.get(untyped_id) {
|
||||
if info.load_state == LoadState::Loading
|
||||
|| info.load_state == LoadState::NotLoaded
|
||||
{
|
||||
if let LoadState::Loading | LoadState::NotLoaded = info.load_state {
|
||||
not_ready.push(drop_event);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if infos.process_handle_drop(untyped_id) {
|
||||
assets.remove_dropped(id);
|
||||
|
||||
// the process_handle_drop call checks whether new handles have been created since the drop event was fired, before removing the asset
|
||||
if !infos.process_handle_drop(untyped_id) {
|
||||
// a new handle has been created, or the asset doesn't exist
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
assets.remove_dropped(id);
|
||||
}
|
||||
|
||||
assets.queued_events.push(AssetEvent::Unused { id });
|
||||
assets.remove_dropped(id);
|
||||
}
|
||||
|
||||
// TODO: this is _extremely_ inefficient; find a better fix
|
||||
|
@ -536,6 +582,14 @@ impl<A: Asset> Assets<A> {
|
|||
pub fn asset_events(mut assets: ResMut<Self>, mut events: EventWriter<AssetEvent<A>>) {
|
||||
events.send_batch(assets.queued_events.drain(..));
|
||||
}
|
||||
|
||||
/// A run condition for [`asset_events`]. The system will not run if there are no events to
|
||||
/// flush.
|
||||
///
|
||||
/// [`asset_events`]: Self::asset_events
|
||||
pub(crate) fn asset_events_condition(assets: Res<Self>) -> bool {
|
||||
!assets.queued_events.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
/// A mutable iterator over [`Assets`].
|
||||
|
@ -585,3 +639,18 @@ pub struct InvalidGenerationError {
|
|||
index: AssetIndex,
|
||||
current_generation: u32,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::AssetIndex;
|
||||
|
||||
#[test]
|
||||
fn asset_index_round_trip() {
|
||||
let asset_index = AssetIndex {
|
||||
generation: 42,
|
||||
index: 1337,
|
||||
};
|
||||
let roundtripped = AssetIndex::from_bits(asset_index.to_bits());
|
||||
assert_eq!(asset_index, roundtripped);
|
||||
}
|
||||
}
|
||||
|
|
crates/bevy_asset/src/direct_access_ext.rs (new file, 50 lines)
@ -0,0 +1,50 @@
|
|||
//! Add methods on `World` to simplify loading assets when all
|
||||
//! you have is a `World`.
|
||||
|
||||
use bevy_ecs::world::World;
|
||||
|
||||
use crate::{meta::Settings, Asset, AssetPath, AssetServer, Assets, Handle};
|
||||
|
||||
pub trait DirectAssetAccessExt {
|
||||
/// Insert an asset similarly to [`Assets::add`].
|
||||
fn add_asset<A: Asset>(&mut self, asset: impl Into<A>) -> Handle<A>;
|
||||
|
||||
/// Load an asset similarly to [`AssetServer::load`].
|
||||
fn load_asset<'a, A: Asset>(&self, path: impl Into<AssetPath<'a>>) -> Handle<A>;
|
||||
|
||||
/// Load an asset with settings, similarly to [`AssetServer::load_with_settings`].
|
||||
fn load_asset_with_settings<'a, A: Asset, S: Settings>(
|
||||
&self,
|
||||
path: impl Into<AssetPath<'a>>,
|
||||
settings: impl Fn(&mut S) + Send + Sync + 'static,
|
||||
) -> Handle<A>;
|
||||
}
|
||||
impl DirectAssetAccessExt for World {
|
||||
/// Insert an asset similarly to [`Assets::add`].
|
||||
///
|
||||
/// # Panics
|
||||
/// If `self` doesn't have an [`AssetServer`] resource initialized yet.
|
||||
fn add_asset<'a, A: Asset>(&mut self, asset: impl Into<A>) -> Handle<A> {
|
||||
self.resource_mut::<Assets<A>>().add(asset)
|
||||
}
|
||||
|
||||
/// Load an asset similarly to [`AssetServer::load`].
|
||||
///
|
||||
/// # Panics
|
||||
/// If `self` doesn't have an [`AssetServer`] resource initialized yet.
|
||||
fn load_asset<'a, A: Asset>(&self, path: impl Into<AssetPath<'a>>) -> Handle<A> {
|
||||
self.resource::<AssetServer>().load(path)
|
||||
}
|
||||
/// Load an asset with settings, similarly to [`AssetServer::load_with_settings`].
|
||||
///
|
||||
/// # Panics
|
||||
/// If `self` doesn't have an [`AssetServer`] resource initialized yet.
|
||||
fn load_asset_with_settings<'a, A: Asset, S: Settings>(
|
||||
&self,
|
||||
path: impl Into<AssetPath<'a>>,
|
||||
settings: impl Fn(&mut S) + Send + Sync + 'static,
|
||||
) -> Handle<A> {
|
||||
self.resource::<AssetServer>()
|
||||
.load_with_settings(path, settings)
|
||||
}
|
||||
}
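A hedged usage sketch of the extension trait, assuming it is re-exported from `bevy_asset` (the exact import path is not shown in this diff); the asset type and path are illustrative:

```rust
use bevy_asset::{Asset, DirectAssetAccessExt, Handle};
use bevy_ecs::world::World;
use bevy_reflect::TypePath;

// Hypothetical asset type, for illustration only.
#[derive(Asset, TypePath)]
struct Level(String);

// An exclusive system: all you have is `&mut World`, yet assets can still be
// loaded or inserted. Assumes `Assets<Level>` and the `AssetServer` resource
// were set up during app initialization (otherwise these methods panic).
fn spawn_level(world: &mut World) {
    let from_disk: Handle<Level> = world.load_asset("levels/intro.level.ron");
    let generated: Handle<Level> = world.add_asset(Level("generated".into()));
    // Store the handles somewhere (an entity or resource) so the assets stay alive.
    let _ = (from_disk, generated);
}
```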
|
|
@ -3,8 +3,8 @@ use crate::{
|
|||
UntypedAssetId,
|
||||
};
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_reflect::{Reflect, TypePath};
|
||||
use bevy_utils::{get_short_name, Uuid};
|
||||
use bevy_reflect::{std_traits::ReflectDefault, Reflect, TypePath};
|
||||
use bevy_utils::get_short_name;
|
||||
use crossbeam_channel::{Receiver, Sender};
|
||||
use std::{
|
||||
any::TypeId,
|
||||
|
@ -12,6 +12,7 @@ use std::{
|
|||
sync::Arc,
|
||||
};
|
||||
use thiserror::Error;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Provides [`Handle`] and [`UntypedHandle`] _for a specific asset type_.
|
||||
/// This should _only_ be used for one specific asset type.
|
||||
|
@ -23,6 +24,7 @@ pub struct AssetHandleProvider {
|
|||
pub(crate) type_id: TypeId,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct DropEvent {
|
||||
pub(crate) id: InternalAssetId,
|
||||
pub(crate) asset_server_managed: bool,
|
||||
|
@ -121,7 +123,7 @@ impl std::fmt::Debug for StrongHandle {
|
|||
///
|
||||
/// [`Handle::Strong`] also provides access to useful [`Asset`] metadata, such as the [`AssetPath`] (if it exists).
|
||||
#[derive(Component, Reflect)]
|
||||
#[reflect(Component)]
|
||||
#[reflect(Default, Component, Debug, Hash, PartialEq)]
|
||||
pub enum Handle<A: Asset> {
|
||||
/// A "strong" reference to a live (or loading) [`Asset`]. If a [`Handle`] is [`Handle::Strong`], the [`Asset`] will be kept
|
||||
/// alive until the [`Handle`] is dropped. Strong handles also provide access to additional asset metadata.
|
||||
|
@ -247,13 +249,6 @@ impl<A: Asset> PartialEq for Handle<A> {
|
|||
|
||||
impl<A: Asset> Eq for Handle<A> {}
|
||||
|
||||
impl<A: Asset> From<Handle<A>> for AssetId<A> {
|
||||
#[inline]
|
||||
fn from(value: Handle<A>) -> Self {
|
||||
value.id()
|
||||
}
|
||||
}
|
||||
|
||||
impl<A: Asset> From<&Handle<A>> for AssetId<A> {
|
||||
#[inline]
|
||||
fn from(value: &Handle<A>) -> Self {
|
||||
|
@ -261,16 +256,23 @@ impl<A: Asset> From<&Handle<A>> for AssetId<A> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<A: Asset> From<Handle<A>> for UntypedAssetId {
|
||||
impl<A: Asset> From<&Handle<A>> for UntypedAssetId {
|
||||
#[inline]
|
||||
fn from(value: Handle<A>) -> Self {
|
||||
fn from(value: &Handle<A>) -> Self {
|
||||
value.id().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<A: Asset> From<&Handle<A>> for UntypedAssetId {
|
||||
impl<A: Asset> From<&mut Handle<A>> for AssetId<A> {
|
||||
#[inline]
|
||||
fn from(value: &Handle<A>) -> Self {
|
||||
fn from(value: &mut Handle<A>) -> Self {
|
||||
value.id()
|
||||
}
|
||||
}
|
||||
|
||||
impl<A: Asset> From<&mut Handle<A>> for UntypedAssetId {
|
||||
#[inline]
|
||||
fn from(value: &mut Handle<A>) -> Self {
|
||||
value.id().into()
|
||||
}
|
||||
}
|
||||
|
@ -427,13 +429,6 @@ impl PartialOrd for UntypedHandle {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<UntypedHandle> for UntypedAssetId {
|
||||
#[inline]
|
||||
fn from(value: UntypedHandle) -> Self {
|
||||
value.id()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&UntypedHandle> for UntypedAssetId {
|
||||
#[inline]
|
||||
fn from(value: &UntypedHandle) -> Self {
|
||||
|
@ -507,13 +502,13 @@ impl<A: Asset> TryFrom<UntypedHandle> for Handle<A> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Errors preventing the conversion of to/from an [`UntypedHandle`] and an [`Handle`].
|
||||
/// Errors preventing conversion to/from an [`UntypedHandle`] and a [`Handle`].
|
||||
#[derive(Error, Debug, PartialEq, Clone)]
|
||||
#[non_exhaustive]
|
||||
pub enum UntypedAssetConversionError {
|
||||
/// Caused when trying to convert an [`UntypedHandle`] into an [`Handle`] of the wrong type.
|
||||
/// Caused when trying to convert an [`UntypedHandle`] into a [`Handle`] of the wrong type.
|
||||
#[error(
|
||||
"This UntypedHandle is for {found:?} and cannot be converted into an Handle<{expected:?}>"
|
||||
"This UntypedHandle is for {found:?} and cannot be converted into a Handle<{expected:?}>"
|
||||
)]
|
||||
TypeIdMismatch { expected: TypeId, found: TypeId },
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
use crate::{Asset, AssetIndex};
|
||||
use bevy_reflect::Reflect;
|
||||
use bevy_utils::Uuid;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use uuid::Uuid;
|
||||
|
||||
use std::{
|
||||
any::TypeId,
|
||||
|
@ -16,7 +17,7 @@ use thiserror::Error;
|
|||
/// For an identifier tied to the lifetime of an asset, see [`Handle`](`crate::Handle`).
|
||||
///
|
||||
/// For an "untyped" / "generic-less" id, see [`UntypedAssetId`].
|
||||
#[derive(Reflect)]
|
||||
#[derive(Reflect, Serialize, Deserialize)]
|
||||
pub enum AssetId<A: Asset> {
|
||||
/// A small / efficient runtime identifier that can be used to efficiently look up an asset stored in [`Assets`]. This is
|
||||
/// the "default" identifier used for assets. The alternative(s) (ex: [`AssetId::Uuid`]) will only be used if assets are
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
use crate::io::{
|
||||
get_meta_path, AssetReader, AssetReaderError, EmptyPathStream, PathStream, Reader, VecReader,
|
||||
};
|
||||
use bevy_log::error;
|
||||
use bevy_utils::BoxedFuture;
|
||||
use bevy_utils::tracing::error;
|
||||
use std::{ffi::CString, path::Path};
|
||||
|
||||
/// [`AssetReader`] implementation for Android devices, built on top of Android's [`AssetManager`].
|
||||
|
@ -17,57 +16,47 @@ use std::{ffi::CString, path::Path};
|
|||
pub struct AndroidAssetReader;
|
||||
|
||||
impl AssetReader for AndroidAssetReader {
|
||||
fn read<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Reader<'a>>, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
let asset_manager = bevy_winit::ANDROID_APP
|
||||
.get()
|
||||
.expect("Bevy must be setup with the #[bevy_main] macro on Android")
|
||||
.asset_manager();
|
||||
let mut opened_asset = asset_manager
|
||||
.open(&CString::new(path.to_str().unwrap()).unwrap())
|
||||
.ok_or(AssetReaderError::NotFound(path.to_path_buf()))?;
|
||||
let bytes = opened_asset.buffer()?;
|
||||
let reader: Box<Reader> = Box::new(VecReader::new(bytes.to_vec()));
|
||||
Ok(reader)
|
||||
})
|
||||
async fn read<'a>(&'a self, path: &'a Path) -> Result<Box<Reader<'a>>, AssetReaderError> {
|
||||
let asset_manager = bevy_winit::ANDROID_APP
|
||||
.get()
|
||||
.expect("Bevy must be setup with the #[bevy_main] macro on Android")
|
||||
.asset_manager();
|
||||
let mut opened_asset = asset_manager
|
||||
.open(&CString::new(path.to_str().unwrap()).unwrap())
|
||||
.ok_or(AssetReaderError::NotFound(path.to_path_buf()))?;
|
||||
let bytes = opened_asset.buffer()?;
|
||||
let reader: Box<Reader> = Box::new(VecReader::new(bytes.to_vec()));
|
||||
Ok(reader)
|
||||
}
|
||||
|
||||
fn read_meta<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Reader<'a>>, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
let meta_path = get_meta_path(path);
|
||||
let asset_manager = bevy_winit::ANDROID_APP
|
||||
.get()
|
||||
.expect("Bevy must be setup with the #[bevy_main] macro on Android")
|
||||
.asset_manager();
|
||||
let mut opened_asset = asset_manager
|
||||
.open(&CString::new(meta_path.to_str().unwrap()).unwrap())
|
||||
.ok_or(AssetReaderError::NotFound(meta_path))?;
|
||||
let bytes = opened_asset.buffer()?;
|
||||
let reader: Box<Reader> = Box::new(VecReader::new(bytes.to_vec()));
|
||||
Ok(reader)
|
||||
})
|
||||
async fn read_meta<'a>(&'a self, path: &'a Path) -> Result<Box<Reader<'a>>, AssetReaderError> {
|
||||
let meta_path = get_meta_path(path);
|
||||
let asset_manager = bevy_winit::ANDROID_APP
|
||||
.get()
|
||||
.expect("Bevy must be setup with the #[bevy_main] macro on Android")
|
||||
.asset_manager();
|
||||
let mut opened_asset = asset_manager
|
||||
.open(&CString::new(meta_path.to_str().unwrap()).unwrap())
|
||||
.ok_or(AssetReaderError::NotFound(meta_path))?;
|
||||
let bytes = opened_asset.buffer()?;
|
||||
let reader: Box<Reader> = Box::new(VecReader::new(bytes.to_vec()));
|
||||
Ok(reader)
|
||||
}
|
||||
|
||||
fn read_directory<'a>(
|
||||
async fn read_directory<'a>(
|
||||
&'a self,
|
||||
_path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<PathStream>, AssetReaderError>> {
|
||||
) -> Result<Box<PathStream>, AssetReaderError> {
|
||||
let stream: Box<PathStream> = Box::new(EmptyPathStream);
|
||||
error!("Reading directories is not supported with the AndroidAssetReader");
|
||||
Box::pin(async move { Ok(stream) })
|
||||
Ok(stream)
|
||||
}
|
||||
|
||||
fn is_directory<'a>(
|
||||
async fn is_directory<'a>(
|
||||
&'a self,
|
||||
_path: &'a Path,
|
||||
) -> BoxedFuture<'a, std::result::Result<bool, AssetReaderError>> {
|
||||
) -> std::result::Result<bool, AssetReaderError> {
|
||||
error!("Reading directories is not supported with the AndroidAssetReader");
|
||||
Box::pin(async move { Ok(false) })
|
||||
Ok(false)
|
||||
}
|
||||
}
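The mechanical change in this hunk is dropping the hand-rolled `BoxedFuture` / `Box::pin(async move { .. })` wrappers in favor of `async fn` in the trait implementation; the method bodies stay the same. A minimal std-only illustration of that refactor on a toy trait (not the real `AssetReader`), assuming Rust 1.75+ for `async fn` in traits:

```rust
use std::future::Future;
use std::pin::Pin;

type BoxedFuture<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>;

struct Memory(Vec<u8>);

// Before: every method hand-boxes its future.
trait ReadOld {
    fn read<'a>(&'a self, path: &'a str) -> BoxedFuture<'a, Result<Vec<u8>, String>>;
}

impl ReadOld for Memory {
    fn read<'a>(&'a self, _path: &'a str) -> BoxedFuture<'a, Result<Vec<u8>, String>> {
        Box::pin(async move { Ok(self.0.clone()) })
    }
}

// After: `async fn` in traits removes the boxing boilerplate; the body is unchanged.
trait ReadNew {
    async fn read<'a>(&'a self, path: &'a str) -> Result<Vec<u8>, String>;
}

impl ReadNew for Memory {
    async fn read<'a>(&'a self, _path: &'a str) -> Result<Vec<u8>, String> {
        Ok(self.0.clone())
    }
}
```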
|
||||
|
|
|
@ -3,7 +3,7 @@ use crate::io::{
|
|||
memory::Dir,
|
||||
AssetSourceEvent, AssetWatcher,
|
||||
};
|
||||
use bevy_log::warn;
|
||||
use bevy_utils::tracing::warn;
|
||||
use bevy_utils::{Duration, HashMap};
|
||||
use notify_debouncer_full::{notify::RecommendedWatcher, Debouncer, FileIdMap};
|
||||
use parking_lot::RwLock;
|
||||
|
@ -25,7 +25,7 @@ pub struct EmbeddedWatcher {
|
|||
impl EmbeddedWatcher {
|
||||
pub fn new(
|
||||
dir: Dir,
|
||||
root_paths: Arc<RwLock<HashMap<PathBuf, PathBuf>>>,
|
||||
root_paths: Arc<RwLock<HashMap<Box<Path>, PathBuf>>>,
|
||||
sender: crossbeam_channel::Sender<AssetSourceEvent>,
|
||||
debounce_wait_time: Duration,
|
||||
) -> Self {
|
||||
|
@ -49,7 +49,7 @@ impl AssetWatcher for EmbeddedWatcher {}
|
|||
/// the initial static bytes from the file embedded in the binary.
|
||||
pub(crate) struct EmbeddedEventHandler {
|
||||
sender: crossbeam_channel::Sender<AssetSourceEvent>,
|
||||
root_paths: Arc<RwLock<HashMap<PathBuf, PathBuf>>>,
|
||||
root_paths: Arc<RwLock<HashMap<Box<Path>, PathBuf>>>,
|
||||
root: PathBuf,
|
||||
dir: Dir,
|
||||
last_event: Option<AssetSourceEvent>,
|
||||
|
@ -61,7 +61,7 @@ impl FilesystemEventHandler for EmbeddedEventHandler {
|
|||
|
||||
fn get_path(&self, absolute_path: &Path) -> Option<(PathBuf, bool)> {
|
||||
let (local_path, is_meta) = get_asset_path(&self.root, absolute_path);
|
||||
let final_path = self.root_paths.read().get(&local_path)?.clone();
|
||||
let final_path = self.root_paths.read().get(local_path.as_path())?.clone();
|
||||
if is_meta {
|
||||
warn!("Meta file asset hot-reloading is not supported yet: {final_path:?}");
|
||||
}
|
||||
|
|
|
@ -22,7 +22,7 @@ pub const EMBEDDED: &str = "embedded";
|
|||
pub struct EmbeddedAssetRegistry {
|
||||
dir: Dir,
|
||||
#[cfg(feature = "embedded_watcher")]
|
||||
root_paths: std::sync::Arc<parking_lot::RwLock<bevy_utils::HashMap<PathBuf, PathBuf>>>,
|
||||
root_paths: std::sync::Arc<parking_lot::RwLock<bevy_utils::HashMap<Box<Path>, PathBuf>>>,
|
||||
}
|
||||
|
||||
impl EmbeddedAssetRegistry {
|
||||
|
@ -35,7 +35,7 @@ impl EmbeddedAssetRegistry {
|
|||
#[cfg(feature = "embedded_watcher")]
|
||||
self.root_paths
|
||||
.write()
|
||||
.insert(full_path.to_owned(), asset_path.to_owned());
|
||||
.insert(full_path.into(), asset_path.to_owned());
|
||||
self.dir.insert_asset(asset_path, value);
|
||||
}
|
||||
|
||||
|
@ -48,7 +48,7 @@ impl EmbeddedAssetRegistry {
|
|||
#[cfg(feature = "embedded_watcher")]
|
||||
self.root_paths
|
||||
.write()
|
||||
.insert(full_path.to_owned(), asset_path.to_owned());
|
||||
.insert(full_path.into(), asset_path.to_owned());
|
||||
self.dir.insert_meta(asset_path, value);
|
||||
}
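Switching the map key from `PathBuf` to `Box<Path>` keeps ownership boxed while still allowing lookups by a borrowed `&Path`, since `Box<Path>: Borrow<Path>`. A std-only sketch of the same idea (the paths are illustrative):

```rust
use std::collections::HashMap;
use std::path::{Path, PathBuf};

fn main() {
    let mut root_paths: HashMap<Box<Path>, PathBuf> = HashMap::new();

    // `.into()` converts the owned `PathBuf` key into a `Box<Path>`.
    let full_path = PathBuf::from("assets/embedded/rock.wgsl");
    root_paths.insert(full_path.into(), PathBuf::from("rock.wgsl"));

    // Look up with a borrowed `&Path`; no `PathBuf` allocation is needed.
    let local = Path::new("assets/embedded/rock.wgsl");
    assert_eq!(root_paths.get(local), Some(&PathBuf::from("rock.wgsl")));
}
```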
|
||||
|
||||
|
@ -183,7 +183,7 @@ pub fn _embedded_asset_path(
|
|||
/// # use bevy_asset::{Asset, AssetServer};
|
||||
/// # use bevy_reflect::TypePath;
|
||||
/// # let asset_server: AssetServer = panic!();
|
||||
/// #[derive(Asset, TypePath)]
|
||||
/// # #[derive(Asset, TypePath)]
|
||||
/// # struct Shader;
|
||||
/// let shader = asset_server.load::<Shader>("embedded://bevy_rock/render/rock.wgsl");
|
||||
/// ```
|
||||
|
@ -219,12 +219,12 @@ pub fn _embedded_asset_path(
|
|||
#[macro_export]
|
||||
macro_rules! embedded_asset {
|
||||
($app: ident, $path: expr) => {{
|
||||
embedded_asset!($app, "src", $path)
|
||||
$crate::embedded_asset!($app, "src", $path)
|
||||
}};
|
||||
|
||||
($app: ident, $source_path: expr, $path: expr) => {{
|
||||
let mut embedded = $app
|
||||
.world
|
||||
.world_mut()
|
||||
.resource_mut::<$crate::io::embedded::EmbeddedAssetRegistry>();
|
||||
let path = $crate::embedded_path!($source_path, $path);
|
||||
let watched_path = $crate::io::embedded::watched_path(file!(), $path);
|
||||
|
@ -253,8 +253,8 @@ pub fn watched_path(_source_file_path: &'static str, _asset_path: &'static str)
|
|||
#[macro_export]
|
||||
macro_rules! load_internal_asset {
|
||||
($app: ident, $handle: expr, $path_str: expr, $loader: expr) => {{
|
||||
let mut assets = $app.world.resource_mut::<$crate::Assets<_>>();
|
||||
assets.insert($handle, ($loader)(
|
||||
let mut assets = $app.world_mut().resource_mut::<$crate::Assets<_>>();
|
||||
assets.insert($handle.id(), ($loader)(
|
||||
include_str!($path_str),
|
||||
std::path::Path::new(file!())
|
||||
.parent()
|
||||
|
@ -265,8 +265,8 @@ macro_rules! load_internal_asset {
|
|||
}};
|
||||
// we can't support params without variadic arguments, so internal assets with additional params can't be hot-reloaded
|
||||
($app: ident, $handle: ident, $path_str: expr, $loader: expr $(, $param:expr)+) => {{
|
||||
let mut assets = $app.world.resource_mut::<$crate::Assets<_>>();
|
||||
assets.insert($handle, ($loader)(
|
||||
let mut assets = $app.world_mut().resource_mut::<$crate::Assets<_>>();
|
||||
assets.insert($handle.id(), ($loader)(
|
||||
include_str!($path_str),
|
||||
std::path::Path::new(file!())
|
||||
.parent()
|
||||
|
@ -282,9 +282,9 @@ macro_rules! load_internal_asset {
|
|||
#[macro_export]
|
||||
macro_rules! load_internal_binary_asset {
|
||||
($app: ident, $handle: expr, $path_str: expr, $loader: expr) => {{
|
||||
let mut assets = $app.world.resource_mut::<$crate::Assets<_>>();
|
||||
let mut assets = $app.world_mut().resource_mut::<$crate::Assets<_>>();
|
||||
assets.insert(
|
||||
$handle,
|
||||
$handle.id(),
|
||||
($loader)(
|
||||
include_bytes!($path_str).as_ref(),
|
||||
std::path::Path::new(file!())
|
||||
|
|
|
@ -3,7 +3,6 @@ use crate::io::{
|
|||
Reader, Writer,
|
||||
};
|
||||
use async_fs::{read_dir, File};
|
||||
use bevy_utils::BoxedFuture;
|
||||
use futures_lite::StreamExt;
|
||||
|
||||
use std::path::Path;
|
||||
|
@ -11,215 +10,168 @@ use std::path::Path;
|
|||
use super::{FileAssetReader, FileAssetWriter};
|
||||
|
||||
impl AssetReader for FileAssetReader {
|
||||
fn read<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Reader<'a>>, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
match File::open(&full_path).await {
|
||||
Ok(file) => {
|
||||
let reader: Box<Reader> = Box::new(file);
|
||||
Ok(reader)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
async fn read<'a>(&'a self, path: &'a Path) -> Result<Box<Reader<'a>>, AssetReaderError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
match File::open(&full_path).await {
|
||||
Ok(file) => {
|
||||
let reader: Box<Reader> = Box::new(file);
|
||||
Ok(reader)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn read_meta<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Reader<'a>>, AssetReaderError>> {
|
||||
async fn read_meta<'a>(&'a self, path: &'a Path) -> Result<Box<Reader<'a>>, AssetReaderError> {
|
||||
let meta_path = get_meta_path(path);
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
match File::open(&full_path).await {
|
||||
Ok(file) => {
|
||||
let reader: Box<Reader> = Box::new(file);
|
||||
Ok(reader)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
match File::open(&full_path).await {
|
||||
Ok(file) => {
|
||||
let reader: Box<Reader> = Box::new(file);
|
||||
Ok(reader)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn read_directory<'a>(
|
||||
async fn read_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<PathStream>, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
match read_dir(&full_path).await {
|
||||
Ok(read_dir) => {
|
||||
let root_path = self.root_path.clone();
|
||||
let mapped_stream = read_dir.filter_map(move |f| {
|
||||
f.ok().and_then(|dir_entry| {
|
||||
let path = dir_entry.path();
|
||||
// filter out meta files as they are not considered assets
|
||||
if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
|
||||
if ext.eq_ignore_ascii_case("meta") {
|
||||
return None;
|
||||
}
|
||||
) -> Result<Box<PathStream>, AssetReaderError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
match read_dir(&full_path).await {
|
||||
Ok(read_dir) => {
|
||||
let root_path = self.root_path.clone();
|
||||
let mapped_stream = read_dir.filter_map(move |f| {
|
||||
f.ok().and_then(|dir_entry| {
|
||||
let path = dir_entry.path();
|
||||
// filter out meta files as they are not considered assets
|
||||
if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
|
||||
if ext.eq_ignore_ascii_case("meta") {
|
||||
return None;
|
||||
}
|
||||
let relative_path = path.strip_prefix(&root_path).unwrap();
|
||||
Some(relative_path.to_owned())
|
||||
})
|
||||
});
|
||||
let read_dir: Box<PathStream> = Box::new(mapped_stream);
|
||||
Ok(read_dir)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
}
|
||||
let relative_path = path.strip_prefix(&root_path).unwrap();
|
||||
Some(relative_path.to_owned())
|
||||
})
|
||||
});
|
||||
let read_dir: Box<PathStream> = Box::new(mapped_stream);
|
||||
Ok(read_dir)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn is_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<bool, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
let metadata = full_path
|
||||
.metadata()
|
||||
.map_err(|_e| AssetReaderError::NotFound(path.to_owned()))?;
|
||||
Ok(metadata.file_type().is_dir())
|
||||
})
|
||||
async fn is_directory<'a>(&'a self, path: &'a Path) -> Result<bool, AssetReaderError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
let metadata = full_path
|
||||
.metadata()
|
||||
.map_err(|_e| AssetReaderError::NotFound(path.to_owned()))?;
|
||||
Ok(metadata.file_type().is_dir())
|
||||
}
|
||||
}
|
||||
|
||||
impl AssetWriter for FileAssetWriter {
|
||||
fn write<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Writer>, AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
if let Some(parent) = full_path.parent() {
|
||||
async_fs::create_dir_all(parent).await?;
|
||||
}
|
||||
let file = File::create(&full_path).await?;
|
||||
let writer: Box<Writer> = Box::new(file);
|
||||
Ok(writer)
|
||||
})
|
||||
async fn write<'a>(&'a self, path: &'a Path) -> Result<Box<Writer>, AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
if let Some(parent) = full_path.parent() {
|
||||
async_fs::create_dir_all(parent).await?;
|
||||
}
|
||||
let file = File::create(&full_path).await?;
|
||||
let writer: Box<Writer> = Box::new(file);
|
||||
Ok(writer)
|
||||
}
|
||||
|
||||
fn write_meta<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Writer>, AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let meta_path = get_meta_path(path);
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
if let Some(parent) = full_path.parent() {
|
||||
async_fs::create_dir_all(parent).await?;
|
||||
}
|
||||
let file = File::create(&full_path).await?;
|
||||
let writer: Box<Writer> = Box::new(file);
|
||||
Ok(writer)
|
||||
})
|
||||
async fn write_meta<'a>(&'a self, path: &'a Path) -> Result<Box<Writer>, AssetWriterError> {
|
||||
let meta_path = get_meta_path(path);
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
if let Some(parent) = full_path.parent() {
|
||||
async_fs::create_dir_all(parent).await?;
|
||||
}
|
||||
let file = File::create(&full_path).await?;
|
||||
let writer: Box<Writer> = Box::new(file);
|
||||
Ok(writer)
|
||||
}
|
||||
|
||||
fn remove<'a>(&'a self, path: &'a Path) -> BoxedFuture<'a, Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
async_fs::remove_file(full_path).await?;
|
||||
Ok(())
|
||||
})
|
||||
async fn remove<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
async_fs::remove_file(full_path).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_meta<'a>(&'a self, path: &'a Path) -> BoxedFuture<'a, Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let meta_path = get_meta_path(path);
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
async_fs::remove_file(full_path).await?;
|
||||
Ok(())
|
||||
})
|
||||
async fn remove_meta<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> {
|
||||
let meta_path = get_meta_path(path);
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
async_fs::remove_file(full_path).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rename<'a>(
|
||||
async fn rename<'a>(
|
||||
&'a self,
|
||||
old_path: &'a Path,
|
||||
new_path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_old_path = self.root_path.join(old_path);
|
||||
let full_new_path = self.root_path.join(new_path);
|
||||
if let Some(parent) = full_new_path.parent() {
|
||||
async_fs::create_dir_all(parent).await?;
|
||||
}
|
||||
async_fs::rename(full_old_path, full_new_path).await?;
|
||||
Ok(())
|
||||
})
|
||||
) -> Result<(), AssetWriterError> {
|
||||
let full_old_path = self.root_path.join(old_path);
|
||||
let full_new_path = self.root_path.join(new_path);
|
||||
if let Some(parent) = full_new_path.parent() {
|
||||
async_fs::create_dir_all(parent).await?;
|
||||
}
|
||||
async_fs::rename(full_old_path, full_new_path).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rename_meta<'a>(
|
||||
async fn rename_meta<'a>(
|
||||
&'a self,
|
||||
old_path: &'a Path,
|
||||
new_path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let old_meta_path = get_meta_path(old_path);
|
||||
let new_meta_path = get_meta_path(new_path);
|
||||
let full_old_path = self.root_path.join(old_meta_path);
|
||||
let full_new_path = self.root_path.join(new_meta_path);
|
||||
if let Some(parent) = full_new_path.parent() {
|
||||
async_fs::create_dir_all(parent).await?;
|
||||
}
|
||||
async_fs::rename(full_old_path, full_new_path).await?;
|
||||
Ok(())
|
||||
})
|
||||
) -> Result<(), AssetWriterError> {
|
||||
let old_meta_path = get_meta_path(old_path);
|
||||
let new_meta_path = get_meta_path(new_path);
|
||||
let full_old_path = self.root_path.join(old_meta_path);
|
||||
let full_new_path = self.root_path.join(new_meta_path);
|
||||
if let Some(parent) = full_new_path.parent() {
|
||||
async_fs::create_dir_all(parent).await?;
|
||||
}
|
||||
async_fs::rename(full_old_path, full_new_path).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
async_fs::remove_dir_all(full_path).await?;
|
||||
Ok(())
|
||||
})
|
||||
async fn remove_directory<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
async_fs::remove_dir_all(full_path).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_empty_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
async_fs::remove_dir(full_path).await?;
|
||||
Ok(())
|
||||
})
|
||||
async fn remove_empty_directory<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
async_fs::remove_dir(full_path).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_assets_in_directory<'a>(
|
||||
async fn remove_assets_in_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
async_fs::remove_dir_all(&full_path).await?;
|
||||
async_fs::create_dir_all(&full_path).await?;
|
||||
Ok(())
|
||||
})
|
||||
) -> Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
async_fs::remove_dir_all(&full_path).await?;
|
||||
async_fs::create_dir_all(&full_path).await?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
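This file's diff replaces every `fn ... -> BoxedFuture` body built with `Box::pin(async move { .. })` by a plain `async fn`. A minimal sketch of the two shapes on a hypothetical reader trait, assuming Rust 1.75+ (async fn in traits) and the `futures-lite` crate already imported in this module:

```rust
// Hypothetical `OldReader`/`NewReader` traits, not Bevy's: same behavior,
// different amounts of boilerplate around the returned future.
use std::future::Future;
use std::io;
use std::path::Path;
use std::pin::Pin;

type BoxedFuture<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>;

// Old shape: every method hand-rolls the boxing and pinning.
trait OldReader {
    fn read<'a>(&'a self, path: &'a Path) -> BoxedFuture<'a, io::Result<Vec<u8>>>;
}

// New shape: the compiler generates the future type.
trait NewReader {
    async fn read<'a>(&'a self, path: &'a Path) -> io::Result<Vec<u8>>;
}

struct InMemory(Vec<u8>);

impl OldReader for InMemory {
    fn read<'a>(&'a self, _path: &'a Path) -> BoxedFuture<'a, io::Result<Vec<u8>>> {
        Box::pin(async move { Ok(self.0.clone()) })
    }
}

impl NewReader for InMemory {
    async fn read<'a>(&'a self, _path: &'a Path) -> io::Result<Vec<u8>> {
        Ok(self.0.clone())
    }
}

fn main() {
    let store = InMemory(vec![1, 2, 3]);
    let via_old = futures_lite::future::block_on(OldReader::read(&store, Path::new("a.bin"))).unwrap();
    let via_new = futures_lite::future::block_on(NewReader::read(&store, Path::new("a.bin"))).unwrap();
    assert_eq!(via_old, via_new);
}
```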
|
@ -1,5 +1,6 @@
|
|||
use crate::io::{AssetSourceEvent, AssetWatcher};
|
||||
use bevy_log::error;
|
||||
use crate::path::normalize_path;
|
||||
use bevy_utils::tracing::error;
|
||||
use bevy_utils::Duration;
|
||||
use crossbeam_channel::Sender;
|
||||
use notify_debouncer_full::{
|
||||
|
@ -28,7 +29,7 @@ impl FileWatcher {
|
|||
sender: Sender<AssetSourceEvent>,
|
||||
debounce_wait_time: Duration,
|
||||
) -> Result<Self, notify::Error> {
|
||||
let root = super::get_base_path().join(root);
|
||||
let root = normalize_path(super::get_base_path().join(root).as_path());
|
||||
let watcher = new_asset_event_debouncer(
|
||||
root.clone(),
|
||||
debounce_wait_time,
|
||||
|
|
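The watcher hunk wraps the root in `normalize_path` before handing it to the debouncer; that function's body is not shown in this diff. Purely as an assumption about what such a helper typically does, here is a lexical normalization sketch (drops `.`, folds `..`, never touches the filesystem; a hypothetical stand-in, not Bevy's implementation):

```rust
// Hypothetical lexical normalization: collapse `.` components and fold `..`
// onto the previous component. Simplified: a leading `..` is simply dropped.
use std::path::{Component, Path, PathBuf};

fn normalize_path(path: &Path) -> PathBuf {
    let mut out = PathBuf::new();
    for component in path.components() {
        match component {
            Component::CurDir => {}
            Component::ParentDir => {
                out.pop();
            }
            other => out.push(other.as_os_str()),
        }
    }
    out
}

fn main() {
    assert_eq!(
        normalize_path(Path::new("assets/./textures/../shaders")),
        PathBuf::from("assets/shaders")
    );
}
```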
|
@ -1,12 +1,12 @@
|
|||
#[cfg(feature = "file_watcher")]
|
||||
mod file_watcher;
|
||||
|
||||
#[cfg(feature = "multi-threaded")]
|
||||
#[cfg(feature = "multi_threaded")]
|
||||
mod file_asset;
|
||||
#[cfg(not(feature = "multi-threaded"))]
|
||||
#[cfg(not(feature = "multi_threaded"))]
|
||||
mod sync_file_asset;
|
||||
|
||||
use bevy_log::error;
|
||||
use bevy_utils::tracing::error;
|
||||
#[cfg(feature = "file_watcher")]
|
||||
pub use file_watcher::*;
|
||||
|
||||
|
@ -75,16 +75,15 @@ impl FileAssetWriter {
|
|||
///
|
||||
/// See `get_base_path` below.
|
||||
pub fn new<P: AsRef<Path> + std::fmt::Debug>(path: P, create_root: bool) -> Self {
|
||||
let root_path = get_base_path().join(path.as_ref());
|
||||
if create_root {
|
||||
if let Err(e) = std::fs::create_dir_all(&path) {
|
||||
if let Err(e) = std::fs::create_dir_all(&root_path) {
|
||||
error!(
|
||||
"Failed to create root directory {:?} for file asset writer: {:?}",
|
||||
path, e
|
||||
root_path, e
|
||||
);
|
||||
}
|
||||
}
|
||||
Self {
|
||||
root_path: get_base_path().join(path.as_ref()),
|
||||
}
|
||||
Self { root_path }
|
||||
}
|
||||
}
|
||||
|
|
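The module hunk renames the feature to `multi_threaded` and keeps selecting one of two backend modules at compile time. A small sketch of that cfg-gating pattern with hypothetical module and feature names (the `multi_threaded` feature would be declared in `Cargo.toml`):

```rust
// Exactly one module is compiled in, and both expose the same function,
// so the rest of the crate stays agnostic about which backend it got.
#[cfg(feature = "multi_threaded")]
mod file_asset {
    pub fn backend() -> &'static str {
        "async backend (multi_threaded enabled)"
    }
}

#[cfg(not(feature = "multi_threaded"))]
mod sync_file_asset {
    pub fn backend() -> &'static str {
        "blocking backend (multi_threaded disabled)"
    }
}

#[cfg(feature = "multi_threaded")]
use file_asset::backend;
#[cfg(not(feature = "multi_threaded"))]
use sync_file_asset::backend;

fn main() {
    println!("selected: {}", backend());
}
```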
|
@ -1,15 +1,14 @@
|
|||
use futures_io::{AsyncRead, AsyncWrite};
|
||||
use futures_io::{AsyncRead, AsyncSeek, AsyncWrite};
|
||||
use futures_lite::Stream;
|
||||
|
||||
use crate::io::{
|
||||
get_meta_path, AssetReader, AssetReaderError, AssetWriter, AssetWriterError, PathStream,
|
||||
Reader, Writer,
|
||||
};
|
||||
use bevy_utils::BoxedFuture;
|
||||
|
||||
use std::{
|
||||
fs::{read_dir, File},
|
||||
io::{Read, Write},
|
||||
io::{Read, Seek, Write},
|
||||
path::{Path, PathBuf},
|
||||
pin::Pin,
|
||||
task::Poll,
|
||||
|
@ -31,6 +30,18 @@ impl AsyncRead for FileReader {
|
|||
}
|
||||
}
|
||||
|
||||
impl AsyncSeek for FileReader {
|
||||
fn poll_seek(
|
||||
self: Pin<&mut Self>,
|
||||
_cx: &mut std::task::Context<'_>,
|
||||
pos: std::io::SeekFrom,
|
||||
) -> Poll<std::io::Result<u64>> {
|
||||
let this = self.get_mut();
|
||||
let seek = this.0.seek(pos);
|
||||
Poll::Ready(seek)
|
||||
}
|
||||
}
|
||||
|
||||
struct FileWriter(File);
|
||||
|
||||
impl AsyncWrite for FileWriter {
|
||||
|
@ -76,221 +87,180 @@ impl Stream for DirReader {
|
|||
}
|
||||
|
||||
impl AssetReader for FileAssetReader {
|
||||
fn read<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Reader<'a>>, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
match File::open(&full_path) {
|
||||
Ok(file) => {
|
||||
let reader: Box<Reader> = Box::new(FileReader(file));
|
||||
Ok(reader)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
async fn read<'a>(&'a self, path: &'a Path) -> Result<Box<Reader<'a>>, AssetReaderError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
match File::open(&full_path) {
|
||||
Ok(file) => {
|
||||
let reader: Box<Reader> = Box::new(FileReader(file));
|
||||
Ok(reader)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn read_meta<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Reader<'a>>, AssetReaderError>> {
|
||||
async fn read_meta<'a>(&'a self, path: &'a Path) -> Result<Box<Reader<'a>>, AssetReaderError> {
|
||||
let meta_path = get_meta_path(path);
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
match File::open(&full_path) {
|
||||
Ok(file) => {
|
||||
let reader: Box<Reader> = Box::new(FileReader(file));
|
||||
Ok(reader)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
match File::open(&full_path) {
|
||||
Ok(file) => {
|
||||
let reader: Box<Reader> = Box::new(FileReader(file));
|
||||
Ok(reader)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn read_directory<'a>(
|
||||
async fn read_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<PathStream>, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
match read_dir(&full_path) {
|
||||
Ok(read_dir) => {
|
||||
let root_path = self.root_path.clone();
|
||||
let mapped_stream = read_dir.filter_map(move |f| {
|
||||
f.ok().and_then(|dir_entry| {
|
||||
let path = dir_entry.path();
|
||||
// filter out meta files as they are not considered assets
|
||||
if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
|
||||
if ext.eq_ignore_ascii_case("meta") {
|
||||
return None;
|
||||
}
|
||||
) -> Result<Box<PathStream>, AssetReaderError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
match read_dir(&full_path) {
|
||||
Ok(read_dir) => {
|
||||
let root_path = self.root_path.clone();
|
||||
let mapped_stream = read_dir.filter_map(move |f| {
|
||||
f.ok().and_then(|dir_entry| {
|
||||
let path = dir_entry.path();
|
||||
// filter out meta files as they are not considered assets
|
||||
if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
|
||||
if ext.eq_ignore_ascii_case("meta") {
|
||||
return None;
|
||||
}
|
||||
let relative_path = path.strip_prefix(&root_path).unwrap();
|
||||
Some(relative_path.to_owned())
|
||||
})
|
||||
});
|
||||
let read_dir: Box<PathStream> = Box::new(DirReader(mapped_stream.collect()));
|
||||
Ok(read_dir)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
}
|
||||
let relative_path = path.strip_prefix(&root_path).unwrap();
|
||||
Some(relative_path.to_owned())
|
||||
})
|
||||
});
|
||||
let read_dir: Box<PathStream> = Box::new(DirReader(mapped_stream.collect()));
|
||||
Ok(read_dir)
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Err(AssetReaderError::NotFound(full_path))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn is_directory<'a>(
|
||||
async fn is_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, std::result::Result<bool, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
let metadata = full_path
|
||||
.metadata()
|
||||
.map_err(|_e| AssetReaderError::NotFound(path.to_owned()))?;
|
||||
Ok(metadata.file_type().is_dir())
|
||||
})
|
||||
) -> std::result::Result<bool, AssetReaderError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
let metadata = full_path
|
||||
.metadata()
|
||||
.map_err(|_e| AssetReaderError::NotFound(path.to_owned()))?;
|
||||
Ok(metadata.file_type().is_dir())
|
||||
}
|
||||
}
|
||||
|
||||
impl AssetWriter for FileAssetWriter {
|
||||
fn write<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Writer>, AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
if let Some(parent) = full_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
let file = File::create(&full_path)?;
|
||||
let writer: Box<Writer> = Box::new(FileWriter(file));
|
||||
Ok(writer)
|
||||
})
|
||||
async fn write<'a>(&'a self, path: &'a Path) -> Result<Box<Writer>, AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
if let Some(parent) = full_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
let file = File::create(&full_path)?;
|
||||
let writer: Box<Writer> = Box::new(FileWriter(file));
|
||||
Ok(writer)
|
||||
}
|
||||
|
||||
fn write_meta<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Writer>, AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let meta_path = get_meta_path(path);
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
if let Some(parent) = full_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
let file = File::create(&full_path)?;
|
||||
let writer: Box<Writer> = Box::new(FileWriter(file));
|
||||
Ok(writer)
|
||||
})
|
||||
async fn write_meta<'a>(&'a self, path: &'a Path) -> Result<Box<Writer>, AssetWriterError> {
|
||||
let meta_path = get_meta_path(path);
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
if let Some(parent) = full_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
let file = File::create(&full_path)?;
|
||||
let writer: Box<Writer> = Box::new(FileWriter(file));
|
||||
Ok(writer)
|
||||
}
|
||||
|
||||
fn remove<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, std::result::Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_file(full_path)?;
|
||||
Ok(())
|
||||
})
|
||||
async fn remove<'a>(&'a self, path: &'a Path) -> std::result::Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_file(full_path)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_meta<'a>(
|
||||
async fn remove_meta<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, std::result::Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let meta_path = get_meta_path(path);
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
std::fs::remove_file(full_path)?;
|
||||
Ok(())
|
||||
})
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
let meta_path = get_meta_path(path);
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
std::fs::remove_file(full_path)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_directory<'a>(
|
||||
async fn remove_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, std::result::Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_dir_all(full_path)?;
|
||||
Ok(())
|
||||
})
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_dir_all(full_path)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_empty_directory<'a>(
|
||||
async fn remove_empty_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, std::result::Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_dir(full_path)?;
|
||||
Ok(())
|
||||
})
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_dir(full_path)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_assets_in_directory<'a>(
|
||||
async fn remove_assets_in_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, std::result::Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_dir_all(&full_path)?;
|
||||
std::fs::create_dir_all(&full_path)?;
|
||||
Ok(())
|
||||
})
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_dir_all(&full_path)?;
|
||||
std::fs::create_dir_all(&full_path)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rename<'a>(
|
||||
async fn rename<'a>(
|
||||
&'a self,
|
||||
old_path: &'a Path,
|
||||
new_path: &'a Path,
|
||||
) -> BoxedFuture<'a, std::result::Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let full_old_path = self.root_path.join(old_path);
|
||||
let full_new_path = self.root_path.join(new_path);
|
||||
if let Some(parent) = full_new_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
std::fs::rename(full_old_path, full_new_path)?;
|
||||
Ok(())
|
||||
})
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
let full_old_path = self.root_path.join(old_path);
|
||||
let full_new_path = self.root_path.join(new_path);
|
||||
if let Some(parent) = full_new_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
std::fs::rename(full_old_path, full_new_path)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rename_meta<'a>(
|
||||
async fn rename_meta<'a>(
|
||||
&'a self,
|
||||
old_path: &'a Path,
|
||||
new_path: &'a Path,
|
||||
) -> BoxedFuture<'a, std::result::Result<(), AssetWriterError>> {
|
||||
Box::pin(async move {
|
||||
let old_meta_path = get_meta_path(old_path);
|
||||
let new_meta_path = get_meta_path(new_path);
|
||||
let full_old_path = self.root_path.join(old_meta_path);
|
||||
let full_new_path = self.root_path.join(new_meta_path);
|
||||
if let Some(parent) = full_new_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
std::fs::rename(full_old_path, full_new_path)?;
|
||||
Ok(())
|
||||
})
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
let old_meta_path = get_meta_path(old_path);
|
||||
let new_meta_path = get_meta_path(new_path);
|
||||
let full_old_path = self.root_path.join(old_meta_path);
|
||||
let full_new_path = self.root_path.join(new_meta_path);
|
||||
if let Some(parent) = full_new_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
std::fs::rename(full_old_path, full_new_path)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
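Besides the same `async fn` conversion, this file gains an `AsyncSeek` impl that delegates to the blocking `Seek` of the wrapped `std::fs::File` and reports `Poll::Ready` immediately. A minimal sketch of that adapter pattern around a `std::io::Cursor` instead of a file, assuming the `futures-io`/`futures-lite` crates already used here:

```rust
// Hypothetical `CursorReader` wrapper: the inner seek is synchronous, so the
// poll-based async seek can always complete on the first call.
use futures_io::AsyncSeek;
use std::io::{Cursor, Seek, SeekFrom};
use std::pin::Pin;
use std::task::{Context, Poll};

struct CursorReader(Cursor<Vec<u8>>);

impl AsyncSeek for CursorReader {
    fn poll_seek(
        self: Pin<&mut Self>,
        _cx: &mut Context<'_>,
        pos: SeekFrom,
    ) -> Poll<std::io::Result<u64>> {
        // Delegate to the blocking seek and hand the result straight back.
        let this = self.get_mut();
        Poll::Ready(this.0.seek(pos))
    }
}

fn main() {
    let mut reader = CursorReader(Cursor::new(vec![0u8; 16]));
    let pos = futures_lite::future::block_on(futures_lite::io::AsyncSeekExt::seek(
        &mut reader,
        SeekFrom::Start(8),
    ))
    .unwrap();
    assert_eq!(pos, 8);
}
```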
|
@ -1,11 +1,8 @@
|
|||
use crate::io::{AssetReader, AssetReaderError, PathStream, Reader};
|
||||
use bevy_utils::{BoxedFuture, HashMap};
|
||||
use bevy_utils::HashMap;
|
||||
use crossbeam_channel::{Receiver, Sender};
|
||||
use parking_lot::RwLock;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
/// A "gated" reader that will prevent asset reads from returning until
|
||||
/// a given path has been "opened" using [`GateOpener`].
|
||||
|
@ -13,7 +10,7 @@ use std::{
|
|||
/// This is built primarily for unit tests.
|
||||
pub struct GatedReader<R: AssetReader> {
|
||||
reader: R,
|
||||
gates: Arc<RwLock<HashMap<PathBuf, (Sender<()>, Receiver<()>)>>>,
|
||||
gates: Arc<RwLock<HashMap<Box<Path>, (Sender<()>, Receiver<()>)>>>,
|
||||
}
|
||||
|
||||
impl<R: AssetReader + Clone> Clone for GatedReader<R> {
|
||||
|
@ -27,7 +24,7 @@ impl<R: AssetReader + Clone> Clone for GatedReader<R> {
|
|||
|
||||
/// Opens path "gates" for a [`GatedReader`].
|
||||
pub struct GateOpener {
|
||||
gates: Arc<RwLock<HashMap<PathBuf, (Sender<()>, Receiver<()>)>>>,
|
||||
gates: Arc<RwLock<HashMap<Box<Path>, (Sender<()>, Receiver<()>)>>>,
|
||||
}
|
||||
|
||||
impl GateOpener {
|
||||
|
@ -36,7 +33,7 @@ impl GateOpener {
|
|||
pub fn open<P: AsRef<Path>>(&self, path: P) {
|
||||
let mut gates = self.gates.write();
|
||||
let gates = gates
|
||||
.entry(path.as_ref().to_path_buf())
|
||||
.entry_ref(path.as_ref())
|
||||
.or_insert_with(crossbeam_channel::unbounded);
|
||||
gates.0.send(()).unwrap();
|
||||
}
|
||||
|
@ -58,42 +55,31 @@ impl<R: AssetReader> GatedReader<R> {
|
|||
}
|
||||
|
||||
impl<R: AssetReader> AssetReader for GatedReader<R> {
|
||||
fn read<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Reader<'a>>, AssetReaderError>> {
|
||||
async fn read<'a>(&'a self, path: &'a Path) -> Result<Box<Reader<'a>>, AssetReaderError> {
|
||||
let receiver = {
|
||||
let mut gates = self.gates.write();
|
||||
let gates = gates
|
||||
.entry(path.to_path_buf())
|
||||
.entry_ref(path.as_ref())
|
||||
.or_insert_with(crossbeam_channel::unbounded);
|
||||
gates.1.clone()
|
||||
};
|
||||
Box::pin(async move {
|
||||
receiver.recv().unwrap();
|
||||
let result = self.reader.read(path).await?;
|
||||
Ok(result)
|
||||
})
|
||||
receiver.recv().unwrap();
|
||||
let result = self.reader.read(path).await?;
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
fn read_meta<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Reader<'a>>, AssetReaderError>> {
|
||||
self.reader.read_meta(path)
|
||||
async fn read_meta<'a>(&'a self, path: &'a Path) -> Result<Box<Reader<'a>>, AssetReaderError> {
|
||||
self.reader.read_meta(path).await
|
||||
}
|
||||
|
||||
fn read_directory<'a>(
|
||||
async fn read_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<PathStream>, AssetReaderError>> {
|
||||
self.reader.read_directory(path)
|
||||
) -> Result<Box<PathStream>, AssetReaderError> {
|
||||
self.reader.read_directory(path).await
|
||||
}
|
||||
|
||||
fn is_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<bool, AssetReaderError>> {
|
||||
self.reader.is_directory(path)
|
||||
async fn is_directory<'a>(&'a self, path: &'a Path) -> Result<bool, AssetReaderError> {
|
||||
self.reader.is_directory(path).await
|
||||
}
|
||||
}
|
||||
|
|
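The gated reader now keys its map by `Box<Path>` and uses `entry_ref`; the core idea is unchanged: a read blocks on a per-path channel until someone "opens" the gate. A hedged sketch of that gating pattern with hypothetical types, using `PathBuf` keys and a `std::sync::Mutex` for self-containment (the real code uses `parking_lot` and a hashed `entry_ref` lookup), and assuming the `crossbeam-channel` crate imported in this module:

```rust
// Hypothetical `Gate`: each path lazily gets an unbounded channel; `wait`
// blocks on the receiver and `open` sends one permit per call.
use crossbeam_channel::{unbounded, Receiver, Sender};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use std::thread;

#[derive(Clone, Default)]
struct Gate {
    gates: Arc<Mutex<HashMap<PathBuf, (Sender<()>, Receiver<()>)>>>,
}

impl Gate {
    fn channel(&self, path: &Path) -> (Sender<()>, Receiver<()>) {
        let mut gates = self.gates.lock().unwrap();
        gates
            .entry(path.to_path_buf())
            .or_insert_with(unbounded)
            .clone()
    }

    /// Allow exactly one pending (or future) read of `path` to proceed.
    fn open(&self, path: &Path) {
        self.channel(path).0.send(()).unwrap();
    }

    /// Block until someone opens the gate for `path`.
    fn wait(&self, path: &Path) {
        self.channel(path).1.recv().unwrap();
    }
}

fn main() {
    let gate = Gate::default();
    let reader_gate = gate.clone();
    let handle = thread::spawn(move || {
        reader_gate.wait(Path::new("a.png"));
        "read finished"
    });
    gate.open(Path::new("a.png")); // without this, the reader thread blocks forever
    println!("{}", handle.join().unwrap());
}
```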
|
@ -1,8 +1,9 @@
|
|||
use crate::io::{AssetReader, AssetReaderError, PathStream, Reader};
|
||||
use bevy_utils::{BoxedFuture, HashMap};
|
||||
use futures_io::AsyncRead;
|
||||
use bevy_utils::HashMap;
|
||||
use futures_io::{AsyncRead, AsyncSeek};
|
||||
use futures_lite::{ready, Stream};
|
||||
use parking_lot::RwLock;
|
||||
use std::io::SeekFrom;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
pin::Pin,
|
||||
|
@ -12,9 +13,9 @@ use std::{
|
|||
|
||||
#[derive(Default, Debug)]
|
||||
struct DirInternal {
|
||||
assets: HashMap<String, Data>,
|
||||
metadata: HashMap<String, Data>,
|
||||
dirs: HashMap<String, Dir>,
|
||||
assets: HashMap<Box<str>, Data>,
|
||||
metadata: HashMap<Box<str>, Data>,
|
||||
dirs: HashMap<Box<str>, Dir>,
|
||||
path: PathBuf,
|
||||
}
|
||||
|
||||
|
@ -46,7 +47,7 @@ impl Dir {
|
|||
dir = self.get_or_insert_dir(parent);
|
||||
}
|
||||
dir.0.write().assets.insert(
|
||||
path.file_name().unwrap().to_string_lossy().to_string(),
|
||||
path.file_name().unwrap().to_string_lossy().into(),
|
||||
Data {
|
||||
value: value.into(),
|
||||
path: path.to_owned(),
|
||||
|
@ -60,7 +61,7 @@ impl Dir {
|
|||
dir = self.get_or_insert_dir(parent);
|
||||
}
|
||||
dir.0.write().metadata.insert(
|
||||
path.file_name().unwrap().to_string_lossy().to_string(),
|
||||
path.file_name().unwrap().to_string_lossy().into(),
|
||||
Data {
|
||||
value: value.into(),
|
||||
path: path.to_owned(),
|
||||
|
@ -73,7 +74,7 @@ impl Dir {
|
|||
let mut full_path = PathBuf::new();
|
||||
for c in path.components() {
|
||||
full_path.push(c);
|
||||
let name = c.as_os_str().to_string_lossy().to_string();
|
||||
let name = c.as_os_str().to_string_lossy().into();
|
||||
dir = {
|
||||
let dirs = &mut dir.0.write().dirs;
|
||||
dirs.entry(name)
|
||||
|
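The hunks above shrink the in-memory directory keys from `String` to `Box<str>`. A hedged note in code form: `Box<str>` stores just a pointer and a length (no spare capacity), and `&str` lookups keep working because `Box<str>: Borrow<str>`.

```rust
// Minimal demonstration with a standalone map, not Bevy's Dir structure.
use std::collections::HashMap;

fn main() {
    let mut assets: HashMap<Box<str>, Vec<u8>> = HashMap::new();
    let name: Box<str> = "rock.wgsl".to_string().into_boxed_str();
    assets.insert(name, vec![1, 2, 3]);

    // Query with a plain &str; no allocation is needed on lookup.
    assert!(assets.contains_key("rock.wgsl"));

    // Two words (pointer + length) versus String's three (pointer + length + capacity).
    assert_eq!(
        std::mem::size_of::<Box<str>>(),
        2 * std::mem::size_of::<usize>()
    );
}
```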
@ -147,7 +148,12 @@ impl Stream for DirStream {
|
|||
let dir = this.dir.0.read();
|
||||
|
||||
let dir_index = this.dir_index;
|
||||
if let Some(dir_path) = dir.dirs.keys().nth(dir_index).map(|d| dir.path.join(d)) {
|
||||
if let Some(dir_path) = dir
|
||||
.dirs
|
||||
.keys()
|
||||
.nth(dir_index)
|
||||
.map(|d| dir.path.join(d.as_ref()))
|
||||
{
|
||||
this.dir_index += 1;
|
||||
Poll::Ready(Some(dir_path))
|
||||
} else {
|
||||
|
@ -231,63 +237,88 @@ impl AsyncRead for DataReader {
|
|||
}
|
||||
}
|
||||
|
||||
impl AsyncSeek for DataReader {
|
||||
fn poll_seek(
|
||||
mut self: Pin<&mut Self>,
|
||||
_cx: &mut std::task::Context<'_>,
|
||||
pos: SeekFrom,
|
||||
) -> Poll<std::io::Result<u64>> {
|
||||
let result = match pos {
|
||||
SeekFrom::Start(offset) => offset.try_into(),
|
||||
SeekFrom::End(offset) => self
|
||||
.data
|
||||
.value()
|
||||
.len()
|
||||
.try_into()
|
||||
.map(|len: i64| len - offset),
|
||||
SeekFrom::Current(offset) => self
|
||||
.bytes_read
|
||||
.try_into()
|
||||
.map(|bytes_read: i64| bytes_read + offset),
|
||||
};
|
||||
|
||||
if let Ok(new_pos) = result {
|
||||
if new_pos < 0 {
|
||||
Poll::Ready(Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"seek position is out of range",
|
||||
)))
|
||||
} else {
|
||||
self.bytes_read = new_pos as _;
|
||||
|
||||
Poll::Ready(Ok(new_pos as _))
|
||||
}
|
||||
} else {
|
||||
Poll::Ready(Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"seek position is out of range",
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AssetReader for MemoryAssetReader {
|
||||
fn read<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Reader<'a>>, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
self.root
|
||||
.get_asset(path)
|
||||
.map(|data| {
|
||||
let reader: Box<Reader> = Box::new(DataReader {
|
||||
data,
|
||||
bytes_read: 0,
|
||||
});
|
||||
reader
|
||||
})
|
||||
.ok_or_else(|| AssetReaderError::NotFound(path.to_path_buf()))
|
||||
})
|
||||
async fn read<'a>(&'a self, path: &'a Path) -> Result<Box<Reader<'a>>, AssetReaderError> {
|
||||
self.root
|
||||
.get_asset(path)
|
||||
.map(|data| {
|
||||
let reader: Box<Reader> = Box::new(DataReader {
|
||||
data,
|
||||
bytes_read: 0,
|
||||
});
|
||||
reader
|
||||
})
|
||||
.ok_or_else(|| AssetReaderError::NotFound(path.to_path_buf()))
|
||||
}
|
||||
|
||||
fn read_meta<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<Reader<'a>>, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
self.root
|
||||
.get_metadata(path)
|
||||
.map(|data| {
|
||||
let reader: Box<Reader> = Box::new(DataReader {
|
||||
data,
|
||||
bytes_read: 0,
|
||||
});
|
||||
reader
|
||||
})
|
||||
.ok_or_else(|| AssetReaderError::NotFound(path.to_path_buf()))
|
||||
})
|
||||
async fn read_meta<'a>(&'a self, path: &'a Path) -> Result<Box<Reader<'a>>, AssetReaderError> {
|
||||
self.root
|
||||
.get_metadata(path)
|
||||
.map(|data| {
|
||||
let reader: Box<Reader> = Box::new(DataReader {
|
||||
data,
|
||||
bytes_read: 0,
|
||||
});
|
||||
reader
|
||||
})
|
||||
.ok_or_else(|| AssetReaderError::NotFound(path.to_path_buf()))
|
||||
}
|
||||
|
||||
fn read_directory<'a>(
|
||||
async fn read_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<Box<PathStream>, AssetReaderError>> {
|
||||
Box::pin(async move {
|
||||
self.root
|
||||
.get_dir(path)
|
||||
.map(|dir| {
|
||||
let stream: Box<PathStream> = Box::new(DirStream::new(dir));
|
||||
stream
|
||||
})
|
||||
.ok_or_else(|| AssetReaderError::NotFound(path.to_path_buf()))
|
||||
})
|
||||
) -> Result<Box<PathStream>, AssetReaderError> {
|
||||
self.root
|
||||
.get_dir(path)
|
||||
.map(|dir| {
|
||||
let stream: Box<PathStream> = Box::new(DirStream::new(dir));
|
||||
stream
|
||||
})
|
||||
.ok_or_else(|| AssetReaderError::NotFound(path.to_path_buf()))
|
||||
}
|
||||
|
||||
fn is_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> BoxedFuture<'a, Result<bool, AssetReaderError>> {
|
||||
Box::pin(async move { Ok(self.root.get_dir(path).is_some()) })
|
||||
async fn is_directory<'a>(&'a self, path: &'a Path) -> Result<bool, AssetReaderError> {
|
||||
Ok(self.root.get_dir(path).is_some())
|
||||
}
|
||||
}
|
||||
|
||||
|
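The `AsyncSeek` impl for `DataReader` above resolves a `SeekFrom` against the bytes already read and the buffer length, rejecting targets that fall out of range. A comparable computation as a tiny standalone helper (hypothetical function, following `std::io::SeekFrom` semantics where `End(n)` resolves to `len + n`):

```rust
// Hypothetical helper: map (current offset, buffer length, SeekFrom) to the
// new absolute offset, or an InvalidInput error for unrepresentable/negative targets.
use std::io::{Error, ErrorKind, SeekFrom};

fn resolve_seek(current: u64, len: u64, pos: SeekFrom) -> std::io::Result<u64> {
    let target: i64 = match pos {
        SeekFrom::Start(offset) => i64::try_from(offset),
        SeekFrom::End(offset) => i64::try_from(len).map(|len| len + offset),
        SeekFrom::Current(offset) => i64::try_from(current).map(|cur| cur + offset),
    }
    .map_err(|_| Error::new(ErrorKind::InvalidInput, "seek position is out of range"))?;

    if target < 0 {
        Err(Error::new(ErrorKind::InvalidInput, "seek position is out of range"))
    } else {
        Ok(target as u64)
    }
}

fn main() {
    assert_eq!(resolve_seek(0, 10, SeekFrom::End(-4)).unwrap(), 6);
    assert_eq!(resolve_seek(2, 10, SeekFrom::Current(3)).unwrap(), 5);
    assert!(resolve_seek(0, 10, SeekFrom::Current(-1)).is_err());
}
```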
|