mirror of
https://github.com/bevyengine/bevy
synced 2024-11-10 07:04:33 +00:00
Merge branch 'main' into brp
This commit is contained in:
commit
2d71527062
602 changed files with 30786 additions and 17936 deletions
|
@ -7,9 +7,9 @@
|
|||
#
|
||||
# ## LLD
|
||||
#
|
||||
# LLD is a linker from the LLVM project that supports Linux, Windows, MacOS, and WASM. It has the greatest
|
||||
# LLD is a linker from the LLVM project that supports Linux, Windows, macOS, and Wasm. It has the greatest
|
||||
# platform support and the easiest installation process. It is enabled by default in this file for Linux
|
||||
# and Windows. On MacOS, the default linker yields higher performance than LLD and is used instead.
|
||||
# and Windows. On macOS, the default linker yields higher performance than LLD and is used instead.
|
||||
#
|
||||
# To install, please scroll to the corresponding table for your target (eg. `[target.x86_64-pc-windows-msvc]`
|
||||
# for Windows) and follow the steps under `LLD linker`.
|
||||
|
@ -25,7 +25,7 @@
|
|||
# your corresponding target, disable LLD by commenting out its `-Clink-arg=...` line, and enable Mold by
|
||||
# *uncommenting* its `-Clink-arg=...` line.
|
||||
#
|
||||
# There is a fork of Mold named Sold that supports MacOS, but it is unmaintained and is about the same speed as
|
||||
# There is a fork of Mold named Sold that supports macOS, but it is unmaintained and is about the same speed as
|
||||
# the default ld64 linker. For this reason, it is not included in this file.
|
||||
#
|
||||
# For more information, please see Mold's repository at <https://github.com/rui314/mold>.
|
||||
|
@ -83,12 +83,21 @@ rustflags = [
|
|||
# - Ubuntu: `sudo apt-get install mold clang`
|
||||
# - Fedora: `sudo dnf install mold clang`
|
||||
# - Arch: `sudo pacman -S mold clang`
|
||||
# "-Clink-arg=-fuse-ld=/usr/bin/mold",
|
||||
# "-Clink-arg=-fuse-ld=mold",
|
||||
|
||||
# Nightly
|
||||
# "-Zshare-generics=y",
|
||||
# "-Zthreads=0",
|
||||
]
|
||||
# Some systems may experience linker performance issues when running doc tests.
|
||||
# See https://github.com/bevyengine/bevy/issues/12207 for details.
|
||||
rustdocflags = [
|
||||
# LLD linker
|
||||
"-Clink-arg=-fuse-ld=lld",
|
||||
|
||||
# Mold linker
|
||||
# "-Clink-arg=-fuse-ld=mold",
|
||||
]
|
||||
|
||||
[target.x86_64-apple-darwin]
|
||||
rustflags = [
|
||||
|
@ -142,7 +151,7 @@ rustflags = [
|
|||
]
|
||||
|
||||
# Optional: Uncommenting the following improves compile times, but reduces the amount of debug info to 'line number tables only'
|
||||
# In most cases the gains are negligible, but if you are on macos and have slow compile times you should see significant gains.
|
||||
# In most cases the gains are negligible, but if you are on macOS and have slow compile times you should see significant gains.
|
||||
# [profile.dev]
|
||||
# debug = 1
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
# repository before you can use this action.
|
||||
#
|
||||
# This action will only install dependencies when the current operating system is Linux. It will do
|
||||
# nothing on any other OS (MacOS, Windows).
|
||||
# nothing on any other OS (macOS, Windows).
|
||||
|
||||
name: Install Linux dependencies
|
||||
description: Installs the dependencies necessary to build Bevy on Linux.
|
||||
|
|
39
.github/contributing/engine_style_guide.md
vendored
39
.github/contributing/engine_style_guide.md
vendored
|
@ -1,39 +0,0 @@
|
|||
# Style guide: Engine
|
||||
|
||||
## Contributing
|
||||
|
||||
For more advice on contributing to the engine, see the [relevant section](../../CONTRIBUTING.md#Contributing-code) of `CONTRIBUTING.md`.
|
||||
|
||||
## General guidelines
|
||||
|
||||
1. Prefer granular imports over glob imports like `bevy_ecs::prelude::*`.
|
||||
2. Use a consistent comment style:
|
||||
1. `///` doc comments belong above `#[derive(Trait)]` invocations.
|
||||
2. `//` comments should generally go above the line in question, rather than in-line.
|
||||
3. Avoid `/* */` block comments, even when writing long comments.
|
||||
4. Use \`variable_name\` code blocks in comments to signify that you're referring to specific types and variables.
|
||||
5. Start comments with capital letters. End them with a period if they are sentence-like.
|
||||
3. Use comments to organize long and complex stretches of code that can't sensibly be refactored into separate functions.
|
||||
4. When using [Bevy error codes](https://bevyengine.org/learn/errors/) include a link to the relevant error on the Bevy website in the returned error message `... See: https://bevyengine.org/learn/errors/b0003`.
|
||||
|
||||
## Rust API guidelines
|
||||
|
||||
As a reference for our API development we are using the [Rust API guidelines][Rust API guidelines]. Generally, these should be followed, except for the following areas of disagreement:
|
||||
|
||||
### Areas of disagreements
|
||||
|
||||
Some areas mentioned in the [Rust API guidelines][Rust API guidelines] we do not agree with. These areas will be expanded whenever we find something else we do not agree with, so be sure to check these from time to time.
|
||||
|
||||
> All items have a rustdoc example
|
||||
|
||||
- This guideline is too strong and not applicable for everything inside of the Bevy game engine. For functionality that requires more context or needs a more interactive demonstration (such as rendering or input features), make use of the `examples` folder instead.
|
||||
|
||||
> Examples use ?, not try!, not unwrap
|
||||
|
||||
- This guideline is usually reasonable, but not always required.
|
||||
|
||||
> Only smart pointers implement Deref and DerefMut
|
||||
|
||||
- Generally a good rule of thumb, but we're probably going to deliberately violate this for single-element wrapper types like `Life(u32)`. The behavior is still predictable and it significantly improves ergonomics / new user comprehension.
|
||||
|
||||
[Rust API guidelines]: https://rust-lang.github.io/api-guidelines/about.html
|
64
.github/contributing/example_style_guide.md
vendored
64
.github/contributing/example_style_guide.md
vendored
|
@ -1,64 +0,0 @@
|
|||
# Style guide: Examples
|
||||
|
||||
For more advice on writing examples, see the [relevant section](../../CONTRIBUTING.md#writing-examples) of CONTRIBUTING.md.
|
||||
|
||||
## Organization
|
||||
|
||||
1. Examples should live in an appropriate subfolder of `/examples`.
|
||||
2. Examples should be a single file if possible.
|
||||
3. Assets live in `./assets`. Try to avoid adding new assets unless strictly necessary to keep the repo small. Don't add "large" asset files.
|
||||
4. Each example should try to follow this order:
|
||||
1. Imports
|
||||
2. A `fn main()` block
|
||||
3. Example logic
|
||||
5. Try to structure app / plugin construction in the same fashion as the actual code.
|
||||
6. Examples should typically not have tests, as they are not directly reusable by the Bevy user.
|
||||
|
||||
## Stylistic preferences
|
||||
|
||||
1. Use simple, descriptive variable names.
|
||||
1. Avoid names like `MyComponent` in favor of more descriptive terms like `Events`.
|
||||
2. Prefer single letter differentiators like `EventsA` and `EventsB` to nonsense words like `EventsFoo` and `EventsBar`.
|
||||
3. Avoid repeating the type of variables in their name where possible. For example, `Color` should be preferred to `ColorComponent`.
|
||||
2. Prefer glob imports of `bevy::prelude::*` and `bevy::sub_crate::*` over granular imports (for terseness).
|
||||
3. Use a consistent comment style:
|
||||
1. `///` doc comments belong above `#[derive(Trait)]` invocations.
|
||||
2. `//` comments should generally go above the line in question, rather than in-line.
|
||||
3. Avoid `/* */` block comments, even when writing long comments.
|
||||
4. Use \`variable_name\` code blocks in comments to signify that you're referring to specific types and variables.
|
||||
5. Start comments with capital letters; end them with a period if they are sentence-like.
|
||||
4. Use comments to organize long and complex stretches of code that can't sensibly be refactored into separate functions.
|
||||
5. Avoid making variables `pub` unless it is needed for your example.
|
||||
|
||||
## Code conventions
|
||||
|
||||
1. Refactor configurable values ("magic numbers") out into constants with clear names.
|
||||
2. Prefer `for` loops over `.for_each`. The latter is faster (for now), but it is less clear for beginners, less idiomatic, and less flexible.
|
||||
3. Use `.single` and `.single_mut` where appropriate.
|
||||
4. In Queries, prefer `With<T>` filters over actually fetching unused data with `&T`.
|
||||
5. Prefer disjoint queries using `With` and `Without` over param sets when you need more than one query in a single system.
|
||||
6. Prefer structs with named fields over tuple structs except in the case of single-field wrapper types.
|
||||
7. Use enum-labels over string-labels for app / schedule / etc. labels.
|
||||
|
||||
## "Feature" examples
|
||||
|
||||
These examples demonstrate the usage of specific engine features in clear, minimal ways.
|
||||
|
||||
1. Focus on demonstrating exactly one feature in an example
|
||||
2. Try to keep your names divorced from the context of a specific game, and focused on the feature you are demonstrating.
|
||||
3. Where they exist, show good alternative approaches to accomplish the same task and explain why you may prefer one over the other.
|
||||
4. Examples should have a visible effect when run, either in the command line or a graphical window.
|
||||
|
||||
## "Game" examples
|
||||
|
||||
These examples show how to build simple games in Bevy in a cohesive way.
|
||||
|
||||
1. Each of these examples lives in the [/examples/games] folder.
|
||||
2. Aim for minimum but viable status: the game should be playable and not obviously buggy but does not need to be polished, featureful, or terribly fun.
|
||||
3. Focus on code quality and demonstrating good, extensible patterns for users.
|
||||
1. Make good use of enums and states to organize your game logic.
|
||||
2. Keep components as small as possible but no smaller: all of the data on a component should generally be accessed at once.
|
||||
3. Keep systems small: they should have a clear single purpose.
|
||||
4. Avoid duplicating logic across similar entities whenever possible by sharing systems and components.
|
||||
4. Use `///` doc comments to explain what each function / struct does as if the example were part of a polished production codebase.
|
||||
5. Arrange your code into modules within the same file to allow for simple code folding / organization.
|
2
.github/example-run/ambiguity_detection.ron
vendored
Normal file
2
.github/example-run/ambiguity_detection.ron
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
(
|
||||
)
|
|
@ -6,7 +6,7 @@ test.beforeEach(async ({ page }) => {
|
|||
|
||||
const MAX_TIMEOUT_FOR_TEST = 300_000;
|
||||
|
||||
test.describe('WASM example', () => {
|
||||
test.describe('Wasm example', () => {
|
||||
test('Wait for success', async ({ page }, testInfo) => {
|
||||
let start = new Date().getTime();
|
||||
|
||||
|
|
31
.github/workflows/ci.yml
vendored
31
.github/workflows/ci.yml
vendored
|
@ -72,7 +72,7 @@ jobs:
|
|||
run: cargo run -p ci -- lints
|
||||
|
||||
miri:
|
||||
# Explicity use MacOS 14 to take advantage of M1 chip.
|
||||
# Explicity use macOS 14 to take advantage of M1 chip.
|
||||
runs-on: macos-14
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
|
@ -219,7 +219,7 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Check for typos
|
||||
uses: crate-ci/typos@v1.23.2
|
||||
uses: crate-ci/typos@v1.24.1
|
||||
- name: Typos info
|
||||
if: failure()
|
||||
run: |
|
||||
|
@ -231,7 +231,7 @@ jobs:
|
|||
|
||||
|
||||
run-examples-macos-metal:
|
||||
# Explicity use MacOS 14 to take advantage of M1 chip.
|
||||
# Explicity use macOS 14 to take advantage of M1 chip.
|
||||
runs-on: macos-14
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
|
@ -436,28 +436,3 @@ jobs:
|
|||
echo " Example: 'use bevy::sprite::MaterialMesh2dBundle;' instead of 'bevy_internal::sprite::MaterialMesh2dBundle;'"
|
||||
exit 1
|
||||
fi
|
||||
check-cfg:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: ${{ runner.os }}-check-doc-${{ hashFiles('**/Cargo.toml') }}
|
||||
- uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ env.NIGHTLY_TOOLCHAIN }}
|
||||
- name: Install Linux dependencies
|
||||
uses: ./.github/actions/install-linux-deps
|
||||
with:
|
||||
wayland: true
|
||||
xkb: true
|
||||
- name: Build and check cfg typos
|
||||
# See tools/ci/src/main.rs for the commands this runs
|
||||
run: cargo run -p ci -- cfg-check
|
||||
|
|
20
.github/workflows/docs.yml
vendored
20
.github/workflows/docs.yml
vendored
|
@ -58,8 +58,22 @@ jobs:
|
|||
- name: Build docs
|
||||
env:
|
||||
# needs to be in sync with [package.metadata.docs.rs]
|
||||
RUSTDOCFLAGS: -Zunstable-options --cfg=docsrs
|
||||
run: cargo doc --all-features --no-deps -p bevy -Zunstable-options -Zrustdoc-scrape-examples
|
||||
RUSTFLAGS: --cfg docsrs_dep
|
||||
RUSTDOCFLAGS: -Zunstable-options --cfg=docsrs --generate-link-to-definition
|
||||
run: |
|
||||
cargo doc \
|
||||
-Zunstable-options \
|
||||
-Zrustdoc-scrape-examples \
|
||||
--all-features \
|
||||
--workspace \
|
||||
--no-deps \
|
||||
--document-private-items \
|
||||
--exclude ci \
|
||||
--exclude errors \
|
||||
--exclude bevy_mobile_example \
|
||||
--exclude build-wasm-example \
|
||||
--exclude build-templated-pages \
|
||||
--exclude example-showcase
|
||||
|
||||
# This adds the following:
|
||||
# - A top level redirect to the bevy crate documentation
|
||||
|
@ -69,7 +83,7 @@ jobs:
|
|||
run: |
|
||||
echo "<meta http-equiv=\"refresh\" content=\"0; url=bevy/index.html\">" > target/doc/index.html
|
||||
echo "dev-docs.bevyengine.org" > target/doc/CNAME
|
||||
echo "User-Agent: *\nDisallow: /" > target/doc/robots.txt
|
||||
echo $'User-Agent: *\nDisallow: /' > target/doc/robots.txt
|
||||
rm target/doc/.lock
|
||||
|
||||
- name: Upload site artifact
|
||||
|
|
2
.github/workflows/validation-jobs.yml
vendored
2
.github/workflows/validation-jobs.yml
vendored
|
@ -209,7 +209,7 @@ jobs:
|
|||
npx playwright install --with-deps
|
||||
cd ../..
|
||||
|
||||
- name: First WASM build
|
||||
- name: First Wasm build
|
||||
run: |
|
||||
cargo build --release --example ui --target wasm32-unknown-unknown
|
||||
|
||||
|
|
2
.github/workflows/welcome.yml
vendored
2
.github/workflows/welcome.yml
vendored
|
@ -41,5 +41,5 @@ jobs:
|
|||
repo: context.repo.repo,
|
||||
body: `**Welcome**, new contributor!
|
||||
|
||||
Please make sure you've read our [contributing guide](https://github.com/bevyengine/bevy/blob/main/CONTRIBUTING.md) and we look forward to reviewing your pull request shortly ✨`
|
||||
Please make sure you've read our [contributing guide](https://bevyengine.org/learn/contribute/introduction) and we look forward to reviewing your pull request shortly ✨`
|
||||
})
|
||||
|
|
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -7,6 +7,7 @@ Cargo.lock
|
|||
/.idea
|
||||
/.vscode
|
||||
/benches/target
|
||||
/tools/compile_fail_utils/target
|
||||
dxcompiler.dll
|
||||
dxil.dll
|
||||
|
||||
|
|
6946
CHANGELOG.md
6946
CHANGELOG.md
File diff suppressed because it is too large
Load diff
466
CONTRIBUTING.md
466
CONTRIBUTING.md
|
@ -1,467 +1,3 @@
|
|||
# Contributing to Bevy
|
||||
|
||||
Hey, so you're interested in contributing to Bevy!
|
||||
Feel free to pitch in on whatever interests you and we'll be happy to help you contribute.
|
||||
|
||||
Check out our community's [Code of Conduct](https://github.com/bevyengine/bevy/blob/main/CODE_OF_CONDUCT.md) and feel free to say hi on [Discord] if you'd like.
|
||||
It's a nice place to chat about Bevy development, ask questions, and get to know the other contributors and users in a less formal setting.
|
||||
|
||||
Read on if you're looking for:
|
||||
|
||||
* The high-level design goals of Bevy.
|
||||
* Conventions and informal practices we follow when developing Bevy.
|
||||
* General advice on good open source collaboration practices.
|
||||
* Concrete ways you can help us, no matter your background or skill level.
|
||||
|
||||
We're thrilled to have you along as we build!
|
||||
|
||||
## Getting oriented
|
||||
|
||||
Bevy, like any general-purpose game engine, is a large project!
|
||||
It can be a bit overwhelming to start, so here's the bird's-eye view.
|
||||
|
||||
The [Bevy Engine Organization](https://github.com/bevyengine) has 4 primary repos:
|
||||
|
||||
1. [**`bevy`**](https://github.com/bevyengine/bevy): This is where the engine itself lives. The bulk of development work occurs here.
|
||||
2. [**`bevy-website`**](https://github.com/bevyengine/bevy-website): Where the [official website](https://bevyengine.org/), release notes, Bevy Book, and Bevy Assets are hosted. It is created using the Zola static site generator.
|
||||
3. [**`bevy-assets`**](https://github.com/bevyengine/bevy-assets): A collection of community-made tutorials, plugins, crates, games, and tools! Make a PR if you want to showcase your projects there!
|
||||
4. [**`rfcs`**](https://github.com/bevyengine/rfcs): A place to collaboratively build and reach consensus on designs for large or controversial features.
|
||||
|
||||
The `bevy` repo itself contains many smaller subcrates. Most of them can be used by themselves and many of them can be modularly replaced. This enables developers to pick and choose the parts of Bevy that they want to use.
|
||||
|
||||
Some crates of interest:
|
||||
|
||||
* [**`bevy_ecs`**](./crates/bevy_ecs): The core data model for Bevy. Most Bevy features are implemented on top of it. It is also fully functional as a stand-alone ECS, which can be very valuable if you're looking to integrate it with other game engines or use it for non-game executables.
|
||||
* [**`bevy_app`**](./crates/bevy_app): The api used to define Bevy Plugins and compose them together into Bevy Apps.
|
||||
* [**`bevy_tasks`**](./crates/bevy_tasks): Our light-weight async executor. This drives most async and parallel code in Bevy.
|
||||
* [**`bevy_render`**](./crates/bevy_render): Our core renderer API. It handles interaction with the GPU, such as the creation of Meshes, Textures, and Shaders. It also exposes a modular Render Graph for composing render pipelines. All 2D and 3D render features are implemented on top of this crate.
|
||||
|
||||
## What we're trying to build
|
||||
|
||||
Bevy is a completely free and open source game engine built in Rust. It currently has the following design goals:
|
||||
|
||||
* **Capable**: Offer a complete 2D and 3D feature set.
|
||||
* **Simple**: Easy for newbies to pick up, but infinitely flexible for power users.
|
||||
* **Data Focused**: Data-oriented architecture using the Entity Component System paradigm.
|
||||
* **Modular**: Use only what you need. Replace what you don't like.
|
||||
* **Fast**: App logic should run quickly, and when possible, in parallel.
|
||||
* **Productive**: Changes should compile quickly ... waiting isn't fun.
|
||||
|
||||
Bevy also currently has the following "development process" goals:
|
||||
|
||||
* **Rapid experimentation over API stability**: We need the freedom to experiment and iterate in order to build the best engine we can. This will change over time as APIs prove their staying power.
|
||||
* **Consistent vision**: The engine needs to feel consistent and cohesive. This takes precedence over democratic and/or decentralized processes. See our [*Bevy Organization doc*](/docs/the_bevy_organization.md) for more details.
|
||||
* **Flexibility over bureaucracy**: Developers should feel productive and unencumbered by development processes.
|
||||
* **Focus**: The Bevy Org should focus on building a small number of features excellently over merging every new community-contributed feature quickly. Sometimes this means pull requests will sit unmerged for a long time. This is the price of focus and we are willing to pay it. Fortunately Bevy is modular to its core. 3rd party plugins are a great way to work around this policy.
|
||||
* **User-facing API ergonomics come first**: Solid user experience should receive significant focus and investment. It should rarely be compromised in the interest of internal implementation details.
|
||||
* **Modularity over deep integration**: Individual crates and features should be "pluggable" whenever possible. Don't tie crates, features, or types together that don't need to be.
|
||||
* **Don't merge everything ... don't merge too early**: Every feature we add increases maintenance burden and compile times. Only merge features that are "generally" useful. Don't merge major changes or new features unless we have relative consensus that the design is correct *and* that we have the developer capacity to support it. When possible, make a 3rd party Plugin / crate first, then consider merging once the API has been tested in the wild. Bevy's modular structure means that the only difference between "official engine features" and "third party plugins" is our endorsement and the repo the code lives in. We should take advantage of that whenever possible.
|
||||
* **Control and consistency over 3rd party code reuse**: Only add a dependency if it is *absolutely* necessary. Every dependency we add decreases our autonomy and consistency. Dependencies also have the potential to increase compile times and risk pulling in sub-dependencies we don't want / need.
|
||||
* **Don't re-invent every wheel**: As a counter to the previous point, don't re-invent everything at all costs. If there is a crate in the Rust ecosystem that is the "de-facto" standard (ex: wgpu, winit, cpal), we should heavily consider using it. Bevy should be a positive force in the ecosystem. We should drive the improvements we need into these core ecosystem crates.
|
||||
* **Rust-first**: Engine and user-facing code should optimize and encourage Rust-only workflows. Adding additional languages increases internal complexity, fractures the Bevy ecosystem, and makes it harder for users to understand the engine. Never compromise a Rust interface in the interest of compatibility with other languages.
|
||||
* **Thoughtful public interfaces over maximal configurability**: Symbols and apis should be private by default. Every public API should be thoughtfully and consistently designed. Don't expose unnecessary internal implementation details. Don't allow users to "shoot themselves in the foot". Favor one "happy path" api over multiple apis for different use cases.
|
||||
* **Welcome new contributors**: Invest in new contributors. Help them fill knowledge and skill gaps. Don't ever gatekeep Bevy development according to notions of required skills or credentials. Help new developers find their niche.
|
||||
* **Civil discourse**: We need to collectively discuss ideas and the best ideas *should* win. But conversations need to remain respectful at all times. Remember that we're all in this together. Always follow our [Code of Conduct](https://github.com/bevyengine/bevy/blob/main/CODE_OF_CONDUCT.md).
|
||||
* **Test what you need to**: Write useful tests. Don't write tests that aren't useful. We *generally* aren't strict about unit testing every line of code. We don't want you to waste your time. But at the same time:
|
||||
* Most new features should have at least one minimal [example](https://github.com/bevyengine/bevy/tree/main/examples). These also serve as simple integration tests, as they are run as part of our CI process.
|
||||
* The more complex or "core" a feature is, the more strict we are about unit tests. Use your best judgement here. We will let you know if your pull request needs more tests. We use [Rust's built in testing framework](https://doc.rust-lang.org/book/ch11-01-writing-tests.html).
|
||||
|
||||
## The Bevy Organization
|
||||
|
||||
The Bevy Organization is the group of people responsible for stewarding the Bevy project. It handles things like merging pull requests, choosing project direction, managing bugs / issues / feature requests, running the Bevy website, controlling access to secrets, defining and enforcing best practices, etc.
|
||||
|
||||
Note that you *do not* need to be a member of the Bevy Organization to contribute to Bevy. Community contributors (this means you) can freely open issues, submit pull requests, and review pull requests.
|
||||
|
||||
Check out our dedicated [Bevy Organization document](/docs/the_bevy_organization.md) to learn more about how we're organized.
|
||||
|
||||
### Classifying PRs
|
||||
|
||||
[Labels](https://github.com/bevyengine/bevy/labels) are our primary tool to organize work.
|
||||
Each label has a prefix denoting its category:
|
||||
|
||||
* **D:** Difficulty. In order, these are:
|
||||
* `D-Trivial`: typos, obviously incorrect one-line bug fixes, code reorganization, renames
|
||||
* `D-Straightforward`: simple bug fixes and API improvements, docs, test and examples
|
||||
* `D-Modest`: new features, refactors, challenging bug fixes
|
||||
* `D-Complex`: rewrites and unusually complex features
|
||||
* When applied to an issue, these labels reflect the estimated level of expertise (not time) required to fix the issue.
|
||||
* When applied to a PR, these labels reflect the estimated level of expertise required to *review* the PR.
|
||||
* The `D-Domain-Expert` and `D-Domain-Agnostic` labels are modifiers, which describe if unusually high or low degrees of domain-specific knowledge are required.
|
||||
* The `D-Unsafe` label is applied to any code that touches `unsafe` Rust, which requires special skills and scrutiny.
|
||||
* **X:** Controversiality. In order, these are:
|
||||
* `X-Uncontroversial`: everyone should agree that this is a good idea
|
||||
* `X-Contentious`: there's real design thought needed to ensure that this is the right path forward
|
||||
* `X-Controversial`: there's active disagreement and/or large-scale architectural implications involved
|
||||
* `X-Blessed`: work that was controversial, but whose controversial (but perhaps not technical) elements have been endorsed by the relevant decision makers.
|
||||
* **A:** Area (e.g. A-Animation, A-ECS, A-Rendering, ...).
|
||||
* **C:** Category (e.g. C-Breaking-Change, C-Code-Quality, C-Docs, ...).
|
||||
* **O:** Operating System (e.g. O-Linux, O-Web, O-Windows, ...).
|
||||
* **P:** Priority (e.g. P-Critical, P-High, ...)
|
||||
* Most work is not explicitly categorized by priority: volunteer work mostly occurs on an ad hoc basis depending on contributor interests
|
||||
* **S:** Status (e.g. S-Blocked, S-Needs-Review, S-Needs-Design, ...).
|
||||
|
||||
The rules for how PRs get merged depend on their classification by controversy and difficulty.
|
||||
More difficult PRs will require more careful review from experts,
|
||||
while more controversial PRs will require rewrites to reduce the costs involved and/or sign-off from Subject Matter Experts and Maintainers.
|
||||
|
||||
When making PRs, try to split out more controversial changes from less controversial ones, in order to make your work easier to review and merge.
|
||||
It is also a good idea to try and split out simple changes from more complex changes if it is not helpful for them to be reviewed together.
|
||||
|
||||
Some things that are reason to apply the [`S-Controversial`] label to a PR:
|
||||
|
||||
1. Changes to a project-wide workflow or style.
|
||||
2. New architecture for a large feature.
|
||||
3. Serious tradeoffs were made.
|
||||
4. Heavy user impact.
|
||||
5. New ways for users to make mistakes (footguns).
|
||||
6. Adding a dependency.
|
||||
7. Touching licensing information (due to level of precision required).
|
||||
8. Adding root-level files (due to the high level of visibility).
|
||||
|
||||
Some things that are reason to apply the [`D-Complex`] label to a PR:
|
||||
|
||||
1. Introduction or modification of soundness relevant code (for example `unsafe` code).
|
||||
2. High levels of technical complexity.
|
||||
3. Large-scale code reorganization.
|
||||
|
||||
Examples of PRs that are not [`S-Controversial`] or [`D-Complex`]:
|
||||
|
||||
* Fixing dead links.
|
||||
* Removing dead code or unused dependencies.
|
||||
* Typo and grammar fixes.
|
||||
* [Add `Mut::reborrow`](https://github.com/bevyengine/bevy/pull/7114).
|
||||
* [Add `Res::clone`](https://github.com/bevyengine/bevy/pull/4109).
|
||||
|
||||
Examples of PRs that are [`S-Controversial`] but not [`D-Complex`]:
|
||||
|
||||
* [Implement and require `#[derive(Component)]` on all component structs](https://github.com/bevyengine/bevy/pull/2254).
|
||||
* [Use default serde impls for Entity](https://github.com/bevyengine/bevy/pull/6194).
|
||||
|
||||
Examples of PRs that are not [`S-Controversial`] but are [`D-Complex`]:
|
||||
|
||||
* [Ensure `Ptr`/`PtrMut`/`OwningPtr` are aligned in debug builds](https://github.com/bevyengine/bevy/pull/7117).
|
||||
* [Replace `BlobVec`'s `swap_scratch` with a `swap_nonoverlapping`](https://github.com/bevyengine/bevy/pull/4853).
|
||||
|
||||
Examples of PRs that are both [`S-Controversial`] and [`D-Complex`]:
|
||||
|
||||
* [bevy_reflect: Binary formats](https://github.com/bevyengine/bevy/pull/6140).
|
||||
|
||||
Some useful pull request queries:
|
||||
|
||||
* [PRs which need reviews and are not `D-Complex`](https://github.com/bevyengine/bevy/pulls?q=is%3Apr+-label%3AD-Complex+-label%3AS-Ready-For-Final-Review+-label%3AS-Blocked++).
|
||||
* [`D-Complex` PRs which need reviews](https://github.com/bevyengine/bevy/pulls?q=is%3Apr+label%3AD-Complex+-label%3AS-Ready-For-Final-Review+-label%3AS-Blocked).
|
||||
|
||||
[`S-Controversial`]: https://github.com/bevyengine/bevy/pulls?q=is%3Aopen+is%3Apr+label%3AS-Controversial
|
||||
[`D-Complex`]: https://github.com/bevyengine/bevy/pulls?q=is%3Aopen+is%3Apr+label%3AD-Complex
|
||||
|
||||
### Prioritizing PRs and issues
|
||||
|
||||
We use [Milestones](https://github.com/bevyengine/bevy/milestones) to track issues and PRs that:
|
||||
|
||||
* Need to be merged/fixed before the next release. This is generally for extremely bad bugs i.e. UB or important functionality being broken.
|
||||
* Would have higher user impact and are almost ready to be merged/fixed.
|
||||
|
||||
There are also two priority labels: [`P-Critical`](https://github.com/bevyengine/bevy/issues?q=is%3Aopen+is%3Aissue+label%3AP-Critical) and [`P-High`](https://github.com/bevyengine/bevy/issues?q=is%3Aopen+is%3Aissue+label%3AP-High) that can be used to find issues and PRs that need to be resolved urgently.
|
||||
|
||||
### Closing PRs and Issues
|
||||
|
||||
From time to time, PRs are unsuitable to be merged in a way that cannot be readily fixed.
|
||||
Rather than leaving these PRs open in limbo indefinitely, they should simply be closed.
|
||||
|
||||
This might happen if:
|
||||
|
||||
1. The PR is spam or malicious.
|
||||
2. The work has already been done elsewhere or is otherwise fully obsolete.
|
||||
3. The PR was successfully adopted.
|
||||
4. The work is particularly low quality, and the author is resistant to coaching.
|
||||
5. The work adds features or abstraction of limited value, especially in a way that could easily be recreated outside of the engine.
|
||||
6. The work has been sitting in review for so long and accumulated so many conflicts that it would be simpler to redo it from scratch.
|
||||
7. The PR is pointlessly large, and should be broken into multiple smaller PRs for easier review.
|
||||
|
||||
PRs that are `S-Adopt-Me` should be left open, but only if they're genuinely more useful to rebase rather than simply use as a reference.
|
||||
|
||||
There are several paths for PRs to be closed:
|
||||
|
||||
1. Obviously, authors may close their own PRs for any reason at any time.
|
||||
2. If a PR is clearly spam or malicious, anyone with triage rights is encouraged to close out the PR and report it to GitHub.
|
||||
3. If the work has already been done elsewhere, adopted or otherwise obsoleted, anyone with triage rights is encouraged to close out the PR with an explanatory comment.
|
||||
4. Anyone may nominate a PR for closure, by bringing it to the attention of the author and / or one of the SMEs / maintainers. Let them press the button, but this is generally well-received and helpful.
|
||||
5. SMEs or maintainers may and are encouraged to unilaterally close PRs that fall into one or more of the remaining categories.
|
||||
6. In the case of PRs where some members of the community (other than the author) are in favor and some are opposed, any two relevant SMEs or maintainers may act in concert to close the PR.
|
||||
|
||||
When closing a PR, check if it has an issue linked.
|
||||
If it does not, you should strongly consider creating an issue and linking the now-closed PR to help make sure the previous work can be discovered and credited.
|
||||
|
||||
## Making changes to Bevy
|
||||
|
||||
Most changes don't require much "process". If your change is relatively straightforward, just do the following:
|
||||
|
||||
1. A community member (that's you!) creates one of the following:
|
||||
* [GitHub Discussions]: An informal discussion with the community. This is the place to start if you want to propose a feature or specific implementation.
|
||||
* [Issue](https://github.com/bevyengine/bevy/issues): A formal way for us to track a bug or feature. Please look for duplicates before opening a new issue and consider starting with a Discussion.
|
||||
* [Pull Request](https://github.com/bevyengine/bevy/pulls) (or PR for short): A request to merge code changes. This starts our "review process". You are welcome to start with a pull request, but consider starting with an Issue or Discussion for larger changes (or if you aren't certain about a design). We don't want anyone to waste their time on code that didn't have a chance to be merged! But conversely, sometimes PRs are the most efficient way to propose a change. Just use your own judgement here.
|
||||
2. Other community members review and comment in an ad-hoc fashion. Active subject matter experts may be pulled into a thread using `@mentions`. If your PR has been quiet for a while and is ready for review, feel free to leave a message to "bump" the thread, or bring it up on [Discord](https://discord.gg/bevy) in an appropriate engine development channel.
|
||||
3. Once they're content with the pull request (design, code quality, documentation, tests), individual reviewers leave "Approved" reviews.
|
||||
4. After consensus has been reached (typically two approvals from the community or one for extremely simple changes) and CI passes, the [S-Ready-For-Final-Review](https://github.com/bevyengine/bevy/issues?q=is%3Aopen+is%3Aissue+label%3AS-Ready-For-Final-Review) label is added.
|
||||
5. When they find time, someone with merge rights performs a final code review and queues the PR for merging.
|
||||
|
||||
### Complex changes
|
||||
|
||||
Individual contributors often lead major new features and reworks. However these changes require more design work and scrutiny. Complex changes like this tend to go through the following lifecycle:
|
||||
|
||||
1. A need or opportunity is identified and an issue is made, laying out the general problem.
|
||||
2. As needed, this is discussed further on that issue thread, in cross-linked [GitHub Discussions] threads, or on [Discord] in the Engine Development channels.
|
||||
3. Either a Draft Pull Request or an RFC is made. As discussed in the [RFC repo](https://github.com/bevyengine/rfcs), complex features need RFCs, but these can be submitted before or after prototyping work has been started.
|
||||
4. If feasible, parts that work on their own (even if they're only useful once the full complex change is merged) get split out into individual PRs to make them easier to review.
|
||||
5. The community as a whole helps improve the Draft PR and/or RFC, leaving comments, making suggestions, and submitting pull requests to the original branch.
|
||||
6. Once the RFC is merged and/or the Draft Pull Request is transitioned out of draft mode, the [normal change process outlined in the previous section](#making-changes-to-bevy) can begin.
|
||||
|
||||
## How you can help
|
||||
|
||||
If you've made it to this page, you're probably already convinced that Bevy is a project you'd like to see thrive.
|
||||
But how can *you* help?
|
||||
|
||||
No matter your experience level with Bevy or Rust or your level of commitment, there are ways to meaningfully contribute.
|
||||
Take a look at the sections that follow to pick a route (or five) that appeal to you.
|
||||
|
||||
If you ever find yourself at a loss for what to do, or in need of mentorship or advice on how to contribute to Bevy, feel free to ask in [Discord] and one of our more experienced community members will be happy to help.
|
||||
|
||||
### Join a working group
|
||||
|
||||
Active initiatives in Bevy are organized into temporary working groups: choosing one of those and asking how to help can be a fantastic way to get up to speed and be immediately useful.
|
||||
|
||||
Working groups are public, open-membership groups that work together to tackle a broad-but-scoped initiative.
|
||||
The work that they do is coordinated in a forum-channel on [Discord](https://discord.gg/bevy), although they also create issues and may use project boards for tangible work that needs to be done.
|
||||
A list of existing working groups can be found in the [#working-groups](https://discord.com/channels/691052431525675048/1235758970703188008) channel.
|
||||
|
||||
There are no special requirements to be a member, and no formal membership list or leadership.
|
||||
Anyone can help, and you should expect to compromise and work together with others to bring a shared vision to life.
|
||||
Working groups are *spaces*, not clubs.
|
||||
|
||||
### Start a working group
|
||||
|
||||
When tackling a complex initiative, friends and allies can make things go much more smoothly.
|
||||
|
||||
To start a working group:
|
||||
|
||||
1. Decide what the working group is going to focus on. This should be tightly focused and achievable!
|
||||
2. Gather at least 3 people including yourself who are willing to be in the working group.
|
||||
3. Ping the `@Maintainer` role on Discord in [#engine-dev](https://discord.com/channels/691052431525675048/692572690833473578) announcing your mutual intent and a one or two sentence description of your plans.
|
||||
|
||||
The maintainers will briefly evaluate the proposal in consultation with the relevant SMEs and give you a thumbs up or down on whether this is something Bevy can and wants to explore right now.
|
||||
You don't need a concrete plan at this stage, just a sensible argument for both "why is this something that could be useful to Bevy" and "why there aren't any serious barriers in implementing this in the near future".
|
||||
If they're in favor, a maintainer will create a forum channel for you and you're off to the races.
|
||||
|
||||
Your initial task is writing up a design doc: laying out the scope of work and general implementation strategy.
|
||||
Here's a [solid example of a design doc](https://github.com/bevyengine/bevy/issues/12365), although feel free to use whatever format works best for your team.
|
||||
|
||||
Once that's ready, get a sign-off on the broad vision and goals from the appropriate SMEs and maintainers.
|
||||
This is the primary review step: maintainers and SMEs should be broadly patient and supportive even if they're skeptical until a proper design doc is in hand to evaluate.
|
||||
|
||||
With a sign-off in hand, post the design doc to [GitHub Discussions](https://github.com/bevyengine/bevy/discussions) with the [`C-Design-Doc` label](https://github.com/bevyengine/bevy/discussions?discussions_q=is%3Aopen+label%3A%22C-Design+Doc%22) for archival purposes and begin work on implementation.
|
||||
Post PRs that you need reviews on in your group's forum thread, ask for advice, and share the load.
|
||||
Controversial PRs are still `S-Controversial`, but with a sign-off-in-principle, things should go more smoothly.
|
||||
|
||||
If work peters out and the initiative dies, maintainers can wind down working groups (in consultation with SMEs and the working group itself).
|
||||
This is normal and expected: projects fail for all sorts of reasons!
|
||||
However, it's important to both keep the number of working groups relatively small and ensure they're active:
|
||||
they serve a vital role in onboarding new contributors.
|
||||
|
||||
Once your implementation work laid out in your initial design doc is complete, it's time to wind down the working group.
|
||||
Feel free to make another one though to tackle the next step in your grand vision!
|
||||
|
||||
### Battle-testing Bevy
|
||||
|
||||
Ultimately, Bevy is a tool that's designed to help people make cool games.
|
||||
By using Bevy, you can help us catch bugs, prioritize new features, polish off the rough edges, and promote the project.
|
||||
|
||||
If you need help, don't hesitate to ask on [GitHub Discussions], [Discord], or [reddit](https://www.reddit.com/r/bevy). Generally you should prefer asking questions as [GitHub Discussions] as they are more searchable.
|
||||
|
||||
When you think you've found a bug, missing documentation, or a feature that would help you make better games, please [file an issue](https://github.com/bevyengine/bevy/issues/new/choose) on the main `bevy` repo.
|
||||
|
||||
Do your best to search for duplicate issues, but if you're unsure, open a new issue and link to other related issues on the thread you make.
|
||||
|
||||
Once you've made something that you're proud of, feel free to drop a link, video, or screenshot in `#showcase` on [Discord]!
|
||||
If you release a game on [itch.io](https://itch.io/games/tag-bevy) we'd be thrilled if you tagged it with `bevy`.
|
||||
|
||||
### Teaching others
|
||||
|
||||
Bevy is still very young, and light on documentation, tutorials, and accumulated expertise.
|
||||
By helping others with their issues, and teaching them about Bevy, you will naturally learn the engine and codebase in greater depth while also making our community better!
|
||||
|
||||
Some of the best ways to do this are:
|
||||
|
||||
* Answering questions on [GitHub Discussions], [Discord], and [reddit](https://www.reddit.com/r/bevy).
|
||||
* Writing tutorials, guides, and other informal documentation and sharing them on [Bevy Assets](https://github.com/bevyengine/bevy-assets).
|
||||
* Streaming, writing blog posts about creating your game, and creating videos. Share these in the `#devlogs` channel on [Discord]!
|
||||
|
||||
### Writing plugins
|
||||
|
||||
You can improve Bevy's ecosystem by building your own Bevy Plugins and crates.
|
||||
|
||||
Non-trivial, reusable functionality that works well with itself is a good candidate for a plugin.
|
||||
If it's closer to a snippet or design pattern, you may want to share it with the community on [Discord], Reddit, or [GitHub Discussions] instead.
|
||||
|
||||
Check out our [plugin guidelines](https://bevyengine.org/learn/book/plugin-development/) for helpful tips and patterns.
|
||||
|
||||
### Fixing bugs
|
||||
|
||||
Bugs in Bevy (or the associated website / book) are filed on the issue tracker using the [`C-Bug`](https://github.com/bevyengine/bevy/issues?q=is%3Aissue+is%3Aopen+label%3AC-Bug) label.
|
||||
|
||||
If you're looking for an easy place to start, take a look at the [`D-Good-First-Issue`](https://github.com/bevyengine/bevy/issues?q=is%3Aopen+is%3Aissue+label%3AD-Good-First-Issue) label, and feel free to ask questions on that issue's thread or on [Discord].
|
||||
You don't need anyone's permission to try fixing a bug or adding a simple feature, but stating that you'd like to tackle an issue can be helpful to avoid duplicated work.
|
||||
|
||||
When you make a pull request that fixes an issue, include a line that says `Fixes #X` (or "Closes"), where `X` is the issue number.
|
||||
This will cause the issue in question to be closed when your PR is merged.
|
||||
|
||||
General improvements to code quality are also welcome!
|
||||
Bevy can always be safer, better tested, and more idiomatic.
|
||||
|
||||
### Writing docs
|
||||
|
||||
Like every other large, rapidly developing open source library you've ever used, Bevy's documentation can always use improvement.
|
||||
This is incredibly valuable, easily distributed work, but requires a bit of guidance:
|
||||
|
||||
* Inaccurate documentation is worse than no documentation: prioritize fixing broken docs.
|
||||
* Bevy is remarkably unstable: before tackling a new major documentation project, check in with the community on Discord or GitHub (making an issue about specific missing docs is a great way to plan) about the stability of that feature and upcoming plans to save yourself heartache.
|
||||
* Code documentation (doc examples and in the examples folder) is easier to maintain because the compiler will tell us when it breaks.
|
||||
* Inline documentation should be technical and to the point. Link relevant examples or other explanations if broader context is useful.
|
||||
* The Bevy book is hosted on the `bevy-website` repo and targeted towards beginners who are just getting to know Bevy (and perhaps Rust!).
|
||||
* Accepted RFCs are not documentation: they serve only as a record of accepted decisions.
|
||||
|
||||
[docs.rs](https://docs.rs/bevy) is built from the last release's documentation, which is written right in-line directly above the code it documents.
|
||||
To view the current docs on `main` before you contribute, clone the `bevy` repo, and run `cargo doc --open` or go to [dev-docs.bevyengine.org](https://dev-docs.bevyengine.org/),
|
||||
which has the latest API reference built from the repo on every commit made to the `main` branch.
|
||||
|
||||
### Writing examples
|
||||
|
||||
Most [examples in Bevy](https://github.com/bevyengine/bevy/tree/main/examples) aim to clearly demonstrate a single feature, group of closely related small features, or show how to accomplish a particular task (such as asset loading, creating a custom shader or testing your app).
|
||||
In rare cases, creating new "game" examples is justified in order to demonstrate new features that open a complex class of functionality in a way that's hard to demonstrate in isolation or requires additional integration testing.
|
||||
|
||||
Examples in Bevy should be:
|
||||
|
||||
1. **Working:** They must compile and run, and any introduced errors in them should be obvious (through tests, simple results or clearly displayed behavior).
|
||||
2. **Clear:** They must use descriptive variable names, be formatted, and be appropriately commented. Try your best to showcase best practices when it doesn't obscure the point of the example.
|
||||
3. **Relevant:** They should explain, through comments or variable names, what they do and how this can be useful to a game developer.
|
||||
4. **Minimal:** They should be no larger or more complex than is needed to meet the goals of the example.
|
||||
|
||||
When you add a new example, be sure to update `examples/README.md` with the new example and add it to the root `Cargo.toml` file.
|
||||
Run `cargo run -p build-templated-pages -- build-example-page` to do this automatically.
|
||||
Use a generous sprinkling of keywords in your description: these are commonly used to search for a specific example.
|
||||
See the [example style guide](.github/contributing/example_style_guide.md) to help make sure the style of your example matches what we're already using.
|
||||
|
||||
More complex demonstrations of functionality are also welcome, but these should be submitted to [bevy-assets](https://github.com/bevyengine/bevy-assets).
|
||||
|
||||
### Reviewing others' work
|
||||
|
||||
With the sheer volume of activity in Bevy's community, reviewing others work with the aim of improving it is one of the most valuable things you can do.
|
||||
You don't need to be an Elder Rustacean to be useful here: anyone can catch missing tests, unclear docs, logic errors, and so on.
|
||||
If you have specific skills (e.g. advanced familiarity with `unsafe` code, rendering knowledge or web development experience) or personal experience with a problem, try to prioritize those areas to ensure we can get appropriate expertise where we need it.
|
||||
|
||||
When you find (or make) a PR that you don't feel comfortable reviewing, but you *can* think of someone who does, consider using GitHub's "Request review" functionality (in the top-right of the PR screen) to bring the work to their attention.
|
||||
If they're not a Bevy Org member, you'll need to ping them in the thread directly: that's fine too!
|
||||
Almost everyone working on Bevy is a volunteer: this should be treated as a gentle nudge, rather than an assignment of work.
|
||||
Consider checking the Git history for appropriate reviewers, or ask on Discord for suggestions.
|
||||
|
||||
Focus on giving constructive, actionable feedback that results in real improvements to code quality or end-user experience.
|
||||
If you don't understand why an approach was taken, please ask!
|
||||
|
||||
Provide actual code suggestions when that is helpful. Small changes work well as comments or in-line suggestions on specific lines of code.
|
||||
Larger changes deserve a comment in the main thread, or a pull request to the original author's branch (but please mention that you've made one).
|
||||
When in doubt about a matter of architectural philosophy, refer back to [*What we're trying to build*](#what-were-trying-to-build) for guidance.
|
||||
|
||||
Once you're happy with the work and feel you're reasonably qualified to assess quality in this particular area, leave your `Approved` review on the PR.
|
||||
If you're new to GitHub, check out the [Pull Request Review documentation](https://docs.github.com/en/github/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews).
|
||||
**Anyone** can and should leave reviews ... no special permissions are required!
|
||||
|
||||
It's okay to leave an approval even if you aren't 100% confident on all areas of the PR: just be sure to note your limitations.
|
||||
When maintainers are evaluating the PR to be merged, they'll make sure that there's good coverage on all of the critical areas.
|
||||
If you can only check that the math is correct, and another reviewer can check everything *but* the math, we're in good shape!
|
||||
|
||||
Similarly, if there are areas that would be *good* to fix but aren't severe, please consider leaving an approval.
|
||||
The author can address them immediately, or spin it out into follow-up issues or PRs.
|
||||
Large PRs are much more draining for both reviewers and authors, so try to push for a smaller scope with clearly tracked follow-ups.
|
||||
|
||||
There are three main places you can check for things to review:
|
||||
|
||||
1. Pull requests which are ready and in need of more reviews on [bevy](https://github.com/bevyengine/bevy/pulls?q=is%3Aopen+is%3Apr+-label%3AS-Ready-For-Final-Review+-draft%3A%3Atrue+-label%3AS-Needs-RFC+-reviewed-by%3A%40me+-author%3A%40me).
|
||||
2. Pull requests on [bevy](https://github.com/bevyengine/bevy/pulls) and the [bevy-website](https://github.com/bevyengine/bevy-website/pulls) repos.
|
||||
3. [RFCs](https://github.com/bevyengine/rfcs), which need extensive thoughtful community input on their design.
|
||||
|
||||
Not even our Project Leads and Maintainers are exempt from reviews and RFCs!
|
||||
By giving feedback on this work (and related supporting work), you can help us make sure our releases are both high-quality and timely.
|
||||
|
||||
Finally, if nothing brings you more satisfaction than seeing every last issue labeled and all resolved issues closed, feel free to message the Project Lead (currently @cart) for a Bevy org role to help us keep things tidy.
|
||||
As discussed in our [*Bevy Organization doc*](/docs/the_bevy_organization.md), this role only requires good faith and a basic understanding of our development process.
|
||||
|
||||
### How to adopt pull requests
|
||||
|
||||
Occasionally authors of pull requests get busy or become unresponsive, or project members fail to reply in a timely manner.
|
||||
This is a natural part of any open source project.
|
||||
To avoid blocking these efforts, these pull requests may be *adopted*, where another contributor creates a new pull request with the same content.
|
||||
If there is an old pull request that has gone without updates, ask the organization whether it is appropriate to add the
|
||||
*[S-Adopt-Me](https://github.com/bevyengine/bevy/labels/S-Adopt-Me)* label, to indicate that it can be *adopted*.
|
||||
If you plan on adopting a PR yourself, you can also leave a comment on the PR asking the author if they plan on returning.
|
||||
If the author gives permission or simply doesn't respond after a few days, then it can be adopted.
|
||||
This may sometimes even skip the labeling process since at that point the PR has been adopted by you.
|
||||
|
||||
With this label added, it's best practice to fork the original author's branch.
|
||||
This ensures that they still get credit for working on it and that the commit history is retained.
|
||||
When the new pull request is ready, it should reference the original PR in the description.
|
||||
Then notify org members to close the original.
|
||||
|
||||
* For example, you can reference the original PR by adding the following to your PR description:
|
||||
|
||||
`Adopted #number-original-pull-request`
|
||||
|
||||
### Contributing code
|
||||
|
||||
Bevy is actively open to code contributions from community members.
|
||||
If you're new to Bevy, here's the workflow we use:
|
||||
|
||||
1. Fork the `bevyengine/bevy` repository on GitHub. You'll need to create a GitHub account if you don't have one already.
|
||||
2. Make your changes in a local clone of your fork, typically in its own new branch.
|
||||
1. Try to split your work into separate commits, each with a distinct purpose. Be particularly mindful of this when responding to reviews so it's easy to see what's changed.
|
||||
2. Tip: [You can set up a global `.gitignore` file](https://docs.github.com/en/get-started/getting-started-with-git/ignoring-files#configuring-ignored-files-for-all-repositories-on-your-computer) to exclude your operating system/text editor's special/temporary files. (e.g. `.DS_Store`, `thumbs.db`, `*~`, `*.swp` or `*.swo`) This allows us to keep the `.gitignore` file in the repo uncluttered.
|
||||
3. To test CI validations locally, run the `cargo run -p ci` command. This will run most checks that happen in CI, but can take some time. You can also run sub-commands to iterate faster depending on what you're contributing:
|
||||
* `cargo run -p ci -- lints` - to run formatting and clippy.
|
||||
* `cargo run -p ci -- test` - to run tests.
|
||||
* `cargo run -p ci -- doc` - to run doc tests and doc checks.
|
||||
* `cargo run -p ci -- compile` - to check that everything that must compile still does (examples and benches), and that some that shouldn't still don't ([`crates/bevy_ecs_compile_fail_tests`](./crates/bevy_ecs_compile_fail_tests)).
|
||||
* to get more information on commands available and what is run, check the [tools/ci crate](./tools/ci).
|
||||
4. When working with Markdown (`.md`) files, Bevy's CI will check markdown files (like this one) using [markdownlint](https://github.com/DavidAnson/markdownlint).
|
||||
To locally lint your files using the same workflow as our CI:
|
||||
1. Install [markdownlint-cli](https://github.com/igorshubovych/markdownlint-cli).
|
||||
2. Run `markdownlint -f -c .github/linters/.markdown-lint.yml .` in the root directory of the Bevy project.
|
||||
5. When working with TOML (`.toml`) files, Bevy's CI will check toml files using [taplo](https://taplo.tamasfe.dev/): `taplo fmt --check --diff`
|
||||
1. If you use VSCode, install [Even better toml](https://marketplace.visualstudio.com/items?itemName=tamasfe.even-better-toml) and format your files.
|
||||
2. If you want to use the cli tool, install [taplo-cli](https://taplo.tamasfe.dev/cli/installation/cargo.html) and run `taplo fmt --check --diff` to check for the formatting. Fix any issues by running `taplo fmt` in the root directory of the Bevy project.
|
||||
6. Check for typos. Bevy's CI will check for them using [typos](https://github.com/crate-ci/typos).
|
||||
1. If you use VSCode, install [Typos Spell Checker](https://marketplace.visualstudio.com/items?itemName=tekumara.typos-vscode).
|
||||
2. You can also use the cli tool. Install [typos-cli](https://github.com/crate-ci/typos?tab=readme-ov-file#install) and run `typos` to check for typos, and fix them by running `typos -w`.
|
||||
7. Push your changes to your fork on GitHub and open a Pull Request.
|
||||
8. Respond to any CI failures or review feedback. While CI failures must be fixed before we can merge your PR, you do not need to *agree* with all feedback from your reviews, merely acknowledge that it was given. If you cannot come to an agreement, leave the thread open and defer to a Maintainer or Project Lead's final judgement.
|
||||
9. When your PR is ready to merge, a Maintainer or Project Lead will review it and suggest final changes. If those changes are minimal they may even apply them directly to speed up merging.
|
||||
|
||||
If you end up adding a new official Bevy crate to the `bevy` repo:
|
||||
|
||||
1. Add the new crate to the [./tools/publish.sh](./tools/publish.sh) file.
|
||||
2. Check if a new cargo feature was added, update [cargo_features.md](https://github.com/bevyengine/bevy/blob/main/docs/cargo_features.md) as needed.
|
||||
|
||||
When contributing, please:
|
||||
|
||||
* Try to loosely follow the workflow in [*Making changes to Bevy*](#making-changes-to-bevy).
|
||||
* Consult the [style guide](.github/contributing/engine_style_guide.md) to help keep our code base tidy.
|
||||
* Explain what you're doing and why.
|
||||
* Document new code with doc comments.
|
||||
* Include clear, simple tests.
|
||||
* Add or improve the examples when adding new user-facing functionality.
|
||||
* Break work into digestible chunks.
|
||||
* Ask for any help that you need!
|
||||
|
||||
Your first PR will be merged in no time!
|
||||
|
||||
No matter how you're helping: thanks for contributing to Bevy!
|
||||
|
||||
[GitHub Discussions]: https://github.com/bevyengine/bevy/discussions "GitHub Discussions"
|
||||
[Discord]: https://discord.gg/bevy "Discord"
|
||||
Hey, we've moved our information on contributing to Bevy's website [here](https://bevyengine.org/learn/contribute/introduction). Go give it a read, and thanks for contributing!
|
||||
|
|
156
Cargo.toml
156
Cargo.toml
|
@ -31,14 +31,14 @@ members = [
|
|||
]
|
||||
|
||||
[workspace.lints.clippy]
|
||||
type_complexity = "allow"
|
||||
doc_markdown = "warn"
|
||||
manual_let_else = "warn"
|
||||
undocumented_unsafe_blocks = "warn"
|
||||
redundant_else = "warn"
|
||||
match_same_arms = "warn"
|
||||
semicolon_if_nothing_returned = "warn"
|
||||
redundant_closure_for_method_calls = "warn"
|
||||
redundant_else = "warn"
|
||||
semicolon_if_nothing_returned = "warn"
|
||||
type_complexity = "allow"
|
||||
undocumented_unsafe_blocks = "warn"
|
||||
unwrap_or_default = "warn"
|
||||
|
||||
ptr_as_ptr = "warn"
|
||||
|
@ -46,9 +46,11 @@ ptr_cast_constness = "warn"
|
|||
ref_as_ptr = "warn"
|
||||
|
||||
[workspace.lints.rust]
|
||||
unsafe_op_in_unsafe_fn = "warn"
|
||||
missing_docs = "warn"
|
||||
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(docsrs_dep)'] }
|
||||
unsafe_code = "deny"
|
||||
unsafe_op_in_unsafe_fn = "warn"
|
||||
unused_qualifications = "warn"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
@ -111,9 +113,6 @@ bevy_core_pipeline = [
|
|||
"bevy_render",
|
||||
]
|
||||
|
||||
# Plugin for dynamic loading (using [libloading](https://crates.io/crates/libloading))
|
||||
bevy_dynamic_plugin = ["bevy_internal/bevy_dynamic_plugin"]
|
||||
|
||||
# Adds gamepad support
|
||||
bevy_gilrs = ["bevy_internal/bevy_gilrs"]
|
||||
|
||||
|
@ -185,9 +184,6 @@ trace_tracy_memory = [
|
|||
# Tracing support
|
||||
trace = ["bevy_internal/trace"]
|
||||
|
||||
# Save a trace of all wgpu calls
|
||||
wgpu_trace = ["bevy_internal/wgpu_trace"]
|
||||
|
||||
# EXR image format support
|
||||
exr = ["bevy_internal/exr"]
|
||||
|
||||
|
@ -352,13 +348,16 @@ ios_simulator = ["bevy_internal/ios_simulator"]
|
|||
# Enable built in global state machines
|
||||
bevy_state = ["bevy_internal/bevy_state"]
|
||||
|
||||
# Enables source location tracking for change detection, which can assist with debugging
|
||||
track_change_detection = ["bevy_internal/track_change_detection"]
|
||||
|
||||
# Enable function reflection
|
||||
reflect_functions = ["bevy_internal/reflect_functions"]
|
||||
|
||||
[dependencies]
|
||||
bevy_internal = { path = "crates/bevy_internal", version = "0.15.0-dev", default-features = false }
|
||||
|
||||
# WASM does not support dynamic linking.
|
||||
# Wasm does not support dynamic linking.
|
||||
[target.'cfg(not(target_family = "wasm"))'.dependencies]
|
||||
bevy_dylib = { path = "crates/bevy_dylib", version = "0.15.0-dev", default-features = false, optional = true }
|
||||
|
||||
|
@ -370,6 +369,7 @@ flate2 = "1.0"
|
|||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
bytemuck = "1.7"
|
||||
bevy_render = { path = "crates/bevy_render", version = "0.15.0-dev", default-features = false }
|
||||
# Needed to poll Task examples
|
||||
futures-lite = "2.0.1"
|
||||
async-std = "1.12"
|
||||
|
@ -608,6 +608,17 @@ description = "Demonstrates transparency in 2d"
|
|||
category = "2D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "mesh2d_alpha_mode"
|
||||
path = "examples/2d/mesh2d_alpha_mode.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.mesh2d_alpha_mode]
|
||||
name = "Mesh2d Alpha Mode"
|
||||
description = "Used to test alpha modes with mesh2d"
|
||||
category = "2D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "pixel_grid_snap"
|
||||
path = "examples/2d/pixel_grid_snap.rs"
|
||||
|
@ -772,7 +783,8 @@ doc-scrape-examples = true
|
|||
name = "Lines"
|
||||
description = "Create a custom material to draw 3d lines"
|
||||
category = "3D Rendering"
|
||||
wasm = true
|
||||
# Wasm does not support the `POLYGON_MODE_LINE` feature.
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "ssao"
|
||||
|
@ -1076,7 +1088,7 @@ setup = [
|
|||
"curl",
|
||||
"-o",
|
||||
"assets/models/bunny.meshlet_mesh",
|
||||
"https://raw.githubusercontent.com/JMS55/bevy_meshlet_asset/bd869887bc5c9c6e74e353f657d342bef84bacd8/bunny.meshlet_mesh",
|
||||
"https://raw.githubusercontent.com/JMS55/bevy_meshlet_asset/b6c712cfc87c65de419f856845401aba336a7bcd/bunny.meshlet_mesh",
|
||||
],
|
||||
]
|
||||
|
||||
|
@ -1603,6 +1615,17 @@ description = "Shows how to create a custom diagnostic"
|
|||
category = "Diagnostics"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "enabling_disabling_diagnostic"
|
||||
path = "examples/diagnostics/enabling_disabling_diagnostic.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.enabling_disabling_diagnostic]
|
||||
name = "Enabling/disabling diagnostic"
|
||||
description = "Shows how to disable/re-enable a Diagnostic during runtime"
|
||||
category = "Diagnostics"
|
||||
wasm = true
|
||||
|
||||
# ECS (Entity Component System)
|
||||
[[example]]
|
||||
name = "ecs_guide"
|
||||
|
@ -1622,13 +1645,14 @@ category = "ECS (Entity Component System)"
|
|||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "component_change_detection"
|
||||
path = "examples/ecs/component_change_detection.rs"
|
||||
name = "change_detection"
|
||||
path = "examples/ecs/change_detection.rs"
|
||||
doc-scrape-examples = true
|
||||
required-features = ["track_change_detection"]
|
||||
|
||||
[package.metadata.example.component_change_detection]
|
||||
name = "Component Change Detection"
|
||||
description = "Change detection on components"
|
||||
[package.metadata.example.change_detection]
|
||||
name = "Change Detection"
|
||||
description = "Change detection on components and resources"
|
||||
category = "ECS (Entity Component System)"
|
||||
wasm = false
|
||||
|
||||
|
@ -2404,6 +2428,17 @@ description = "A shader that shows how to bind and sample multiple textures as a
|
|||
category = "Shaders"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "specialized_mesh_pipeline"
|
||||
path = "examples/shader/specialized_mesh_pipeline.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.specialized_mesh_pipeline]
|
||||
name = "Specialized Mesh Pipeline"
|
||||
description = "Demonstrates how to write a specialized mesh pipeline"
|
||||
category = "Shaders"
|
||||
wasm = true
|
||||
|
||||
# Stress tests
|
||||
[[package.metadata.example_category]]
|
||||
name = "Stress Tests"
|
||||
|
@ -2993,6 +3028,14 @@ description = "Demonstrates customizing default window settings"
|
|||
category = "Window"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "ambiguity_detection"
|
||||
path = "tests/ecs/ambiguity_detection.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.ambiguity_detection]
|
||||
hidden = true
|
||||
|
||||
[[example]]
|
||||
name = "resizing"
|
||||
path = "tests/window/resizing.rs"
|
||||
|
@ -3050,6 +3093,17 @@ description = "Demonstrates creating and using custom Ui materials"
|
|||
category = "UI (User Interface)"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "cubic_splines"
|
||||
path = "examples/math/cubic_splines.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.cubic_splines]
|
||||
name = "Cubic Splines"
|
||||
description = "Exhibits different modes of constructing cubic curves using splines"
|
||||
category = "Math"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "render_primitives"
|
||||
path = "examples/math/render_primitives.rs"
|
||||
|
@ -3308,6 +3362,17 @@ description = "Demonstrates fog volumes"
|
|||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "scrolling_fog"
|
||||
path = "examples/3d/scrolling_fog.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.scrolling_fog]
|
||||
name = "Scrolling fog"
|
||||
description = "Demonstrates how to create the effect of fog moving in the wind"
|
||||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "physics_in_fixed_timestep"
|
||||
path = "examples/movement/physics_in_fixed_timestep.rs"
|
||||
|
@ -3330,6 +3395,41 @@ description = "Demonstrates the built-in postprocessing features"
|
|||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "rotate_environment_map"
|
||||
path = "examples/3d/rotate_environment_map.rs"
|
||||
doc-scrape-examples = true
|
||||
required-features = ["pbr_multi_layer_material_textures"]
|
||||
|
||||
[package.metadata.example.rotate_environment_map]
|
||||
name = "Rotate Environment Map"
|
||||
description = "Demonstrates how to rotate the skybox and the environment map simultaneously"
|
||||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "simple_picking"
|
||||
path = "examples/picking/simple_picking.rs"
|
||||
doc-scrape-examples = true
|
||||
required-features = ["bevy_picking"]
|
||||
|
||||
[package.metadata.example.simple_picking]
|
||||
name = "Showcases simple picking events and usage"
|
||||
description = "Demonstrates how to use picking events to spawn simple objects"
|
||||
category = "Picking"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "sprite_picking"
|
||||
path = "examples/picking/sprite_picking.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.sprite_picking]
|
||||
name = "Sprite Picking"
|
||||
description = "Demonstrates picking sprites and sprite atlases"
|
||||
category = "Picking"
|
||||
wasm = true
|
||||
|
||||
[profile.wasm-release]
|
||||
inherits = "release"
|
||||
opt-level = "z"
|
||||
|
@ -3342,6 +3442,22 @@ lto = "fat"
|
|||
panic = "abort"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
# This cfg is needed so that #[doc(fake_variadic)] is correctly propagated for
|
||||
# impls for re-exported traits. See https://github.com/rust-lang/cargo/issues/8811
|
||||
# for details on why this is needed. Since dependencies don't expect to be built
|
||||
# with `--cfg docsrs` (and thus fail to compile) we use a different cfg.
|
||||
rustc-args = ["--cfg", "docsrs_dep"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
cargo-args = ["-Zunstable-options", "-Zrustdoc-scrape-examples"]
|
||||
|
||||
[[example]]
|
||||
name = "monitor_info"
|
||||
path = "examples/window/monitor_info.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.monitor_info]
|
||||
name = "Monitor info"
|
||||
description = "Displays information about available monitors (displays)."
|
||||
category = "Window"
|
||||
wasm = false
|
||||
|
|
48
assets/shaders/specialized_mesh_pipeline.wgsl
Normal file
48
assets/shaders/specialized_mesh_pipeline.wgsl
Normal file
|
@ -0,0 +1,48 @@
|
|||
//! Very simple shader used to demonstrate how to get the world position and pass data
|
||||
//! between the vertex and fragment shader. Also shows the custom vertex layout.
|
||||
|
||||
// First we import everything we need from bevy_pbr
|
||||
// A 2d shader would be vevry similar but import from bevy_sprite instead
|
||||
#import bevy_pbr::{
|
||||
mesh_functions,
|
||||
view_transformations::position_world_to_clip
|
||||
}
|
||||
|
||||
struct Vertex {
|
||||
// This is needed if you are using batching and/or gpu preprocessing
|
||||
// It's a built in so you don't need to define it in the vertex layout
|
||||
@builtin(instance_index) instance_index: u32,
|
||||
// Like we defined for the vertex layout
|
||||
// position is at location 0
|
||||
@location(0) position: vec3<f32>,
|
||||
// and color at location 1
|
||||
@location(1) color: vec4<f32>,
|
||||
};
|
||||
|
||||
// This is the output of the vertex shader and we also use it as the input for the fragment shader
|
||||
struct VertexOutput {
|
||||
@builtin(position) clip_position: vec4<f32>,
|
||||
@location(0) world_position: vec4<f32>,
|
||||
@location(1) color: vec3<f32>,
|
||||
};
|
||||
|
||||
@vertex
|
||||
fn vertex(vertex: Vertex) -> VertexOutput {
|
||||
var out: VertexOutput;
|
||||
// This is how bevy computes the world position
|
||||
// The vertex.instance_index is very important. Esepecially if you are using batching and gpu preprocessing
|
||||
var world_from_local = mesh_functions::get_world_from_local(vertex.instance_index);
|
||||
out.world_position = mesh_functions::mesh_position_local_to_world(world_from_local, vec4(vertex.position, 1.0));
|
||||
out.clip_position = position_world_to_clip(out.world_position.xyz);
|
||||
|
||||
// We just use the raw vertex color
|
||||
out.color = vertex.color.rgb;
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
@fragment
|
||||
fn fragment(in: VertexOutput) -> @location(0) vec4<f32> {
|
||||
// output the color directly
|
||||
return vec4(in.color, 1.0);
|
||||
}
|
BIN
assets/volumes/fog_noise.ktx2
Normal file
BIN
assets/volumes/fog_noise.ktx2
Normal file
Binary file not shown.
|
@ -6,20 +6,23 @@ publish = false
|
|||
license = "MIT OR Apache-2.0"
|
||||
|
||||
[dev-dependencies]
|
||||
glam = "0.27"
|
||||
glam = "0.28"
|
||||
rand = "0.8"
|
||||
rand_chacha = "0.3"
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
bevy_app = { path = "../crates/bevy_app" }
|
||||
bevy_ecs = { path = "../crates/bevy_ecs", features = ["multi_threaded"] }
|
||||
bevy_hierarchy = { path = "../crates/bevy_hierarchy" }
|
||||
bevy_internal = { path = "../crates/bevy_internal" }
|
||||
bevy_math = { path = "../crates/bevy_math" }
|
||||
bevy_reflect = { path = "../crates/bevy_reflect" }
|
||||
bevy_reflect = { path = "../crates/bevy_reflect", features = ["functions"] }
|
||||
bevy_render = { path = "../crates/bevy_render" }
|
||||
bevy_tasks = { path = "../crates/bevy_tasks" }
|
||||
bevy_utils = { path = "../crates/bevy_utils" }
|
||||
|
||||
# make bevy_render compile on linux. x11 vs wayland does not matter here as the benches do not actually use a window
|
||||
[target.'cfg(target_os = "linux")'.dev-dependencies]
|
||||
bevy_winit = { path = "../crates/bevy_winit", features = ["x11"] }
|
||||
|
||||
[profile.release]
|
||||
opt-level = 3
|
||||
lto = true
|
||||
|
@ -34,6 +37,11 @@ name = "ecs"
|
|||
path = "benches/bevy_ecs/benches.rs"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "reflect_function"
|
||||
path = "benches/bevy_reflect/function.rs"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "reflect_list"
|
||||
path = "benches/bevy_reflect/list.rs"
|
||||
|
|
|
@ -2,6 +2,7 @@ use criterion::criterion_main;
|
|||
|
||||
mod components;
|
||||
mod events;
|
||||
mod fragmentation;
|
||||
mod iteration;
|
||||
mod observers;
|
||||
mod scheduling;
|
||||
|
@ -11,6 +12,7 @@ criterion_main!(
|
|||
components::components_benches,
|
||||
events::event_benches,
|
||||
iteration::iterations_benches,
|
||||
fragmentation::fragmentation_benches,
|
||||
observers::observer_benches,
|
||||
scheduling::scheduling_benches,
|
||||
world::world_benches,
|
||||
|
|
99
benches/benches/bevy_ecs/fragmentation/mod.rs
Normal file
99
benches/benches/bevy_ecs/fragmentation/mod.rs
Normal file
|
@ -0,0 +1,99 @@
|
|||
use bevy_ecs::prelude::*;
|
||||
use bevy_ecs::system::SystemState;
|
||||
use criterion::*;
|
||||
use glam::*;
|
||||
use std::hint::black_box;
|
||||
|
||||
criterion_group!(fragmentation_benches, iter_frag_empty);
|
||||
|
||||
#[derive(Component, Default)]
|
||||
struct Table<const X: usize = 0>(usize);
|
||||
#[derive(Component, Default)]
|
||||
#[component(storage = "SparseSet")]
|
||||
struct Sparse<const X: usize = 0>(usize);
|
||||
|
||||
fn flip_coin() -> bool {
|
||||
rand::random::<bool>()
|
||||
}
|
||||
fn iter_frag_empty(c: &mut Criterion) {
|
||||
let mut group = c.benchmark_group("iter_fragmented(4096)_empty");
|
||||
group.warm_up_time(std::time::Duration::from_millis(500));
|
||||
group.measurement_time(std::time::Duration::from_secs(4));
|
||||
|
||||
group.bench_function("foreach_table", |b| {
|
||||
let mut world = World::new();
|
||||
spawn_empty_frag_archetype::<Table>(&mut world);
|
||||
let mut q: SystemState<Query<(Entity, &Table)>> =
|
||||
SystemState::<Query<(Entity, &Table<0>)>>::new(&mut world);
|
||||
let query = q.get(&world);
|
||||
b.iter(move || {
|
||||
let mut res = 0;
|
||||
query.iter().for_each(|(e, t)| {
|
||||
res += e.to_bits();
|
||||
black_box(t);
|
||||
});
|
||||
});
|
||||
});
|
||||
group.bench_function("foreach_sparse", |b| {
|
||||
let mut world = World::new();
|
||||
spawn_empty_frag_archetype::<Sparse>(&mut world);
|
||||
let mut q: SystemState<Query<(Entity, &Sparse)>> =
|
||||
SystemState::<Query<(Entity, &Sparse<0>)>>::new(&mut world);
|
||||
let query = q.get(&world);
|
||||
b.iter(move || {
|
||||
let mut res = 0;
|
||||
query.iter().for_each(|(e, t)| {
|
||||
res += e.to_bits();
|
||||
black_box(t);
|
||||
});
|
||||
});
|
||||
});
|
||||
group.finish();
|
||||
|
||||
fn spawn_empty_frag_archetype<T: Component + Default>(world: &mut World) {
|
||||
for i in 0..65536 {
|
||||
let mut e = world.spawn_empty();
|
||||
if flip_coin() {
|
||||
e.insert(Table::<1>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<2>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<3>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<4>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<5>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<6>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<7>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<8>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<9>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<10>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<11>(0));
|
||||
}
|
||||
if flip_coin() {
|
||||
e.insert(Table::<12>(0));
|
||||
}
|
||||
e.insert(T::default());
|
||||
|
||||
if i != 0 {
|
||||
e.despawn();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,43 @@
|
|||
use bevy_ecs::prelude::*;
|
||||
use rand::{prelude::SliceRandom, SeedableRng};
|
||||
use rand_chacha::ChaCha8Rng;
|
||||
|
||||
#[derive(Component, Copy, Clone)]
|
||||
struct TableData(f32);
|
||||
|
||||
#[derive(Component, Copy, Clone)]
|
||||
#[component(storage = "SparseSet")]
|
||||
struct SparseData(f32);
|
||||
|
||||
fn deterministic_rand() -> ChaCha8Rng {
|
||||
ChaCha8Rng::seed_from_u64(42)
|
||||
}
|
||||
pub struct Benchmark<'w>(World, QueryState<(&'w mut TableData, &'w SparseData)>);
|
||||
|
||||
impl<'w> Benchmark<'w> {
|
||||
pub fn new() -> Self {
|
||||
let mut world = World::new();
|
||||
|
||||
let mut v = vec![];
|
||||
for _ in 0..10000 {
|
||||
world.spawn((TableData(0.0), SparseData(0.0))).id();
|
||||
v.push(world.spawn(TableData(0.)).id());
|
||||
}
|
||||
|
||||
// by shuffling ,randomize the archetype iteration order to significantly deviate from the table order. This maximizes the loss of cache locality during archetype-based iteration.
|
||||
v.shuffle(&mut deterministic_rand());
|
||||
for e in v.into_iter() {
|
||||
world.entity_mut(e).despawn();
|
||||
}
|
||||
|
||||
let query = world.query::<(&mut TableData, &SparseData)>();
|
||||
Self(world, query)
|
||||
}
|
||||
|
||||
#[inline(never)]
|
||||
pub fn run(&mut self) {
|
||||
self.1
|
||||
.iter_mut(&mut self.0)
|
||||
.for_each(|(mut v1, v2)| v1.0 += v2.0)
|
||||
}
|
||||
}
|
|
@ -11,6 +11,7 @@ mod iter_frag_wide;
|
|||
mod iter_frag_wide_sparse;
|
||||
mod iter_simple;
|
||||
mod iter_simple_foreach;
|
||||
mod iter_simple_foreach_hybrid;
|
||||
mod iter_simple_foreach_sparse_set;
|
||||
mod iter_simple_foreach_wide;
|
||||
mod iter_simple_foreach_wide_sparse_set;
|
||||
|
@ -71,6 +72,10 @@ fn iter_simple(c: &mut Criterion) {
|
|||
let mut bench = iter_simple_foreach_wide_sparse_set::Benchmark::new();
|
||||
b.iter(move || bench.run());
|
||||
});
|
||||
group.bench_function("foreach_hybrid", |b| {
|
||||
let mut bench = iter_simple_foreach_hybrid::Benchmark::new();
|
||||
b.iter(move || bench.run());
|
||||
});
|
||||
group.finish();
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
use criterion::criterion_group;
|
||||
|
||||
mod propagation;
|
||||
mod simple;
|
||||
use propagation::*;
|
||||
use simple::*;
|
||||
|
||||
criterion_group!(observer_benches, event_propagation);
|
||||
criterion_group!(observer_benches, event_propagation, observe_simple);
|
||||
|
|
|
@ -1,99 +1,66 @@
|
|||
use bevy_app::{App, First, Startup};
|
||||
use bevy_ecs::{
|
||||
component::Component,
|
||||
entity::Entity,
|
||||
event::{Event, EventWriter},
|
||||
observer::Trigger,
|
||||
query::{Or, With, Without},
|
||||
system::{Commands, EntityCommands, Query},
|
||||
world::World,
|
||||
};
|
||||
use bevy_hierarchy::{BuildChildren, Children, Parent};
|
||||
use bevy_internal::MinimalPlugins;
|
||||
|
||||
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||
use rand::{prelude::SliceRandom, SeedableRng};
|
||||
use rand::{seq::IteratorRandom, Rng};
|
||||
use rand_chacha::ChaCha8Rng;
|
||||
|
||||
const DENSITY: usize = 20; // percent of nodes with listeners
|
||||
const ENTITY_DEPTH: usize = 64;
|
||||
const ENTITY_WIDTH: usize = 200;
|
||||
const N_EVENTS: usize = 500;
|
||||
fn deterministic_rand() -> ChaCha8Rng {
|
||||
ChaCha8Rng::seed_from_u64(42)
|
||||
}
|
||||
|
||||
pub fn event_propagation(criterion: &mut Criterion) {
|
||||
let mut group = criterion.benchmark_group("event_propagation");
|
||||
group.warm_up_time(std::time::Duration::from_millis(500));
|
||||
group.measurement_time(std::time::Duration::from_secs(4));
|
||||
|
||||
group.bench_function("baseline", |bencher| {
|
||||
let mut app = App::new();
|
||||
app.add_plugins(MinimalPlugins)
|
||||
.add_systems(Startup, spawn_listener_hierarchy);
|
||||
app.update();
|
||||
|
||||
bencher.iter(|| {
|
||||
black_box(app.update());
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("single_event_type", |bencher| {
|
||||
let mut app = App::new();
|
||||
app.add_plugins(MinimalPlugins)
|
||||
.add_systems(
|
||||
Startup,
|
||||
(
|
||||
spawn_listener_hierarchy,
|
||||
add_listeners_to_hierarchy::<DENSITY, 1>,
|
||||
),
|
||||
)
|
||||
.add_systems(First, send_events::<1, N_EVENTS>);
|
||||
app.update();
|
||||
let mut world = World::new();
|
||||
let (roots, leaves, nodes) = spawn_listener_hierarchy(&mut world);
|
||||
add_listeners_to_hierarchy::<DENSITY, 1>(&roots, &leaves, &nodes, &mut world);
|
||||
|
||||
bencher.iter(|| {
|
||||
black_box(app.update());
|
||||
send_events::<1, N_EVENTS>(&mut world, &leaves);
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("single_event_type_no_listeners", |bencher| {
|
||||
let mut app = App::new();
|
||||
app.add_plugins(MinimalPlugins)
|
||||
.add_systems(
|
||||
Startup,
|
||||
(
|
||||
spawn_listener_hierarchy,
|
||||
add_listeners_to_hierarchy::<DENSITY, 1>,
|
||||
),
|
||||
)
|
||||
.add_systems(First, send_events::<9, N_EVENTS>);
|
||||
app.update();
|
||||
let mut world = World::new();
|
||||
let (roots, leaves, nodes) = spawn_listener_hierarchy(&mut world);
|
||||
add_listeners_to_hierarchy::<DENSITY, 1>(&roots, &leaves, &nodes, &mut world);
|
||||
|
||||
bencher.iter(|| {
|
||||
black_box(app.update());
|
||||
// no listeners to observe TestEvent<9>
|
||||
send_events::<9, N_EVENTS>(&mut world, &leaves);
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("four_event_types", |bencher| {
|
||||
let mut app = App::new();
|
||||
let mut world = World::new();
|
||||
let (roots, leaves, nodes) = spawn_listener_hierarchy(&mut world);
|
||||
const FRAC_N_EVENTS_4: usize = N_EVENTS / 4;
|
||||
const FRAC_DENSITY_4: usize = DENSITY / 4;
|
||||
|
||||
app.add_plugins(MinimalPlugins)
|
||||
.add_systems(
|
||||
Startup,
|
||||
(
|
||||
spawn_listener_hierarchy,
|
||||
add_listeners_to_hierarchy::<FRAC_DENSITY_4, 1>,
|
||||
add_listeners_to_hierarchy::<FRAC_DENSITY_4, 2>,
|
||||
add_listeners_to_hierarchy::<FRAC_DENSITY_4, 3>,
|
||||
add_listeners_to_hierarchy::<FRAC_DENSITY_4, 4>,
|
||||
),
|
||||
)
|
||||
.add_systems(First, send_events::<1, FRAC_N_EVENTS_4>)
|
||||
.add_systems(First, send_events::<2, FRAC_N_EVENTS_4>)
|
||||
.add_systems(First, send_events::<3, FRAC_N_EVENTS_4>)
|
||||
.add_systems(First, send_events::<4, FRAC_N_EVENTS_4>);
|
||||
app.update();
|
||||
add_listeners_to_hierarchy::<FRAC_DENSITY_4, 1>(&roots, &leaves, &nodes, &mut world);
|
||||
add_listeners_to_hierarchy::<FRAC_DENSITY_4, 2>(&roots, &leaves, &nodes, &mut world);
|
||||
add_listeners_to_hierarchy::<FRAC_DENSITY_4, 3>(&roots, &leaves, &nodes, &mut world);
|
||||
add_listeners_to_hierarchy::<FRAC_DENSITY_4, 4>(&roots, &leaves, &nodes, &mut world);
|
||||
|
||||
bencher.iter(|| {
|
||||
black_box(app.update());
|
||||
send_events::<1, FRAC_N_EVENTS_4>(&mut world, &leaves);
|
||||
send_events::<2, FRAC_N_EVENTS_4>(&mut world, &leaves);
|
||||
send_events::<3, FRAC_N_EVENTS_4>(&mut world, &leaves);
|
||||
send_events::<4, FRAC_N_EVENTS_4>(&mut world, &leaves);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -108,44 +75,54 @@ impl<const N: usize> Event for TestEvent<N> {
|
|||
const AUTO_PROPAGATE: bool = true;
|
||||
}
|
||||
|
||||
fn send_events<const N: usize, const N_EVENTS: usize>(
|
||||
mut commands: Commands,
|
||||
entities: Query<Entity, Without<Children>>,
|
||||
) {
|
||||
let target = entities.iter().choose(&mut rand::thread_rng()).unwrap();
|
||||
fn send_events<const N: usize, const N_EVENTS: usize>(world: &mut World, leaves: &Vec<Entity>) {
|
||||
let target = leaves.iter().choose(&mut rand::thread_rng()).unwrap();
|
||||
|
||||
(0..N_EVENTS).for_each(|_| {
|
||||
commands.trigger_targets(TestEvent::<N> {}, target);
|
||||
world.trigger_targets(TestEvent::<N> {}, *target);
|
||||
});
|
||||
}
|
||||
|
||||
fn spawn_listener_hierarchy(mut commands: Commands) {
|
||||
fn spawn_listener_hierarchy(world: &mut World) -> (Vec<Entity>, Vec<Entity>, Vec<Entity>) {
|
||||
let mut roots = vec![];
|
||||
let mut leaves = vec![];
|
||||
let mut nodes = vec![];
|
||||
for _ in 0..ENTITY_WIDTH {
|
||||
let mut parent = commands.spawn_empty().id();
|
||||
let mut parent = world.spawn_empty().id();
|
||||
roots.push(parent);
|
||||
for _ in 0..ENTITY_DEPTH {
|
||||
let child = commands.spawn_empty().id();
|
||||
commands.entity(parent).add_child(child);
|
||||
let child = world.spawn_empty().id();
|
||||
nodes.push(child);
|
||||
|
||||
world.entity_mut(parent).add_child(child);
|
||||
parent = child;
|
||||
}
|
||||
nodes.pop();
|
||||
leaves.push(parent);
|
||||
}
|
||||
(roots, leaves, nodes)
|
||||
}
|
||||
|
||||
fn add_listeners_to_hierarchy<const DENSITY: usize, const N: usize>(
|
||||
roots: &Vec<Entity>,
|
||||
leaves: &Vec<Entity>,
|
||||
nodes: &Vec<Entity>,
|
||||
world: &mut World,
|
||||
) {
|
||||
for e in roots.iter() {
|
||||
world.entity_mut(*e).observe(empty_listener::<N>);
|
||||
}
|
||||
for e in leaves.iter() {
|
||||
world.entity_mut(*e).observe(empty_listener::<N>);
|
||||
}
|
||||
let mut rng = deterministic_rand();
|
||||
for e in nodes.iter() {
|
||||
if rng.gen_bool(DENSITY as f64 / 100.0) {
|
||||
world.entity_mut(*e).observe(empty_listener::<N>);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn empty_listener<const N: usize>(_trigger: Trigger<TestEvent<N>>) {}
|
||||
|
||||
fn add_listeners_to_hierarchy<const DENSITY: usize, const N: usize>(
|
||||
mut commands: Commands,
|
||||
roots_and_leaves: Query<Entity, Or<(Without<Parent>, Without<Children>)>>,
|
||||
nodes: Query<Entity, (With<Parent>, With<Children>)>,
|
||||
) {
|
||||
for entity in &roots_and_leaves {
|
||||
commands.entity(entity).observe(empty_listener::<N>);
|
||||
}
|
||||
for entity in &nodes {
|
||||
maybe_insert_listener::<DENSITY, N>(&mut commands.entity(entity));
|
||||
}
|
||||
}
|
||||
|
||||
fn maybe_insert_listener<const DENSITY: usize, const N: usize>(commands: &mut EntityCommands) {
|
||||
if rand::thread_rng().gen_bool(DENSITY as f64 / 100.0) {
|
||||
commands.observe(empty_listener::<N>);
|
||||
}
|
||||
fn empty_listener<const N: usize>(trigger: Trigger<TestEvent<N>>) {
|
||||
black_box(trigger);
|
||||
}
|
||||
|
|
49
benches/benches/bevy_ecs/observers/simple.rs
Normal file
49
benches/benches/bevy_ecs/observers/simple.rs
Normal file
|
@ -0,0 +1,49 @@
|
|||
use bevy_ecs::{entity::Entity, event::Event, observer::Trigger, world::World};
|
||||
|
||||
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||
use rand::{prelude::SliceRandom, SeedableRng};
|
||||
use rand_chacha::ChaCha8Rng;
|
||||
fn deterministic_rand() -> ChaCha8Rng {
|
||||
ChaCha8Rng::seed_from_u64(42)
|
||||
}
|
||||
|
||||
#[derive(Clone, Event)]
|
||||
struct EventBase;
|
||||
|
||||
pub fn observe_simple(criterion: &mut Criterion) {
|
||||
let mut group = criterion.benchmark_group("observe");
|
||||
group.warm_up_time(std::time::Duration::from_millis(500));
|
||||
group.measurement_time(std::time::Duration::from_secs(4));
|
||||
|
||||
group.bench_function("trigger_simple", |bencher| {
|
||||
let mut world = World::new();
|
||||
world.observe(empty_listener_base);
|
||||
bencher.iter(|| {
|
||||
for _ in 0..10000 {
|
||||
world.trigger(EventBase)
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("trigger_targets_simple/10000_entity", |bencher| {
|
||||
let mut world = World::new();
|
||||
let mut entities = vec![];
|
||||
for _ in 0..10000 {
|
||||
entities.push(world.spawn_empty().observe(empty_listener_base).id());
|
||||
}
|
||||
entities.shuffle(&mut deterministic_rand());
|
||||
bencher.iter(|| {
|
||||
send_base_event(&mut world, &entities);
|
||||
});
|
||||
});
|
||||
|
||||
group.finish();
|
||||
}
|
||||
|
||||
fn empty_listener_base(trigger: Trigger<EventBase>) {
|
||||
black_box(trigger);
|
||||
}
|
||||
|
||||
fn send_base_event(world: &mut World, entities: &Vec<Entity>) {
|
||||
world.trigger_targets(EventBase, entities);
|
||||
}
|
|
@ -1,3 +1,5 @@
|
|||
use std::mem::size_of;
|
||||
|
||||
use bevy_ecs::{
|
||||
component::Component,
|
||||
entity::Entity,
|
||||
|
@ -43,19 +45,11 @@ pub fn spawn_commands(criterion: &mut Criterion) {
|
|||
bencher.iter(|| {
|
||||
let mut commands = Commands::new(&mut command_queue, &world);
|
||||
for i in 0..entity_count {
|
||||
let mut entity = commands.spawn_empty();
|
||||
|
||||
if black_box(i % 2 == 0) {
|
||||
entity.insert(A);
|
||||
}
|
||||
|
||||
if black_box(i % 3 == 0) {
|
||||
entity.insert(B);
|
||||
}
|
||||
|
||||
if black_box(i % 4 == 0) {
|
||||
entity.insert(C);
|
||||
}
|
||||
let mut entity = commands
|
||||
.spawn_empty()
|
||||
.insert_if(A, || black_box(i % 2 == 0))
|
||||
.insert_if(B, || black_box(i % 3 == 0))
|
||||
.insert_if(C, || black_box(i % 4 == 0));
|
||||
|
||||
if black_box(i % 5 == 0) {
|
||||
entity.despawn();
|
||||
|
@ -184,8 +178,7 @@ impl Default for LargeStruct {
|
|||
}
|
||||
|
||||
pub fn sized_commands_impl<T: Default + Command>(criterion: &mut Criterion) {
|
||||
let mut group =
|
||||
criterion.benchmark_group(format!("sized_commands_{}_bytes", std::mem::size_of::<T>()));
|
||||
let mut group = criterion.benchmark_group(format!("sized_commands_{}_bytes", size_of::<T>()));
|
||||
group.warm_up_time(std::time::Duration::from_millis(500));
|
||||
group.measurement_time(std::time::Duration::from_secs(4));
|
||||
|
||||
|
|
|
@ -20,7 +20,8 @@ fn cubic_2d(c: &mut Criterion) {
|
|||
vec2(1.0, 0.0),
|
||||
vec2(1.0, 1.0),
|
||||
]])
|
||||
.to_curve();
|
||||
.to_curve()
|
||||
.expect("Unable to build a curve from this data");
|
||||
c.bench_function("cubic_position_Vec2", |b| {
|
||||
b.iter(|| black_box(bezier.position(black_box(0.5))));
|
||||
});
|
||||
|
@ -33,7 +34,8 @@ fn cubic(c: &mut Criterion) {
|
|||
vec3a(1.0, 0.0, 0.0),
|
||||
vec3a(1.0, 1.0, 1.0),
|
||||
]])
|
||||
.to_curve();
|
||||
.to_curve()
|
||||
.expect("Unable to build a curve from this data");
|
||||
c.bench_function("cubic_position_Vec3A", |b| {
|
||||
b.iter(|| black_box(bezier.position(black_box(0.5))));
|
||||
});
|
||||
|
@ -46,7 +48,8 @@ fn cubic_vec3(c: &mut Criterion) {
|
|||
vec3(1.0, 0.0, 0.0),
|
||||
vec3(1.0, 1.0, 1.0),
|
||||
]])
|
||||
.to_curve();
|
||||
.to_curve()
|
||||
.expect("Unable to build a curve from this data");
|
||||
c.bench_function("cubic_position_Vec3", |b| {
|
||||
b.iter(|| black_box(bezier.position(black_box(0.5))));
|
||||
});
|
||||
|
@ -59,7 +62,8 @@ fn build_pos_cubic(c: &mut Criterion) {
|
|||
vec3a(1.0, 0.0, 0.0),
|
||||
vec3a(1.0, 1.0, 1.0),
|
||||
]])
|
||||
.to_curve();
|
||||
.to_curve()
|
||||
.expect("Unable to build a curve from this data");
|
||||
c.bench_function("build_pos_cubic_100_points", |b| {
|
||||
b.iter(|| black_box(bezier.iter_positions(black_box(100)).collect::<Vec<_>>()));
|
||||
});
|
||||
|
@ -72,7 +76,8 @@ fn build_accel_cubic(c: &mut Criterion) {
|
|||
vec3a(1.0, 0.0, 0.0),
|
||||
vec3a(1.0, 1.0, 1.0),
|
||||
]])
|
||||
.to_curve();
|
||||
.to_curve()
|
||||
.expect("Unable to build a curve from this data");
|
||||
c.bench_function("build_accel_cubic_100_points", |b| {
|
||||
b.iter(|| black_box(bezier.iter_positions(black_box(100)).collect::<Vec<_>>()));
|
||||
});
|
||||
|
|
62
benches/benches/bevy_reflect/function.rs
Normal file
62
benches/benches/bevy_reflect/function.rs
Normal file
|
@ -0,0 +1,62 @@
|
|||
use bevy_reflect::func::{ArgList, IntoFunction, TypedFunction};
|
||||
use bevy_reflect::prelude::*;
|
||||
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
|
||||
|
||||
criterion_group!(benches, typed, into, call, clone);
|
||||
criterion_main!(benches);
|
||||
|
||||
fn add(a: i32, b: i32) -> i32 {
|
||||
a + b
|
||||
}
|
||||
|
||||
fn typed(c: &mut Criterion) {
|
||||
c.benchmark_group("typed")
|
||||
.bench_function("function", |b| {
|
||||
b.iter(|| add.get_function_info());
|
||||
})
|
||||
.bench_function("closure", |b| {
|
||||
let capture = 25;
|
||||
let closure = |a: i32| a + capture;
|
||||
b.iter(|| closure.get_function_info());
|
||||
});
|
||||
}
|
||||
|
||||
fn into(c: &mut Criterion) {
|
||||
c.benchmark_group("into")
|
||||
.bench_function("function", |b| {
|
||||
b.iter(|| add.into_function());
|
||||
})
|
||||
.bench_function("closure", |b| {
|
||||
let capture = 25;
|
||||
let closure = |a: i32| a + capture;
|
||||
b.iter(|| closure.into_function());
|
||||
});
|
||||
}
|
||||
|
||||
fn call(c: &mut Criterion) {
|
||||
c.benchmark_group("call")
|
||||
.bench_function("function", |b| {
|
||||
let add = add.into_function();
|
||||
b.iter_batched(
|
||||
|| ArgList::new().push_owned(75_i32).push_owned(25_i32),
|
||||
|args| add.call(args),
|
||||
BatchSize::SmallInput,
|
||||
);
|
||||
})
|
||||
.bench_function("closure", |b| {
|
||||
let capture = 25;
|
||||
let add = (|a: i32| a + capture).into_function();
|
||||
b.iter_batched(
|
||||
|| ArgList::new().push_owned(75_i32),
|
||||
|args| add.call(args),
|
||||
BatchSize::SmallInput,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
fn clone(c: &mut Criterion) {
|
||||
c.benchmark_group("clone").bench_function("function", |b| {
|
||||
let add = add.into_function();
|
||||
b.iter(|| add.clone());
|
||||
});
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use bevy_reflect::{DynamicStruct, GetField, Reflect, Struct};
|
||||
use bevy_reflect::{DynamicStruct, GetField, PartialReflect, Reflect, Struct};
|
||||
use criterion::{
|
||||
black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion, Throughput,
|
||||
};
|
||||
|
@ -62,7 +62,7 @@ fn concrete_struct_apply(criterion: &mut Criterion) {
|
|||
|
||||
// Use functions that produce trait objects of varying concrete types as the
|
||||
// input to the benchmark.
|
||||
let inputs: &[fn() -> (Box<dyn Struct>, Box<dyn Reflect>)] = &[
|
||||
let inputs: &[fn() -> (Box<dyn Struct>, Box<dyn PartialReflect>)] = &[
|
||||
|| (Box::new(Struct16::default()), Box::new(Struct16::default())),
|
||||
|| (Box::new(Struct32::default()), Box::new(Struct32::default())),
|
||||
|| (Box::new(Struct64::default()), Box::new(Struct64::default())),
|
||||
|
@ -240,7 +240,7 @@ fn dynamic_struct_apply(criterion: &mut Criterion) {
|
|||
group.warm_up_time(WARM_UP_TIME);
|
||||
group.measurement_time(MEASUREMENT_TIME);
|
||||
|
||||
let patches: &[(fn() -> Box<dyn Reflect>, usize)] = &[
|
||||
let patches: &[(fn() -> Box<dyn PartialReflect>, usize)] = &[
|
||||
(|| Box::new(Struct16::default()), 16),
|
||||
(|| Box::new(Struct32::default()), 32),
|
||||
(|| Box::new(Struct64::default()), 64),
|
||||
|
|
|
@ -4,12 +4,9 @@ use bevy_render::mesh::TorusMeshBuilder;
|
|||
|
||||
fn torus(c: &mut Criterion) {
|
||||
c.bench_function("build_torus", |b| {
|
||||
b.iter(|| black_box(TorusMeshBuilder::new(black_box(0.5),black_box(1.0))));
|
||||
b.iter(|| black_box(TorusMeshBuilder::new(black_box(0.5), black_box(1.0))));
|
||||
});
|
||||
}
|
||||
|
||||
criterion_group!(
|
||||
benches,
|
||||
torus,
|
||||
);
|
||||
criterion_group!(benches, torus,);
|
||||
criterion_main!(benches);
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
doc-valid-idents = [
|
||||
"GilRs",
|
||||
"glTF",
|
||||
"MacOS",
|
||||
"macOS",
|
||||
"NVidia",
|
||||
"OpenXR",
|
||||
"sRGB",
|
||||
|
|
|
@ -13,6 +13,7 @@ keywords = ["bevy", "accessibility", "a11y"]
|
|||
bevy_app = { path = "../bevy_app", version = "0.15.0-dev" }
|
||||
bevy_derive = { path = "../bevy_derive", version = "0.15.0-dev" }
|
||||
bevy_ecs = { path = "../bevy_ecs", version = "0.15.0-dev" }
|
||||
bevy_reflect = { path = "../bevy_reflect", version = "0.15.0-dev" }
|
||||
|
||||
accesskit = "0.16"
|
||||
|
||||
|
@ -20,5 +21,5 @@ accesskit = "0.16"
|
|||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
|
|
|
@ -17,10 +17,11 @@ use accesskit::NodeBuilder;
|
|||
use bevy_app::Plugin;
|
||||
use bevy_derive::{Deref, DerefMut};
|
||||
use bevy_ecs::{
|
||||
prelude::{Component, Entity, Event},
|
||||
prelude::{Component, Entity, Event, ReflectResource},
|
||||
schedule::SystemSet,
|
||||
system::Resource,
|
||||
};
|
||||
use bevy_reflect::Reflect;
|
||||
|
||||
/// Wrapper struct for [`accesskit::ActionRequest`]. Required to allow it to be used as an `Event`.
|
||||
#[derive(Event, Deref, DerefMut)]
|
||||
|
@ -92,7 +93,8 @@ impl From<NodeBuilder> for AccessibilityNode {
|
|||
}
|
||||
|
||||
/// Resource representing which entity has keyboard focus, if any.
|
||||
#[derive(Resource, Default, Deref, DerefMut)]
|
||||
#[derive(Resource, Default, Deref, DerefMut, Reflect)]
|
||||
#[reflect(Resource)]
|
||||
pub struct Focus(pub Option<Entity>);
|
||||
|
||||
/// Set enum for the systems relating to accessibility
|
||||
|
@ -108,6 +110,8 @@ pub struct AccessibilityPlugin;
|
|||
|
||||
impl Plugin for AccessibilityPlugin {
|
||||
fn build(&self, app: &mut bevy_app::App) {
|
||||
app.register_type::<Focus>();
|
||||
|
||||
app.init_resource::<AccessibilityRequested>()
|
||||
.init_resource::<ManageAccessibilityUpdates>()
|
||||
.init_resource::<Focus>()
|
||||
|
|
|
@ -42,5 +42,5 @@ uuid = { version = "1.7", features = ["v4"] }
|
|||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
|
|
|
@ -192,6 +192,23 @@ impl AnimationGraph {
|
|||
(graph, node_index)
|
||||
}
|
||||
|
||||
/// A convenience method to create an [`AnimationGraph`]s with an iterator
|
||||
/// of clips.
|
||||
///
|
||||
/// All of the animation clips will be direct children of the root with
|
||||
/// weight 1.0.
|
||||
///
|
||||
/// Returns the the graph and indices of the new nodes.
|
||||
pub fn from_clips<'a, I>(clips: I) -> (Self, Vec<AnimationNodeIndex>)
|
||||
where
|
||||
I: IntoIterator<Item = Handle<AnimationClip>>,
|
||||
<I as IntoIterator>::IntoIter: 'a,
|
||||
{
|
||||
let mut graph = Self::new();
|
||||
let indices = graph.add_clips(clips, 1.0, graph.root).collect();
|
||||
(graph, indices)
|
||||
}
|
||||
|
||||
/// Adds an [`AnimationClip`] to the animation graph with the given weight
|
||||
/// and returns its index.
|
||||
///
|
||||
|
@ -225,7 +242,7 @@ impl AnimationGraph {
|
|||
) -> impl Iterator<Item = AnimationNodeIndex> + 'a
|
||||
where
|
||||
I: IntoIterator<Item = Handle<AnimationClip>>,
|
||||
<I as std::iter::IntoIterator>::IntoIter: 'a,
|
||||
<I as IntoIterator>::IntoIter: 'a,
|
||||
{
|
||||
clips
|
||||
.into_iter()
|
||||
|
|
50
crates/bevy_animation/src/lib.rs
Normal file → Executable file
50
crates/bevy_animation/src/lib.rs
Normal file → Executable file
|
@ -155,6 +155,29 @@ impl VariableCurve {
|
|||
|
||||
Some(step_start)
|
||||
}
|
||||
|
||||
/// Find the index of the keyframe at or before the current time.
|
||||
///
|
||||
/// Returns the first keyframe if the `seek_time` is before the first keyframe, and
|
||||
/// the second-to-last keyframe if the `seek_time` is after the last keyframe.
|
||||
/// Panics if there are less than 2 keyframes.
|
||||
pub fn find_interpolation_start_keyframe(&self, seek_time: f32) -> usize {
|
||||
// An Ok(keyframe_index) result means an exact result was found by binary search
|
||||
// An Err result means the keyframe was not found, and the index is the keyframe
|
||||
// PERF: finding the current keyframe can be optimised
|
||||
let search_result = self
|
||||
.keyframe_timestamps
|
||||
.binary_search_by(|probe| probe.partial_cmp(&seek_time).unwrap());
|
||||
|
||||
// We want to find the index of the keyframe before the current time
|
||||
// If the keyframe is past the second-to-last keyframe, the animation cannot be interpolated.
|
||||
match search_result {
|
||||
// An exact match was found
|
||||
Ok(i) => i.clamp(0, self.keyframe_timestamps.len() - 2),
|
||||
// No exact match was found, so return the previous keyframe to interpolate from.
|
||||
Err(i) => (i.saturating_sub(1)).clamp(0, self.keyframe_timestamps.len() - 2),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Interpolation method to use between keyframes.
|
||||
|
@ -419,8 +442,9 @@ impl ActiveAnimation {
|
|||
}
|
||||
|
||||
/// Sets the weight of this animation.
|
||||
pub fn set_weight(&mut self, weight: f32) {
|
||||
pub fn set_weight(&mut self, weight: f32) -> &mut Self {
|
||||
self.weight = weight;
|
||||
self
|
||||
}
|
||||
|
||||
/// Pause the animation.
|
||||
|
@ -505,7 +529,10 @@ impl ActiveAnimation {
|
|||
}
|
||||
}
|
||||
|
||||
/// Animation controls
|
||||
/// Animation controls.
|
||||
///
|
||||
/// Automatically added to any root animations of a `SceneBundle` when it is
|
||||
/// spawned.
|
||||
#[derive(Component, Default, Reflect)]
|
||||
#[reflect(Component)]
|
||||
pub struct AnimationPlayer {
|
||||
|
@ -564,14 +591,14 @@ thread_local! {
|
|||
impl AnimationPlayer {
|
||||
/// Start playing an animation, restarting it if necessary.
|
||||
pub fn start(&mut self, animation: AnimationNodeIndex) -> &mut ActiveAnimation {
|
||||
self.active_animations.entry(animation).or_default()
|
||||
let playing_animation = self.active_animations.entry(animation).or_default();
|
||||
playing_animation.replay();
|
||||
playing_animation
|
||||
}
|
||||
|
||||
/// Start playing an animation, unless the requested animation is already playing.
|
||||
pub fn play(&mut self, animation: AnimationNodeIndex) -> &mut ActiveAnimation {
|
||||
let playing_animation = self.active_animations.entry(animation).or_default();
|
||||
playing_animation.weight = 1.0;
|
||||
playing_animation
|
||||
self.active_animations.entry(animation).or_default()
|
||||
}
|
||||
|
||||
/// Stops playing the given animation, removing it from the list of playing
|
||||
|
@ -603,6 +630,7 @@ impl AnimationPlayer {
|
|||
self.active_animations.iter_mut()
|
||||
}
|
||||
|
||||
#[deprecated = "Use `animation_is_playing` instead"]
|
||||
/// Check if the given animation node is being played.
|
||||
pub fn is_playing_animation(&self, animation: AnimationNodeIndex) -> bool {
|
||||
self.active_animations.contains_key(&animation)
|
||||
|
@ -874,18 +902,16 @@ impl AnimationTargetContext<'_> {
|
|||
// Some curves have only one keyframe used to set a transform
|
||||
if curve.keyframe_timestamps.len() == 1 {
|
||||
self.apply_single_keyframe(curve, weight);
|
||||
return;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Find the current keyframe
|
||||
let Some(step_start) = curve.find_current_keyframe(seek_time) else {
|
||||
return;
|
||||
};
|
||||
// Find the best keyframe to interpolate from
|
||||
let step_start = curve.find_interpolation_start_keyframe(seek_time);
|
||||
|
||||
let timestamp_start = curve.keyframe_timestamps[step_start];
|
||||
let timestamp_end = curve.keyframe_timestamps[step_start + 1];
|
||||
// Compute how far we are through the keyframe, normalized to [0, 1]
|
||||
let lerp = f32::inverse_lerp(timestamp_start, timestamp_end, seek_time);
|
||||
let lerp = f32::inverse_lerp(timestamp_start, timestamp_end, seek_time).clamp(0.0, 1.0);
|
||||
|
||||
self.apply_tweened_keyframe(
|
||||
curve,
|
||||
|
|
|
@ -5,9 +5,10 @@
|
|||
|
||||
use bevy_ecs::{
|
||||
component::Component,
|
||||
reflect::ReflectComponent,
|
||||
system::{Query, Res},
|
||||
};
|
||||
use bevy_reflect::Reflect;
|
||||
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
|
||||
use bevy_time::Time;
|
||||
use bevy_utils::Duration;
|
||||
|
||||
|
@ -28,6 +29,7 @@ use crate::{graph::AnimationNodeIndex, ActiveAnimation, AnimationPlayer};
|
|||
/// component to get confused about which animation is the "main" animation, and
|
||||
/// transitions will usually be incorrect as a result.
|
||||
#[derive(Component, Default, Reflect)]
|
||||
#[reflect(Component, Default)]
|
||||
pub struct AnimationTransitions {
|
||||
main_animation: Option<AnimationNodeIndex>,
|
||||
transitions: Vec<AnimationTransition>,
|
||||
|
@ -90,7 +92,11 @@ impl AnimationTransitions {
|
|||
}
|
||||
}
|
||||
|
||||
self.main_animation = Some(new_animation);
|
||||
// If already transitioning away from this animation, cancel the transition.
|
||||
// Otherwise the transition ending would incorrectly stop the new animation.
|
||||
self.transitions
|
||||
.retain(|transition| transition.animation != new_animation);
|
||||
|
||||
player.start(new_animation)
|
||||
}
|
||||
|
||||
|
|
|
@ -13,6 +13,11 @@ trace = []
|
|||
bevy_debug_stepping = []
|
||||
default = ["bevy_reflect"]
|
||||
bevy_reflect = ["dep:bevy_reflect", "bevy_ecs/bevy_reflect"]
|
||||
reflect_functions = [
|
||||
"bevy_reflect",
|
||||
"bevy_reflect/functions",
|
||||
"bevy_ecs/reflect_functions",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
# bevy
|
||||
|
@ -38,5 +43,5 @@ console_error_panic_hook = "0.1.6"
|
|||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
|
|
|
@ -96,6 +96,10 @@ impl Default for App {
|
|||
|
||||
#[cfg(feature = "bevy_reflect")]
|
||||
app.init_resource::<AppTypeRegistry>();
|
||||
|
||||
#[cfg(feature = "reflect_functions")]
|
||||
app.init_resource::<AppFunctionRegistry>();
|
||||
|
||||
app.add_plugins(MainSchedulePlugin);
|
||||
app.add_systems(
|
||||
First,
|
||||
|
@ -553,7 +557,7 @@ impl App {
|
|||
self
|
||||
}
|
||||
|
||||
/// Registers the type `T` in the [`TypeRegistry`](bevy_reflect::TypeRegistry) resource,
|
||||
/// Registers the type `T` in the [`AppTypeRegistry`] resource,
|
||||
/// adding reflect data as specified in the [`Reflect`](bevy_reflect::Reflect) derive:
|
||||
/// ```ignore (No serde "derive" feature)
|
||||
/// #[derive(Component, Serialize, Deserialize, Reflect)]
|
||||
|
@ -567,7 +571,7 @@ impl App {
|
|||
self
|
||||
}
|
||||
|
||||
/// Associates type data `D` with type `T` in the [`TypeRegistry`](bevy_reflect::TypeRegistry) resource.
|
||||
/// Associates type data `D` with type `T` in the [`AppTypeRegistry`] resource.
|
||||
///
|
||||
/// Most of the time [`register_type`](Self::register_type) can be used instead to register a
|
||||
/// type you derived [`Reflect`](bevy_reflect::Reflect) for. However, in cases where you want to
|
||||
|
@ -599,6 +603,156 @@ impl App {
|
|||
self
|
||||
}
|
||||
|
||||
/// Registers the given function into the [`AppFunctionRegistry`] resource.
|
||||
///
|
||||
/// The given function will internally be stored as a [`DynamicFunction`]
|
||||
/// and mapped according to its [name].
|
||||
///
|
||||
/// Because the function must have a name,
|
||||
/// anonymous functions (e.g. `|a: i32, b: i32| { a + b }`) and closures must instead
|
||||
/// be registered using [`register_function_with_name`] or converted to a [`DynamicFunction`]
|
||||
/// and named using [`DynamicFunction::with_name`].
|
||||
/// Failure to do so will result in a panic.
|
||||
///
|
||||
/// Only types that implement [`IntoFunction`] may be registered via this method.
|
||||
///
|
||||
/// See [`FunctionRegistry::register`] for more information.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if a function has already been registered with the given name
|
||||
/// or if the function is missing a name (such as when it is an anonymous function).
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use bevy_app::App;
|
||||
///
|
||||
/// fn add(a: i32, b: i32) -> i32 {
|
||||
/// a + b
|
||||
/// }
|
||||
///
|
||||
/// App::new().register_function(add);
|
||||
/// ```
|
||||
///
|
||||
/// Functions cannot be registered more than once.
|
||||
///
|
||||
/// ```should_panic
|
||||
/// use bevy_app::App;
|
||||
///
|
||||
/// fn add(a: i32, b: i32) -> i32 {
|
||||
/// a + b
|
||||
/// }
|
||||
///
|
||||
/// App::new()
|
||||
/// .register_function(add)
|
||||
/// // Panic! A function has already been registered with the name "my_function"
|
||||
/// .register_function(add);
|
||||
/// ```
|
||||
///
|
||||
/// Anonymous functions and closures should be registered using [`register_function_with_name`] or given a name using [`DynamicFunction::with_name`].
|
||||
///
|
||||
/// ```should_panic
|
||||
/// use bevy_app::App;
|
||||
///
|
||||
/// // Panic! Anonymous functions cannot be registered using `register_function`
|
||||
/// App::new().register_function(|a: i32, b: i32| a + b);
|
||||
/// ```
|
||||
///
|
||||
/// [`register_function_with_name`]: Self::register_function_with_name
|
||||
/// [`DynamicFunction`]: bevy_reflect::func::DynamicFunction
|
||||
/// [name]: bevy_reflect::func::FunctionInfo::name
|
||||
/// [`DynamicFunction::with_name`]: bevy_reflect::func::DynamicFunction::with_name
|
||||
/// [`IntoFunction`]: bevy_reflect::func::IntoFunction
|
||||
/// [`FunctionRegistry::register`]: bevy_reflect::func::FunctionRegistry::register
|
||||
#[cfg(feature = "reflect_functions")]
|
||||
pub fn register_function<F, Marker>(&mut self, function: F) -> &mut Self
|
||||
where
|
||||
F: bevy_reflect::func::IntoFunction<'static, Marker> + 'static,
|
||||
{
|
||||
self.main_mut().register_function(function);
|
||||
self
|
||||
}
|
||||
|
||||
/// Registers the given function or closure into the [`AppFunctionRegistry`] resource using the given name.
|
||||
///
|
||||
/// To avoid conflicts, it's recommended to use a unique name for the function.
|
||||
/// This can be achieved by "namespacing" the function with a unique identifier,
|
||||
/// such as the name of your crate.
|
||||
///
|
||||
/// For example, to register a function, `add`, from a crate, `my_crate`,
|
||||
/// you could use the name, `"my_crate::add"`.
|
||||
///
|
||||
/// Another approach could be to use the [type name] of the function,
|
||||
/// however, it should be noted that anonymous functions do _not_ have unique type names.
|
||||
///
|
||||
/// For named functions (e.g. `fn add(a: i32, b: i32) -> i32 { a + b }`) where a custom name is not needed,
|
||||
/// it's recommended to use [`register_function`] instead as the generated name is guaranteed to be unique.
|
||||
///
|
||||
/// Only types that implement [`IntoFunction`] may be registered via this method.
|
||||
///
|
||||
/// See [`FunctionRegistry::register_with_name`] for more information.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if a function has already been registered with the given name.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use bevy_app::App;
|
||||
///
|
||||
/// fn mul(a: i32, b: i32) -> i32 {
|
||||
/// a * b
|
||||
/// }
|
||||
///
|
||||
/// let div = |a: i32, b: i32| a / b;
|
||||
///
|
||||
/// App::new()
|
||||
/// // Registering an anonymous function with a unique name
|
||||
/// .register_function_with_name("my_crate::add", |a: i32, b: i32| {
|
||||
/// a + b
|
||||
/// })
|
||||
/// // Registering an existing function with its type name
|
||||
/// .register_function_with_name(std::any::type_name_of_val(&mul), mul)
|
||||
/// // Registering an existing function with a custom name
|
||||
/// .register_function_with_name("my_crate::mul", mul)
|
||||
/// // Be careful not to register anonymous functions with their type name.
|
||||
/// // This code works but registers the function with a non-unique name like `foo::bar::{{closure}}`
|
||||
/// .register_function_with_name(std::any::type_name_of_val(&div), div);
|
||||
/// ```
|
||||
///
|
||||
/// Names must be unique.
|
||||
///
|
||||
/// ```should_panic
|
||||
/// use bevy_app::App;
|
||||
///
|
||||
/// fn one() {}
|
||||
/// fn two() {}
|
||||
///
|
||||
/// App::new()
|
||||
/// .register_function_with_name("my_function", one)
|
||||
/// // Panic! A function has already been registered with the name "my_function"
|
||||
/// .register_function_with_name("my_function", two);
|
||||
/// ```
|
||||
///
|
||||
/// [type name]: std::any::type_name
|
||||
/// [`register_function`]: Self::register_function
|
||||
/// [`IntoFunction`]: bevy_reflect::func::IntoFunction
|
||||
/// [`FunctionRegistry::register_with_name`]: bevy_reflect::func::FunctionRegistry::register_with_name
|
||||
#[cfg(feature = "reflect_functions")]
|
||||
pub fn register_function_with_name<F, Marker>(
|
||||
&mut self,
|
||||
name: impl Into<std::borrow::Cow<'static, str>>,
|
||||
function: F,
|
||||
) -> &mut Self
|
||||
where
|
||||
F: bevy_reflect::func::IntoFunction<'static, Marker> + 'static,
|
||||
{
|
||||
self.main_mut().register_function_with_name(name, function);
|
||||
self
|
||||
}
|
||||
|
||||
/// Returns a reference to the [`World`].
|
||||
pub fn world(&self) -> &World {
|
||||
self.main().world()
|
||||
|
@ -836,6 +990,36 @@ impl App {
|
|||
}
|
||||
|
||||
/// Spawns an [`Observer`] entity, which will watch for and respond to the given event.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// # use bevy_app::prelude::*;
|
||||
/// # use bevy_ecs::prelude::*;
|
||||
/// # use bevy_utils::default;
|
||||
/// #
|
||||
/// # let mut app = App::new();
|
||||
/// #
|
||||
/// # #[derive(Event)]
|
||||
/// # struct Party {
|
||||
/// # friends_allowed: bool,
|
||||
/// # };
|
||||
/// #
|
||||
/// # #[derive(Event)]
|
||||
/// # struct Invite;
|
||||
/// #
|
||||
/// # #[derive(Component)]
|
||||
/// # struct Friend;
|
||||
/// #
|
||||
/// // An observer system can be any system where the first parameter is a trigger
|
||||
/// app.observe(|trigger: Trigger<Party>, friends: Query<Entity, With<Friend>>, mut commands: Commands| {
|
||||
/// if trigger.event().friends_allowed {
|
||||
/// for friend in friends.iter() {
|
||||
/// commands.trigger_targets(Invite, friend);
|
||||
/// }
|
||||
/// }
|
||||
/// });
|
||||
/// ```
|
||||
pub fn observe<E: Event, B: Bundle, M>(
|
||||
&mut self,
|
||||
observer: impl IntoObserverSystem<E, B, M>,
|
||||
|
@ -920,7 +1104,7 @@ impl From<u8> for AppExit {
|
|||
}
|
||||
|
||||
impl Termination for AppExit {
|
||||
fn report(self) -> std::process::ExitCode {
|
||||
fn report(self) -> ExitCode {
|
||||
match self {
|
||||
AppExit::Success => ExitCode::SUCCESS,
|
||||
// We leave logging an error to our users
|
||||
|
@ -931,7 +1115,7 @@ impl Termination for AppExit {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::{iter, marker::PhantomData, mem, sync::Mutex};
|
||||
use std::{iter, marker::PhantomData, mem::size_of, sync::Mutex};
|
||||
|
||||
use bevy_ecs::{
|
||||
change_detection::{DetectChanges, ResMut},
|
||||
|
@ -1257,7 +1441,7 @@ mod tests {
|
|||
fn app_exit_size() {
|
||||
// There wont be many of them so the size isn't a issue but
|
||||
// it's nice they're so small let's keep it that way.
|
||||
assert_eq!(mem::size_of::<AppExit>(), mem::size_of::<u8>());
|
||||
assert_eq!(size_of::<AppExit>(), size_of::<u8>());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -18,7 +18,6 @@ mod sub_app;
|
|||
mod terminal_ctrl_c_handler;
|
||||
|
||||
pub use app::*;
|
||||
pub use bevy_derive::DynamicPlugin;
|
||||
pub use main_schedule::*;
|
||||
pub use panic_handler::*;
|
||||
pub use plugin::*;
|
||||
|
@ -35,9 +34,10 @@ pub mod prelude {
|
|||
app::{App, AppExit},
|
||||
main_schedule::{
|
||||
First, FixedFirst, FixedLast, FixedPostUpdate, FixedPreUpdate, FixedUpdate, Last, Main,
|
||||
PostStartup, PostUpdate, PreStartup, PreUpdate, SpawnScene, Startup, Update,
|
||||
PostStartup, PostUpdate, PreStartup, PreUpdate, RunFixedMainLoop,
|
||||
RunFixedMainLoopSystem, SpawnScene, Startup, Update,
|
||||
},
|
||||
sub_app::SubApp,
|
||||
DynamicPlugin, Plugin, PluginGroup,
|
||||
Plugin, PluginGroup,
|
||||
};
|
||||
}
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
use crate::{App, Plugin};
|
||||
use bevy_ecs::{
|
||||
schedule::{ExecutorKind, InternedScheduleLabel, Schedule, ScheduleLabel},
|
||||
schedule::{
|
||||
ExecutorKind, InternedScheduleLabel, IntoSystemSetConfigs, Schedule, ScheduleLabel,
|
||||
SystemSet,
|
||||
},
|
||||
system::{Local, Resource},
|
||||
world::{Mut, World},
|
||||
};
|
||||
|
@ -75,6 +78,11 @@ pub struct First;
|
|||
pub struct PreUpdate;
|
||||
|
||||
/// Runs the [`FixedMain`] schedule in a loop according until all relevant elapsed time has been "consumed".
|
||||
/// If you need to order your variable timestep systems
|
||||
/// before or after the fixed update logic, use the [`RunFixedMainLoopSystem`] system set.
|
||||
///
|
||||
/// Note that in contrast to most other Bevy schedules, systems added directly to
|
||||
/// [`RunFixedMainLoop`] will *not* be parallelized between each other.
|
||||
///
|
||||
/// See the [`Main`] schedule for some details about how schedules are run.
|
||||
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
|
@ -94,8 +102,16 @@ pub struct FixedFirst;
|
|||
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct FixedPreUpdate;
|
||||
|
||||
/// The schedule that contains most gameplay logic.
|
||||
/// The schedule that contains most gameplay logic, which runs at a fixed rate rather than every render frame.
|
||||
/// For logic that should run once per render frame, use the [`Update`] schedule instead.
|
||||
///
|
||||
/// Examples of systems that should run at a fixed rate include (but are not limited to):
|
||||
/// - Physics
|
||||
/// - AI
|
||||
/// - Networking
|
||||
/// - Game rules
|
||||
///
|
||||
/// See the [`Update`] schedule for examples of systems that *should not* use this schedule.
|
||||
/// See the [`FixedMain`] schedule for details on how fixed updates work.
|
||||
/// See the [`Main`] schedule for some details about how schedules are run.
|
||||
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
|
@ -118,8 +134,8 @@ pub struct FixedLast;
|
|||
|
||||
/// The schedule that contains systems which only run after a fixed period of time has elapsed.
|
||||
///
|
||||
/// The exclusive `run_fixed_main_schedule` system runs this schedule.
|
||||
/// This is run by the [`RunFixedMainLoop`] schedule.
|
||||
/// This is run by the [`RunFixedMainLoop`] schedule. If you need to order your variable timestep systems
|
||||
/// before or after the fixed update logic, use the [`RunFixedMainLoopSystem`] system set.
|
||||
///
|
||||
/// Frequency of execution is configured by inserting `Time<Fixed>` resource, 64 Hz by default.
|
||||
/// See [this example](https://github.com/bevyengine/bevy/blob/latest/examples/time/time.rs).
|
||||
|
@ -128,9 +144,15 @@ pub struct FixedLast;
|
|||
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct FixedMain;
|
||||
|
||||
/// The schedule that contains app logic. Ideally containing anything that must run once per
|
||||
/// render frame, such as UI.
|
||||
/// The schedule that contains any app logic that must run once per render frame.
|
||||
/// For most gameplay logic, consider using [`FixedUpdate`] instead.
|
||||
///
|
||||
/// Examples of systems that should run once per render frame include (but are not limited to):
|
||||
/// - UI
|
||||
/// - Input handling
|
||||
/// - Audio control
|
||||
///
|
||||
/// See the [`FixedUpdate`] schedule for examples of systems that *should not* use this schedule.
|
||||
/// See the [`Main`] schedule for some details about how schedules are run.
|
||||
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct Update;
|
||||
|
@ -274,7 +296,16 @@ impl Plugin for MainSchedulePlugin {
|
|||
.init_resource::<MainScheduleOrder>()
|
||||
.init_resource::<FixedMainScheduleOrder>()
|
||||
.add_systems(Main, Main::run_main)
|
||||
.add_systems(FixedMain, FixedMain::run_fixed_main);
|
||||
.add_systems(FixedMain, FixedMain::run_fixed_main)
|
||||
.configure_sets(
|
||||
RunFixedMainLoop,
|
||||
(
|
||||
RunFixedMainLoopSystem::BeforeFixedMainLoop,
|
||||
RunFixedMainLoopSystem::FixedMainLoop,
|
||||
RunFixedMainLoopSystem::AfterFixedMainLoop,
|
||||
)
|
||||
.chain(),
|
||||
);
|
||||
|
||||
#[cfg(feature = "bevy_debug_stepping")]
|
||||
{
|
||||
|
@ -338,3 +369,96 @@ impl FixedMain {
|
|||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Set enum for the systems that want to run inside [`RunFixedMainLoop`],
|
||||
/// but before or after the fixed update logic. Systems in this set
|
||||
/// will run exactly once per frame, regardless of the number of fixed updates.
|
||||
/// They will also run under a variable timestep.
|
||||
///
|
||||
/// This is useful for handling things that need to run every frame, but
|
||||
/// also need to be read by the fixed update logic. See the individual variants
|
||||
/// for examples of what kind of systems should be placed in each.
|
||||
///
|
||||
/// Note that in contrast to most other Bevy schedules, systems added directly to
|
||||
/// [`RunFixedMainLoop`] will *not* be parallelized between each other.
|
||||
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone, SystemSet)]
|
||||
pub enum RunFixedMainLoopSystem {
|
||||
/// Runs before the fixed update logic.
|
||||
///
|
||||
/// A good example of a system that fits here
|
||||
/// is camera movement, which needs to be updated in a variable timestep,
|
||||
/// as you want the camera to move with as much precision and updates as
|
||||
/// the frame rate allows. A physics system that needs to read the camera
|
||||
/// position and orientation, however, should run in the fixed update logic,
|
||||
/// as it needs to be deterministic and run at a fixed rate for better stability.
|
||||
/// Note that we are not placing the camera movement system in `Update`, as that
|
||||
/// would mean that the physics system already ran at that point.
|
||||
///
|
||||
/// # Example
|
||||
/// ```
|
||||
/// # use bevy_app::prelude::*;
|
||||
/// # use bevy_ecs::prelude::*;
|
||||
/// App::new()
|
||||
/// .add_systems(
|
||||
/// RunFixedMainLoop,
|
||||
/// update_camera_rotation.in_set(RunFixedMainLoopSystem::BeforeFixedMainLoop))
|
||||
/// .add_systems(FixedUpdate, update_physics);
|
||||
///
|
||||
/// # fn update_camera_rotation() {}
|
||||
/// # fn update_physics() {}
|
||||
/// ```
|
||||
BeforeFixedMainLoop,
|
||||
/// Contains the fixed update logic.
|
||||
/// Runs [`FixedMain`] zero or more times based on delta of
|
||||
/// [`Time<Virtual>`] and [`Time::overstep`].
|
||||
///
|
||||
/// Don't place systems here, use [`FixedUpdate`] and friends instead.
|
||||
/// Use this system instead to order your systems to run specifically inbetween the fixed update logic and all
|
||||
/// other systems that run in [`RunFixedMainLoopSystem::BeforeFixedMainLoop`] or [`RunFixedMainLoopSystem::AfterFixedMainLoop`].
|
||||
///
|
||||
/// [`Time<Virtual>`]: https://docs.rs/bevy/latest/bevy/prelude/struct.Virtual.html
|
||||
/// [`Time::overstep`]: https://docs.rs/bevy/latest/bevy/time/struct.Time.html#method.overstep
|
||||
/// # Example
|
||||
/// ```
|
||||
/// # use bevy_app::prelude::*;
|
||||
/// # use bevy_ecs::prelude::*;
|
||||
/// App::new()
|
||||
/// .add_systems(FixedUpdate, update_physics)
|
||||
/// .add_systems(
|
||||
/// RunFixedMainLoop,
|
||||
/// (
|
||||
/// // This system will be called before all interpolation systems
|
||||
/// // that third-party plugins might add.
|
||||
/// prepare_for_interpolation
|
||||
/// .after(RunFixedMainLoopSystem::FixedMainLoop)
|
||||
/// .before(RunFixedMainLoopSystem::AfterFixedMainLoop),
|
||||
/// )
|
||||
/// );
|
||||
///
|
||||
/// # fn prepare_for_interpolation() {}
|
||||
/// # fn update_physics() {}
|
||||
/// ```
|
||||
FixedMainLoop,
|
||||
/// Runs after the fixed update logic.
|
||||
///
|
||||
/// A good example of a system that fits here
|
||||
/// is a system that interpolates the transform of an entity between the last and current fixed update.
|
||||
/// See the [fixed timestep example] for more details.
|
||||
///
|
||||
/// [fixed timestep example]: https://github.com/bevyengine/bevy/blob/main/examples/movement/physics_in_fixed_timestep.rs
|
||||
///
|
||||
/// # Example
|
||||
/// ```
|
||||
/// # use bevy_app::prelude::*;
|
||||
/// # use bevy_ecs::prelude::*;
|
||||
/// App::new()
|
||||
/// .add_systems(FixedUpdate, update_physics)
|
||||
/// .add_systems(
|
||||
/// RunFixedMainLoop,
|
||||
/// interpolate_transforms.in_set(RunFixedMainLoopSystem::AfterFixedMainLoop));
|
||||
///
|
||||
/// # fn interpolate_transforms() {}
|
||||
/// # fn update_physics() {}
|
||||
/// ```
|
||||
AfterFixedMainLoop,
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
//! This module provides panic handlers for [Bevy](https://bevyengine.org)
|
||||
//! apps, and automatically configures platform specifics (i.e. WASM or Android).
|
||||
//! apps, and automatically configures platform specifics (i.e. Wasm or Android).
|
||||
//!
|
||||
//! By default, the [`PanicHandlerPlugin`] from this crate is included in Bevy's `DefaultPlugins`.
|
||||
//!
|
||||
|
@ -11,7 +11,7 @@ use crate::Plugin;
|
|||
|
||||
/// Adds sensible panic handlers to Apps. This plugin is part of the `DefaultPlugins`. Adding
|
||||
/// this plugin will setup a panic hook appropriate to your target platform:
|
||||
/// * On WASM, uses [`console_error_panic_hook`](https://crates.io/crates/console_error_panic_hook), logging
|
||||
/// * On Wasm, uses [`console_error_panic_hook`](https://crates.io/crates/console_error_panic_hook), logging
|
||||
/// to the browser console.
|
||||
/// * Other platforms are currently not setup.
|
||||
///
|
||||
|
|
|
@ -120,16 +120,6 @@ impl Plugin for PlaceholderPlugin {
|
|||
fn build(&self, _app: &mut App) {}
|
||||
}
|
||||
|
||||
/// A type representing an unsafe function that returns a mutable pointer to a [`Plugin`].
|
||||
/// It is used for dynamically loading plugins.
|
||||
///
|
||||
/// See `bevy_dynamic_plugin/src/loader.rs#dynamically_load_plugin`.
|
||||
#[deprecated(
|
||||
since = "0.14.0",
|
||||
note = "The current dynamic plugin system is unsound and will be removed in 0.15."
|
||||
)]
|
||||
pub type CreatePlugin = unsafe fn() -> *mut dyn Plugin;
|
||||
|
||||
/// Types that represent a set of [`Plugin`]s.
|
||||
///
|
||||
/// This is implemented for all types which implement [`Plugin`],
|
||||
|
|
|
@ -408,6 +408,32 @@ impl SubApp {
|
|||
registry.write().register_type_data::<T, D>();
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::register_function`].
|
||||
#[cfg(feature = "reflect_functions")]
|
||||
pub fn register_function<F, Marker>(&mut self, function: F) -> &mut Self
|
||||
where
|
||||
F: bevy_reflect::func::IntoFunction<'static, Marker> + 'static,
|
||||
{
|
||||
let registry = self.world.resource_mut::<AppFunctionRegistry>();
|
||||
registry.write().register(function).unwrap();
|
||||
self
|
||||
}
|
||||
|
||||
/// See [`App::register_function_with_name`].
|
||||
#[cfg(feature = "reflect_functions")]
|
||||
pub fn register_function_with_name<F, Marker>(
|
||||
&mut self,
|
||||
name: impl Into<std::borrow::Cow<'static, str>>,
|
||||
function: F,
|
||||
) -> &mut Self
|
||||
where
|
||||
F: bevy_reflect::func::IntoFunction<'static, Marker> + 'static,
|
||||
{
|
||||
let registry = self.world.resource_mut::<AppFunctionRegistry>();
|
||||
registry.write().register_with_name(name, function).unwrap();
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// The collection of sub-apps that belong to an [`App`].
|
||||
|
|
|
@ -67,5 +67,5 @@ bevy_log = { path = "../bevy_log", version = "0.15.0-dev" }
|
|||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
|
|
|
@ -22,5 +22,5 @@ quote = "1.0"
|
|||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
|
|
|
@ -193,10 +193,7 @@ impl<A: Asset> DenseAssetStorage<A> {
|
|||
Entry::None => return None,
|
||||
Entry::Some { value, generation } => {
|
||||
if *generation == index.generation {
|
||||
value.take().map(|value| {
|
||||
self.len -= 1;
|
||||
value
|
||||
})
|
||||
value.take().inspect(|_| self.len -= 1)
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
|
|
|
@ -515,6 +515,8 @@ pub enum UntypedAssetConversionError {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use bevy_reflect::PartialReflect;
|
||||
|
||||
use super::*;
|
||||
|
||||
type TestAsset = ();
|
||||
|
@ -651,7 +653,7 @@ mod tests {
|
|||
);
|
||||
|
||||
let reflected: &dyn Reflect = &handle;
|
||||
let cloned_handle: Box<dyn Reflect> = reflected.clone_value();
|
||||
let cloned_handle: Box<dyn PartialReflect> = reflected.clone_value();
|
||||
|
||||
assert_eq!(
|
||||
Arc::strong_count(strong),
|
||||
|
|
|
@ -52,6 +52,13 @@ impl EmbeddedAssetRegistry {
|
|||
self.dir.insert_meta(asset_path, value);
|
||||
}
|
||||
|
||||
/// Removes an asset stored using `full_path` (the full path as [`file`] would return for that file, if it was capable of
|
||||
/// running in a non-rust file). If no asset is stored with at `full_path` its a no-op.
|
||||
/// It returning `Option` contains the originally stored `Data` or `None`.
|
||||
pub fn remove_asset(&self, full_path: &Path) -> Option<super::memory::Data> {
|
||||
self.dir.remove_asset(full_path)
|
||||
}
|
||||
|
||||
/// Registers a `embedded` [`AssetSource`] that uses this [`EmbeddedAssetRegistry`].
|
||||
// NOTE: unused_mut because embedded_watcher feature is the only mutable consumer of `let mut source`
|
||||
#[allow(unused_mut)]
|
||||
|
@ -300,7 +307,7 @@ macro_rules! load_internal_binary_asset {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::_embedded_asset_path;
|
||||
use super::{EmbeddedAssetRegistry, _embedded_asset_path};
|
||||
use std::path::Path;
|
||||
|
||||
// Relative paths show up if this macro is being invoked by a local crate.
|
||||
|
@ -404,4 +411,15 @@ mod tests {
|
|||
// Really, should be "my_crate/src/the/asset.png"
|
||||
assert_eq!(asset_path, Path::new("my_crate/the/asset.png"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_embedded_asset() {
|
||||
let reg = EmbeddedAssetRegistry::default();
|
||||
let path = std::path::PathBuf::from("a/b/asset.png");
|
||||
reg.insert_asset(path.clone(), &path, &[]);
|
||||
assert!(reg.dir.get_asset(&path).is_some());
|
||||
assert!(reg.remove_asset(&path).is_some());
|
||||
assert!(reg.dir.get_asset(&path).is_none());
|
||||
assert!(reg.remove_asset(&path).is_none());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -160,10 +160,7 @@ impl AssetReader for FileAssetReader {
|
|||
}
|
||||
}
|
||||
|
||||
async fn is_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> std::result::Result<bool, AssetReaderError> {
|
||||
async fn is_directory<'a>(&'a self, path: &'a Path) -> Result<bool, AssetReaderError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
let metadata = full_path
|
||||
.metadata()
|
||||
|
@ -194,35 +191,26 @@ impl AssetWriter for FileAssetWriter {
|
|||
Ok(writer)
|
||||
}
|
||||
|
||||
async fn remove<'a>(&'a self, path: &'a Path) -> std::result::Result<(), AssetWriterError> {
|
||||
async fn remove<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_file(full_path)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn remove_meta<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
async fn remove_meta<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> {
|
||||
let meta_path = get_meta_path(path);
|
||||
let full_path = self.root_path.join(meta_path);
|
||||
std::fs::remove_file(full_path)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn remove_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
async fn remove_directory<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_dir_all(full_path)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn remove_empty_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
async fn remove_empty_directory<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_dir(full_path)?;
|
||||
Ok(())
|
||||
|
@ -231,7 +219,7 @@ impl AssetWriter for FileAssetWriter {
|
|||
async fn remove_assets_in_directory<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
) -> Result<(), AssetWriterError> {
|
||||
let full_path = self.root_path.join(path);
|
||||
std::fs::remove_dir_all(&full_path)?;
|
||||
std::fs::create_dir_all(&full_path)?;
|
||||
|
@ -242,7 +230,7 @@ impl AssetWriter for FileAssetWriter {
|
|||
&'a self,
|
||||
old_path: &'a Path,
|
||||
new_path: &'a Path,
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
) -> Result<(), AssetWriterError> {
|
||||
let full_old_path = self.root_path.join(old_path);
|
||||
let full_new_path = self.root_path.join(new_path);
|
||||
if let Some(parent) = full_new_path.parent() {
|
||||
|
@ -256,7 +244,7 @@ impl AssetWriter for FileAssetWriter {
|
|||
&'a self,
|
||||
old_path: &'a Path,
|
||||
new_path: &'a Path,
|
||||
) -> std::result::Result<(), AssetWriterError> {
|
||||
) -> Result<(), AssetWriterError> {
|
||||
let old_meta_path = get_meta_path(old_path);
|
||||
let new_meta_path = get_meta_path(new_path);
|
||||
let full_old_path = self.root_path.join(old_meta_path);
|
||||
|
|
|
@ -55,6 +55,17 @@ impl Dir {
|
|||
);
|
||||
}
|
||||
|
||||
/// Removes the stored asset at `path` and returns the `Data` stored if found and otherwise `None`.
|
||||
pub fn remove_asset(&self, path: &Path) -> Option<Data> {
|
||||
let mut dir = self.clone();
|
||||
if let Some(parent) = path.parent() {
|
||||
dir = self.get_or_insert_dir(parent);
|
||||
}
|
||||
let key: Box<str> = path.file_name().unwrap().to_string_lossy().into();
|
||||
let data = dir.0.write().assets.remove(&key);
|
||||
data
|
||||
}
|
||||
|
||||
pub fn insert_meta(&self, path: &Path, value: impl Into<Value>) {
|
||||
let mut dir = self.clone();
|
||||
if let Some(parent) = path.parent() {
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
#[cfg(all(feature = "file_watcher", target_arch = "wasm32"))]
|
||||
compile_error!(
|
||||
"The \"file_watcher\" feature for hot reloading does not work \
|
||||
on WASM.\nDisable \"file_watcher\" \
|
||||
when compiling to WASM"
|
||||
on Wasm.\nDisable \"file_watcher\" \
|
||||
when compiling to Wasm"
|
||||
);
|
||||
|
||||
#[cfg(target_os = "android")]
|
||||
|
@ -24,13 +24,13 @@ pub use source::*;
|
|||
use bevy_utils::{BoxedFuture, ConditionalSendFuture};
|
||||
use futures_io::{AsyncRead, AsyncSeek, AsyncWrite};
|
||||
use futures_lite::{ready, Stream};
|
||||
use std::io::SeekFrom;
|
||||
use std::task::Context;
|
||||
use std::{
|
||||
io::SeekFrom,
|
||||
mem::size_of,
|
||||
path::{Path, PathBuf},
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
task::Poll,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
use thiserror::Error;
|
||||
|
||||
|
@ -77,7 +77,7 @@ impl From<std::io::Error> for AssetReaderError {
|
|||
// Ideally this would be even smaller (ReadToEndFuture only needs space for two references based on its definition),
|
||||
// but compiler optimizations can apparently inflate the stack size of futures due to inlining, which makes
|
||||
// a higher maximum necessary.
|
||||
pub const STACK_FUTURE_SIZE: usize = 10 * std::mem::size_of::<&()>();
|
||||
pub const STACK_FUTURE_SIZE: usize = 10 * size_of::<&()>();
|
||||
|
||||
pub use stackfuture::StackFuture;
|
||||
|
||||
|
@ -520,7 +520,7 @@ impl VecReader {
|
|||
impl AsyncRead for VecReader {
|
||||
fn poll_read(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut std::task::Context<'_>,
|
||||
cx: &mut Context<'_>,
|
||||
buf: &mut [u8],
|
||||
) -> Poll<futures_io::Result<usize>> {
|
||||
if self.bytes_read >= self.bytes.len() {
|
||||
|
|
|
@ -137,7 +137,7 @@ impl AsyncRead for TransactionLockedReader<'_> {
|
|||
mut self: Pin<&mut Self>,
|
||||
cx: &mut std::task::Context<'_>,
|
||||
buf: &mut [u8],
|
||||
) -> std::task::Poll<futures_io::Result<usize>> {
|
||||
) -> Poll<futures_io::Result<usize>> {
|
||||
Pin::new(&mut self.reader).poll_read(cx, buf)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -70,9 +70,26 @@ impl<'a> AssetSourceId<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<&'static str> for AssetSourceId<'static> {
|
||||
fn from(value: &'static str) -> Self {
|
||||
AssetSourceId::Name(value.into())
|
||||
impl AssetSourceId<'static> {
|
||||
/// Indicates this [`AssetSourceId`] should have a static lifetime.
|
||||
#[inline]
|
||||
pub fn as_static(self) -> Self {
|
||||
match self {
|
||||
Self::Default => Self::Default,
|
||||
Self::Name(value) => Self::Name(value.as_static()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Constructs an [`AssetSourceId`] with a static lifetime.
|
||||
#[inline]
|
||||
pub fn from_static(value: impl Into<Self>) -> Self {
|
||||
value.into().as_static()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a str> for AssetSourceId<'a> {
|
||||
fn from(value: &'a str) -> Self {
|
||||
AssetSourceId::Name(CowArc::Borrowed(value))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -82,10 +99,10 @@ impl<'a, 'b> From<&'a AssetSourceId<'b>> for AssetSourceId<'b> {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<Option<&'static str>> for AssetSourceId<'static> {
|
||||
fn from(value: Option<&'static str>) -> Self {
|
||||
impl<'a> From<Option<&'a str>> for AssetSourceId<'a> {
|
||||
fn from(value: Option<&'a str>) -> Self {
|
||||
match value {
|
||||
Some(value) => AssetSourceId::Name(value.into()),
|
||||
Some(value) => AssetSourceId::Name(CowArc::Borrowed(value)),
|
||||
None => AssetSourceId::Default,
|
||||
}
|
||||
}
|
||||
|
@ -302,7 +319,7 @@ pub struct AssetSourceBuilders {
|
|||
impl AssetSourceBuilders {
|
||||
/// Inserts a new builder with the given `id`
|
||||
pub fn insert(&mut self, id: impl Into<AssetSourceId<'static>>, source: AssetSourceBuilder) {
|
||||
match id.into() {
|
||||
match AssetSourceId::from_static(id) {
|
||||
AssetSourceId::Default => {
|
||||
self.default = Some(source);
|
||||
}
|
||||
|
|
|
@ -23,7 +23,7 @@ extern "C" {
|
|||
fn worker(this: &Global) -> JsValue;
|
||||
}
|
||||
|
||||
/// Reader implementation for loading assets via HTTP in WASM.
|
||||
/// Reader implementation for loading assets via HTTP in Wasm.
|
||||
pub struct HttpWasmAssetReader {
|
||||
root_path: PathBuf,
|
||||
}
|
||||
|
|
|
@ -341,7 +341,7 @@ impl AssetApp for App {
|
|||
id: impl Into<AssetSourceId<'static>>,
|
||||
source: AssetSourceBuilder,
|
||||
) -> &mut Self {
|
||||
let id = id.into();
|
||||
let id = AssetSourceId::from_static(id);
|
||||
if self.world().get_resource::<AssetServer>().is_some() {
|
||||
error!("{} must be registered before `AssetPlugin` (typically added as part of `DefaultPlugins`)", id);
|
||||
}
|
||||
|
@ -583,13 +583,10 @@ mod tests {
|
|||
async fn read_meta<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> Result<impl bevy_asset::io::Reader + 'a, AssetReaderError> {
|
||||
) -> Result<impl Reader + 'a, AssetReaderError> {
|
||||
self.memory_reader.read_meta(path).await
|
||||
}
|
||||
async fn read<'a>(
|
||||
&'a self,
|
||||
path: &'a Path,
|
||||
) -> Result<impl bevy_asset::io::Reader + 'a, bevy_asset::io::AssetReaderError> {
|
||||
async fn read<'a>(&'a self, path: &'a Path) -> Result<impl Reader + 'a, AssetReaderError> {
|
||||
let attempt_number = {
|
||||
let mut attempt_counters = self.attempt_counters.lock().unwrap();
|
||||
if let Some(existing) = attempt_counters.get_mut(path) {
|
||||
|
|
|
@ -21,7 +21,7 @@ use thiserror::Error;
|
|||
/// should be loaded.
|
||||
pub trait AssetLoader: Send + Sync + 'static {
|
||||
/// The top level [`Asset`] loaded by this [`AssetLoader`].
|
||||
type Asset: crate::Asset;
|
||||
type Asset: Asset;
|
||||
/// The settings type used by this [`AssetLoader`].
|
||||
type Settings: Settings + Default + Serialize + for<'a> Deserialize<'a>;
|
||||
/// The type of [error](`std::error::Error`) which could be encountered by this loader.
|
||||
|
|
|
@ -475,14 +475,43 @@ impl<'a> AssetPath<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<&'static str> for AssetPath<'static> {
|
||||
impl AssetPath<'static> {
|
||||
/// Indicates this [`AssetPath`] should have a static lifetime.
|
||||
#[inline]
|
||||
fn from(asset_path: &'static str) -> Self {
|
||||
pub fn as_static(self) -> Self {
|
||||
let Self {
|
||||
source,
|
||||
path,
|
||||
label,
|
||||
} = self;
|
||||
|
||||
let source = source.as_static();
|
||||
let path = path.as_static();
|
||||
let label = label.map(CowArc::as_static);
|
||||
|
||||
Self {
|
||||
source,
|
||||
path,
|
||||
label,
|
||||
}
|
||||
}
|
||||
|
||||
/// Constructs an [`AssetPath`] with a static lifetime.
|
||||
#[inline]
|
||||
pub fn from_static(value: impl Into<Self>) -> Self {
|
||||
value.into().as_static()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a str> for AssetPath<'a> {
|
||||
#[inline]
|
||||
fn from(asset_path: &'a str) -> Self {
|
||||
let (source, path, label) = Self::parse_internal(asset_path).unwrap();
|
||||
|
||||
AssetPath {
|
||||
source: source.into(),
|
||||
path: CowArc::Static(path),
|
||||
label: label.map(CowArc::Static),
|
||||
path: CowArc::Borrowed(path),
|
||||
label: label.map(CowArc::Borrowed),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -501,12 +530,12 @@ impl From<String> for AssetPath<'static> {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<&'static Path> for AssetPath<'static> {
|
||||
impl<'a> From<&'a Path> for AssetPath<'a> {
|
||||
#[inline]
|
||||
fn from(path: &'static Path) -> Self {
|
||||
fn from(path: &'a Path) -> Self {
|
||||
Self {
|
||||
source: AssetSourceId::Default,
|
||||
path: CowArc::Static(path),
|
||||
path: CowArc::Borrowed(path),
|
||||
label: None,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -91,6 +91,10 @@ impl AssetProcessor {
|
|||
Self { server, data }
|
||||
}
|
||||
|
||||
pub fn data(&self) -> &Arc<AssetProcessorData> {
|
||||
&self.data
|
||||
}
|
||||
|
||||
/// The "internal" [`AssetServer`] used by the [`AssetProcessor`]. This is _separate_ from the asset processor used by
|
||||
/// the main App. It has different processor-specific configuration and a different ID space.
|
||||
pub fn server(&self) -> &AssetServer {
|
||||
|
@ -153,7 +157,7 @@ impl AssetProcessor {
|
|||
/// Starts the processor in a background thread.
|
||||
pub fn start(_processor: Res<Self>) {
|
||||
#[cfg(any(target_arch = "wasm32", not(feature = "multi_threaded")))]
|
||||
error!("Cannot run AssetProcessor in single threaded mode (or WASM) yet.");
|
||||
error!("Cannot run AssetProcessor in single threaded mode (or Wasm) yet.");
|
||||
#[cfg(all(not(target_arch = "wasm32"), feature = "multi_threaded"))]
|
||||
{
|
||||
let processor = _processor.clone();
|
||||
|
@ -323,7 +327,7 @@ impl AssetProcessor {
|
|||
AssetPath::from_path(&path).with_source(source.id())
|
||||
);
|
||||
#[cfg(any(target_arch = "wasm32", not(feature = "multi_threaded")))]
|
||||
error!("AddFolder event cannot be handled in single threaded mode (or WASM) yet.");
|
||||
error!("AddFolder event cannot be handled in single threaded mode (or Wasm) yet.");
|
||||
#[cfg(all(not(target_arch = "wasm32"), feature = "multi_threaded"))]
|
||||
IoTaskPool::get().scope(|scope| {
|
||||
scope.spawn(async move {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use std::any::{Any, TypeId};
|
||||
|
||||
use bevy_ecs::world::{unsafe_world_cell::UnsafeWorldCell, World};
|
||||
use bevy_reflect::{FromReflect, FromType, Reflect};
|
||||
use bevy_reflect::{FromReflect, FromType, PartialReflect, Reflect};
|
||||
|
||||
use crate::{Asset, AssetId, Assets, Handle, UntypedAssetId, UntypedHandle};
|
||||
|
||||
|
@ -22,8 +22,8 @@ pub struct ReflectAsset {
|
|||
// - may only be called with an [`UnsafeWorldCell`] which can be used to access the corresponding `Assets<T>` resource mutably
|
||||
// - may only be used to access **at most one** access at once
|
||||
get_unchecked_mut: unsafe fn(UnsafeWorldCell<'_>, UntypedHandle) -> Option<&mut dyn Reflect>,
|
||||
add: fn(&mut World, &dyn Reflect) -> UntypedHandle,
|
||||
insert: fn(&mut World, UntypedHandle, &dyn Reflect),
|
||||
add: fn(&mut World, &dyn PartialReflect) -> UntypedHandle,
|
||||
insert: fn(&mut World, UntypedHandle, &dyn PartialReflect),
|
||||
len: fn(&World) -> usize,
|
||||
ids: for<'w> fn(&'w World) -> Box<dyn Iterator<Item = UntypedAssetId> + 'w>,
|
||||
remove: fn(&mut World, UntypedHandle) -> Option<Box<dyn Reflect>>,
|
||||
|
@ -94,11 +94,11 @@ impl ReflectAsset {
|
|||
}
|
||||
|
||||
/// Equivalent of [`Assets::add`]
|
||||
pub fn add(&self, world: &mut World, value: &dyn Reflect) -> UntypedHandle {
|
||||
pub fn add(&self, world: &mut World, value: &dyn PartialReflect) -> UntypedHandle {
|
||||
(self.add)(world, value)
|
||||
}
|
||||
/// Equivalent of [`Assets::insert`]
|
||||
pub fn insert(&self, world: &mut World, handle: UntypedHandle, value: &dyn Reflect) {
|
||||
pub fn insert(&self, world: &mut World, handle: UntypedHandle, value: &dyn PartialReflect) {
|
||||
(self.insert)(world, handle, value);
|
||||
}
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@ use crate::{
|
|||
UntypedAssetId, UntypedHandle,
|
||||
};
|
||||
use bevy_ecs::world::World;
|
||||
use bevy_tasks::Task;
|
||||
use bevy_utils::tracing::warn;
|
||||
use bevy_utils::{Entry, HashMap, HashSet, TypeIdMap};
|
||||
use crossbeam_channel::Sender;
|
||||
|
@ -76,6 +77,7 @@ pub(crate) struct AssetInfos {
|
|||
pub(crate) dependency_loaded_event_sender: TypeIdMap<fn(&mut World, UntypedAssetId)>,
|
||||
pub(crate) dependency_failed_event_sender:
|
||||
TypeIdMap<fn(&mut World, UntypedAssetId, AssetPath<'static>, AssetLoadError)>,
|
||||
pub(crate) pending_tasks: HashMap<UntypedAssetId, Task<()>>,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for AssetInfos {
|
||||
|
@ -364,6 +366,7 @@ impl AssetInfos {
|
|||
&mut self.path_to_id,
|
||||
&mut self.loader_dependants,
|
||||
&mut self.living_labeled_assets,
|
||||
&mut self.pending_tasks,
|
||||
self.watching_for_changes,
|
||||
id,
|
||||
)
|
||||
|
@ -587,6 +590,11 @@ impl AssetInfos {
|
|||
}
|
||||
|
||||
pub(crate) fn process_asset_fail(&mut self, failed_id: UntypedAssetId, error: AssetLoadError) {
|
||||
// Check whether the handle has been dropped since the asset was loaded.
|
||||
if !self.infos.contains_key(&failed_id) {
|
||||
return;
|
||||
}
|
||||
|
||||
let (dependants_waiting_on_load, dependants_waiting_on_rec_load) = {
|
||||
let Some(info) = self.get_mut(failed_id) else {
|
||||
// The asset was already dropped.
|
||||
|
@ -648,6 +656,7 @@ impl AssetInfos {
|
|||
path_to_id: &mut HashMap<AssetPath<'static>, TypeIdMap<UntypedAssetId>>,
|
||||
loader_dependants: &mut HashMap<AssetPath<'static>, HashSet<AssetPath<'static>>>,
|
||||
living_labeled_assets: &mut HashMap<AssetPath<'static>, HashSet<Box<str>>>,
|
||||
pending_tasks: &mut HashMap<UntypedAssetId, Task<()>>,
|
||||
watching_for_changes: bool,
|
||||
id: UntypedAssetId,
|
||||
) -> bool {
|
||||
|
@ -662,6 +671,8 @@ impl AssetInfos {
|
|||
return false;
|
||||
}
|
||||
|
||||
pending_tasks.remove(&id);
|
||||
|
||||
let type_id = entry.key().type_id();
|
||||
|
||||
let info = entry.remove();
|
||||
|
@ -704,6 +715,7 @@ impl AssetInfos {
|
|||
&mut self.path_to_id,
|
||||
&mut self.loader_dependants,
|
||||
&mut self.living_labeled_assets,
|
||||
&mut self.pending_tasks,
|
||||
self.watching_for_changes,
|
||||
id.untyped(provider.type_id),
|
||||
);
|
||||
|
|
|
@ -22,13 +22,13 @@ use bevy_tasks::IoTaskPool;
|
|||
use bevy_utils::tracing::{error, info};
|
||||
use bevy_utils::{CowArc, HashSet};
|
||||
use crossbeam_channel::{Receiver, Sender};
|
||||
use futures_lite::StreamExt;
|
||||
use futures_lite::{FutureExt, StreamExt};
|
||||
use info::*;
|
||||
use loaders::*;
|
||||
use parking_lot::RwLock;
|
||||
use std::future::Future;
|
||||
use std::{any::Any, path::PathBuf};
|
||||
use std::{any::TypeId, path::Path, sync::Arc};
|
||||
use std::{future::Future, panic::AssertUnwindSafe};
|
||||
use thiserror::Error;
|
||||
|
||||
// Needed for doc string
|
||||
|
@ -368,7 +368,8 @@ impl AssetServer {
|
|||
guard: G,
|
||||
) -> Handle<A> {
|
||||
let path = path.into().into_owned();
|
||||
let (handle, should_load) = self.data.infos.write().get_or_create_path_handle::<A>(
|
||||
let mut infos = self.data.infos.write();
|
||||
let (handle, should_load) = infos.get_or_create_path_handle::<A>(
|
||||
path.clone(),
|
||||
HandleLoadingMode::Request,
|
||||
meta_transform,
|
||||
|
@ -377,14 +378,18 @@ impl AssetServer {
|
|||
if should_load {
|
||||
let owned_handle = Some(handle.clone().untyped());
|
||||
let server = self.clone();
|
||||
IoTaskPool::get()
|
||||
.spawn(async move {
|
||||
if let Err(err) = server.load_internal(owned_handle, path, false, None).await {
|
||||
error!("{}", err);
|
||||
}
|
||||
drop(guard);
|
||||
})
|
||||
.detach();
|
||||
let task = IoTaskPool::get().spawn(async move {
|
||||
if let Err(err) = server.load_internal(owned_handle, path, false, None).await {
|
||||
error!("{}", err);
|
||||
}
|
||||
drop(guard);
|
||||
});
|
||||
|
||||
#[cfg(not(any(target_arch = "wasm32", not(feature = "multi_threaded"))))]
|
||||
infos.pending_tasks.insert(handle.id().untyped(), task);
|
||||
|
||||
#[cfg(any(target_arch = "wasm32", not(feature = "multi_threaded")))]
|
||||
task.detach();
|
||||
}
|
||||
|
||||
handle
|
||||
|
@ -414,44 +419,47 @@ impl AssetServer {
|
|||
CowArc::Owned(format!("{source}--{UNTYPED_SOURCE_SUFFIX}").into())
|
||||
}
|
||||
});
|
||||
let (handle, should_load) = self
|
||||
.data
|
||||
.infos
|
||||
.write()
|
||||
.get_or_create_path_handle::<LoadedUntypedAsset>(
|
||||
path.clone().with_source(untyped_source),
|
||||
HandleLoadingMode::Request,
|
||||
meta_transform,
|
||||
);
|
||||
let mut infos = self.data.infos.write();
|
||||
let (handle, should_load) = infos.get_or_create_path_handle::<LoadedUntypedAsset>(
|
||||
path.clone().with_source(untyped_source),
|
||||
HandleLoadingMode::Request,
|
||||
meta_transform,
|
||||
);
|
||||
if !should_load {
|
||||
return handle;
|
||||
}
|
||||
let id = handle.id().untyped();
|
||||
let owned_handle = Some(handle.clone().untyped());
|
||||
|
||||
let server = self.clone();
|
||||
IoTaskPool::get()
|
||||
.spawn(async move {
|
||||
let path_clone = path.clone();
|
||||
match server.load_untyped_async(path).await {
|
||||
Ok(handle) => server.send_asset_event(InternalAssetEvent::Loaded {
|
||||
let task = IoTaskPool::get().spawn(async move {
|
||||
let path_clone = path.clone();
|
||||
match server.load_internal(owned_handle, path, false, None).await {
|
||||
Ok(handle) => server.send_asset_event(InternalAssetEvent::Loaded {
|
||||
id,
|
||||
loaded_asset: LoadedAsset::new_with_dependencies(
|
||||
LoadedUntypedAsset { handle },
|
||||
None,
|
||||
)
|
||||
.into(),
|
||||
}),
|
||||
Err(err) => {
|
||||
error!("{err}");
|
||||
server.send_asset_event(InternalAssetEvent::Failed {
|
||||
id,
|
||||
loaded_asset: LoadedAsset::new_with_dependencies(
|
||||
LoadedUntypedAsset { handle },
|
||||
None,
|
||||
)
|
||||
.into(),
|
||||
}),
|
||||
Err(err) => {
|
||||
error!("{err}");
|
||||
server.send_asset_event(InternalAssetEvent::Failed {
|
||||
id,
|
||||
path: path_clone,
|
||||
error: err,
|
||||
});
|
||||
}
|
||||
path: path_clone,
|
||||
error: err,
|
||||
});
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
});
|
||||
|
||||
#[cfg(not(any(target_arch = "wasm32", not(feature = "multi_threaded"))))]
|
||||
infos.pending_tasks.insert(handle.id().untyped(), task);
|
||||
|
||||
#[cfg(any(target_arch = "wasm32", not(feature = "multi_threaded")))]
|
||||
task.detach();
|
||||
|
||||
handle
|
||||
}
|
||||
|
||||
|
@ -488,7 +496,7 @@ impl AssetServer {
|
|||
/// avoid looking up `should_load` twice, but it means you _must_ be sure a load is necessary when calling this function with [`Some`].
|
||||
async fn load_internal<'a>(
|
||||
&self,
|
||||
input_handle: Option<UntypedHandle>,
|
||||
mut input_handle: Option<UntypedHandle>,
|
||||
path: AssetPath<'a>,
|
||||
force: bool,
|
||||
meta_transform: Option<MetaTransform>,
|
||||
|
@ -500,7 +508,7 @@ impl AssetServer {
|
|||
let (mut meta, loader, mut reader) = self
|
||||
.get_meta_loader_and_reader(&path_clone, asset_type_id)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
.inspect_err(|e| {
|
||||
// if there was an input handle, a "load" operation has already started, so we must produce a "failure" event, if
|
||||
// we cannot find the meta and loader
|
||||
if let Some(handle) = &input_handle {
|
||||
|
@ -510,9 +518,15 @@ impl AssetServer {
|
|||
error: e.clone(),
|
||||
});
|
||||
}
|
||||
e
|
||||
})?;
|
||||
|
||||
if let Some(meta_transform) = input_handle.as_ref().and_then(|h| h.meta_transform()) {
|
||||
(*meta_transform)(&mut *meta);
|
||||
}
|
||||
// downgrade the input handle so we don't keep the asset alive just because we're loading it
|
||||
// note we can't just pass a weak handle in, as only strong handles contain the asset meta transform
|
||||
input_handle = input_handle.map(|h| h.clone_weak());
|
||||
|
||||
// This contains Some(UntypedHandle), if it was retrievable
|
||||
// If it is None, that is because it was _not_ retrievable, due to
|
||||
// 1. The handle was not already passed in for this path, meaning we can't just use that
|
||||
|
@ -581,10 +595,6 @@ impl AssetServer {
|
|||
(handle.clone().unwrap(), path.clone())
|
||||
};
|
||||
|
||||
if let Some(meta_transform) = base_handle.meta_transform() {
|
||||
(*meta_transform)(&mut *meta);
|
||||
}
|
||||
|
||||
match self
|
||||
.load_with_meta_loader_and_reader(&base_path, meta, &*loader, &mut *reader, true, false)
|
||||
.await
|
||||
|
@ -722,40 +732,42 @@ impl AssetServer {
|
|||
&self,
|
||||
future: impl Future<Output = Result<A, E>> + Send + 'static,
|
||||
) -> Handle<A> {
|
||||
let handle = self
|
||||
.data
|
||||
.infos
|
||||
.write()
|
||||
.create_loading_handle_untyped(std::any::TypeId::of::<A>(), std::any::type_name::<A>());
|
||||
let mut infos = self.data.infos.write();
|
||||
let handle =
|
||||
infos.create_loading_handle_untyped(TypeId::of::<A>(), std::any::type_name::<A>());
|
||||
let id = handle.id();
|
||||
|
||||
let event_sender = self.data.asset_event_sender.clone();
|
||||
|
||||
IoTaskPool::get()
|
||||
.spawn(async move {
|
||||
match future.await {
|
||||
Ok(asset) => {
|
||||
let loaded_asset = LoadedAsset::new_with_dependencies(asset, None).into();
|
||||
event_sender
|
||||
.send(InternalAssetEvent::Loaded { id, loaded_asset })
|
||||
.unwrap();
|
||||
}
|
||||
Err(error) => {
|
||||
let error = AddAsyncError {
|
||||
error: Arc::new(error),
|
||||
};
|
||||
error!("{error}");
|
||||
event_sender
|
||||
.send(InternalAssetEvent::Failed {
|
||||
id,
|
||||
path: Default::default(),
|
||||
error: AssetLoadError::AddAsyncError(error),
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
let task = IoTaskPool::get().spawn(async move {
|
||||
match future.await {
|
||||
Ok(asset) => {
|
||||
let loaded_asset = LoadedAsset::new_with_dependencies(asset, None).into();
|
||||
event_sender
|
||||
.send(InternalAssetEvent::Loaded { id, loaded_asset })
|
||||
.unwrap();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
Err(error) => {
|
||||
let error = AddAsyncError {
|
||||
error: Arc::new(error),
|
||||
};
|
||||
error!("{error}");
|
||||
event_sender
|
||||
.send(InternalAssetEvent::Failed {
|
||||
id,
|
||||
path: Default::default(),
|
||||
error: AssetLoadError::AddAsyncError(error),
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
#[cfg(not(any(target_arch = "wasm32", not(feature = "multi_threaded"))))]
|
||||
infos.pending_tasks.insert(id, task);
|
||||
|
||||
#[cfg(any(target_arch = "wasm32", not(feature = "multi_threaded")))]
|
||||
task.detach();
|
||||
|
||||
handle.typed_debug_checked()
|
||||
}
|
||||
|
@ -1177,13 +1189,20 @@ impl AssetServer {
|
|||
let asset_path = asset_path.clone_owned();
|
||||
let load_context =
|
||||
LoadContext::new(self, asset_path.clone(), load_dependencies, populate_hashes);
|
||||
loader.load(reader, meta, load_context).await.map_err(|e| {
|
||||
AssetLoadError::AssetLoaderError(AssetLoaderError {
|
||||
AssertUnwindSafe(loader.load(reader, meta, load_context))
|
||||
.catch_unwind()
|
||||
.await
|
||||
.map_err(|_| AssetLoadError::AssetLoaderPanic {
|
||||
path: asset_path.clone_owned(),
|
||||
loader_name: loader.type_name(),
|
||||
error: e.into(),
|
||||
})?
|
||||
.map_err(|e| {
|
||||
AssetLoadError::AssetLoaderError(AssetLoaderError {
|
||||
path: asset_path.clone_owned(),
|
||||
loader_name: loader.type_name(),
|
||||
error: e.into(),
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1306,6 +1325,11 @@ pub fn handle_internal_asset_events(world: &mut World) {
|
|||
info!("Reloading {path} because it has changed");
|
||||
server.reload(path);
|
||||
}
|
||||
|
||||
#[cfg(not(any(target_arch = "wasm32", not(feature = "multi_threaded"))))]
|
||||
infos
|
||||
.pending_tasks
|
||||
.retain(|_, load_task| !load_task.is_finished());
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1405,6 +1429,11 @@ pub enum AssetLoadError {
|
|||
CannotLoadProcessedAsset { path: AssetPath<'static> },
|
||||
#[error("Asset '{path}' is configured to be ignored. It cannot be loaded.")]
|
||||
CannotLoadIgnoredAsset { path: AssetPath<'static> },
|
||||
#[error("Failed to load asset '{path}', asset loader '{loader_name}' panicked")]
|
||||
AssetLoaderPanic {
|
||||
path: AssetPath<'static>,
|
||||
loader_name: &'static str,
|
||||
},
|
||||
#[error(transparent)]
|
||||
AssetLoaderError(#[from] AssetLoaderError),
|
||||
#[error(transparent)]
|
||||
|
|
|
@ -52,5 +52,5 @@ android_shared_stdcxx = ["cpal/oboe-shared-stdcxx"]
|
|||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
|
|
|
@ -17,8 +17,8 @@ bevy_reflect = { path = "../bevy_reflect", version = "0.15.0-dev", features = [
|
|||
bytemuck = { version = "1", features = ["derive"] }
|
||||
serde = { version = "1.0", features = ["derive"], optional = true }
|
||||
thiserror = "1.0"
|
||||
wgpu-types = { version = "0.20", default-features = false, optional = true }
|
||||
encase = { version = "0.8", default-features = false }
|
||||
wgpu-types = { version = "22", default-features = false, optional = true }
|
||||
encase = { version = "0.9", default-features = false }
|
||||
|
||||
[features]
|
||||
default = ["bevy_reflect"]
|
||||
|
@ -28,5 +28,5 @@ serialize = ["serde"]
|
|||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
|
|
|
@ -183,7 +183,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_gray() {
|
||||
verify_gray::<crate::Hsla>();
|
||||
verify_gray::<Hsla>();
|
||||
verify_gray::<crate::Hsva>();
|
||||
verify_gray::<crate::Hwba>();
|
||||
verify_gray::<crate::Laba>();
|
||||
|
|
|
@ -157,7 +157,11 @@ impl From<Hwba> for Hsva {
|
|||
) -> Self {
|
||||
// Based on https://en.wikipedia.org/wiki/HWB_color_model#Conversion
|
||||
let value = 1. - blackness;
|
||||
let saturation = 1. - (whiteness / value);
|
||||
let saturation = if value != 0. {
|
||||
1. - (whiteness / value)
|
||||
} else {
|
||||
0.
|
||||
};
|
||||
|
||||
Hsva::new(hue, saturation, value, alpha)
|
||||
}
|
||||
|
|
|
@ -118,7 +118,7 @@ impl Mix for Lcha {
|
|||
Self {
|
||||
lightness: self.lightness * n_factor + other.lightness * factor,
|
||||
chroma: self.chroma * n_factor + other.chroma * factor,
|
||||
hue: self.hue * n_factor + other.hue * factor,
|
||||
hue: crate::color_ops::lerp_hue(self.hue, other.hue, factor),
|
||||
alpha: self.alpha * n_factor + other.alpha * factor,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -114,7 +114,7 @@ impl Mix for Oklcha {
|
|||
Self {
|
||||
lightness: self.lightness * n_factor + other.lightness * factor,
|
||||
chroma: self.chroma * n_factor + other.chroma * factor,
|
||||
hue: self.hue * n_factor + other.hue * factor,
|
||||
hue: crate::color_ops::lerp_hue(self.hue, other.hue, factor),
|
||||
alpha: self.alpha * n_factor + other.alpha * factor,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -39,5 +39,5 @@ serde_test = "1.0"
|
|||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
|
|
|
@ -45,5 +45,5 @@ thiserror = "1.0"
|
|||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
|
|
|
@ -42,6 +42,9 @@ pub struct AutoExposureCompensationCurve {
|
|||
/// Various errors that can occur when constructing an [`AutoExposureCompensationCurve`].
|
||||
#[derive(Error, Debug)]
|
||||
pub enum AutoExposureCompensationCurveError {
|
||||
/// The curve couldn't be built in the first place.
|
||||
#[error("curve could not be constructed from the given data")]
|
||||
InvalidCurve,
|
||||
/// A discontinuity was found in the curve.
|
||||
#[error("discontinuity found between curve segments")]
|
||||
DiscontinuityFound,
|
||||
|
@ -99,7 +102,9 @@ impl AutoExposureCompensationCurve {
|
|||
where
|
||||
T: CubicGenerator<Vec2>,
|
||||
{
|
||||
let curve = curve.to_curve();
|
||||
let Ok(curve) = curve.to_curve() else {
|
||||
return Err(AutoExposureCompensationCurveError::InvalidCurve);
|
||||
};
|
||||
|
||||
let min_log_lum = curve.position(0.0).x;
|
||||
let max_log_lum = curve.position(curve.segments().len() as f32).x;
|
||||
|
|
|
@ -110,7 +110,7 @@ impl FromWorld for AutoExposureResources {
|
|||
|
||||
fn queue_view_auto_exposure_pipelines(
|
||||
mut commands: Commands,
|
||||
pipeline_cache: ResMut<PipelineCache>,
|
||||
pipeline_cache: Res<PipelineCache>,
|
||||
mut compute_pipelines: ResMut<SpecializedComputePipelines<AutoExposurePipeline>>,
|
||||
pipeline: Res<AutoExposurePipeline>,
|
||||
view_targets: Query<(Entity, &AutoExposureSettings)>,
|
||||
|
|
|
@ -10,6 +10,7 @@ struct BloomUniforms {
|
|||
threshold_precomputations: vec4<f32>,
|
||||
viewport: vec4<f32>,
|
||||
aspect: f32,
|
||||
uv_offset: f32
|
||||
};
|
||||
|
||||
@group(0) @binding(0) var input_texture: texture_2d<f32>;
|
||||
|
@ -94,9 +95,9 @@ fn sample_input_13_tap(uv: vec2<f32>) -> vec3<f32> {
|
|||
|
||||
// [COD] slide 162
|
||||
fn sample_input_3x3_tent(uv: vec2<f32>) -> vec3<f32> {
|
||||
// Radius. Empirically chosen by and tweaked from the LearnOpenGL article.
|
||||
let x = 0.004 / uniforms.aspect;
|
||||
let y = 0.004;
|
||||
// UV offsets configured from uniforms.
|
||||
let x = uniforms.uv_offset / uniforms.aspect;
|
||||
let y = uniforms.uv_offset;
|
||||
|
||||
let a = textureSample(input_texture, s, vec2<f32>(uv.x - x, uv.y + y)).rgb;
|
||||
let b = textureSample(input_texture, s, vec2<f32>(uv.x, uv.y + y)).rgb;
|
||||
|
|
|
@ -41,6 +41,7 @@ pub struct BloomUniforms {
|
|||
pub threshold_precomputations: Vec4,
|
||||
pub viewport: Vec4,
|
||||
pub aspect: f32,
|
||||
pub uv_offset: f32,
|
||||
}
|
||||
|
||||
impl FromWorld for BloomDownsamplingPipeline {
|
||||
|
|
|
@ -38,10 +38,6 @@ const BLOOM_SHADER_HANDLE: Handle<Shader> = Handle::weak_from_u128(9295994769239
|
|||
|
||||
const BLOOM_TEXTURE_FORMAT: TextureFormat = TextureFormat::Rg11b10Float;
|
||||
|
||||
// Maximum size of each dimension for the largest mipchain texture used in downscaling/upscaling.
|
||||
// 512 behaves well with the UV offset of 0.004 used in bloom.wgsl
|
||||
const MAX_MIP_DIMENSION: u32 = 512;
|
||||
|
||||
pub struct BloomPlugin;
|
||||
|
||||
impl Plugin for BloomPlugin {
|
||||
|
@ -328,17 +324,21 @@ fn prepare_bloom_textures(
|
|||
mut commands: Commands,
|
||||
mut texture_cache: ResMut<TextureCache>,
|
||||
render_device: Res<RenderDevice>,
|
||||
views: Query<(Entity, &ExtractedCamera), With<BloomSettings>>,
|
||||
views: Query<(Entity, &ExtractedCamera, &BloomSettings)>,
|
||||
) {
|
||||
for (entity, camera) in &views {
|
||||
for (entity, camera, settings) in &views {
|
||||
if let Some(UVec2 {
|
||||
x: width,
|
||||
y: height,
|
||||
}) = camera.physical_viewport_size
|
||||
{
|
||||
// How many times we can halve the resolution minus one so we don't go unnecessarily low
|
||||
let mip_count = MAX_MIP_DIMENSION.ilog2().max(2) - 1;
|
||||
let mip_height_ratio = MAX_MIP_DIMENSION as f32 / height as f32;
|
||||
let mip_count = settings.max_mip_dimension.ilog2().max(2) - 1;
|
||||
let mip_height_ratio = if height != 0 {
|
||||
settings.max_mip_dimension as f32 / height as f32
|
||||
} else {
|
||||
0.
|
||||
};
|
||||
|
||||
let texture_descriptor = TextureDescriptor {
|
||||
label: Some("bloom_texture"),
|
||||
|
|
|
@ -102,9 +102,20 @@ pub struct BloomSettings {
|
|||
/// configured in a non-energy-conserving way,
|
||||
/// otherwise set to [`BloomCompositeMode::EnergyConserving`].
|
||||
pub composite_mode: BloomCompositeMode,
|
||||
|
||||
/// Maximum size of each dimension for the largest mipchain texture used in downscaling/upscaling.
|
||||
/// Only tweak if you are seeing visual artifacts.
|
||||
pub max_mip_dimension: u32,
|
||||
|
||||
/// UV offset for bloom shader. Ideally close to 2.0 / `max_mip_dimension`.
|
||||
/// Only tweak if you are seeing visual artifacts.
|
||||
pub uv_offset: f32,
|
||||
}
|
||||
|
||||
impl BloomSettings {
|
||||
const DEFAULT_MAX_MIP_DIMENSION: u32 = 512;
|
||||
const DEFAULT_UV_OFFSET: f32 = 0.004;
|
||||
|
||||
/// The default bloom preset.
|
||||
///
|
||||
/// This uses the [`EnergyConserving`](BloomCompositeMode::EnergyConserving) composite mode.
|
||||
|
@ -118,6 +129,8 @@ impl BloomSettings {
|
|||
threshold_softness: 0.0,
|
||||
},
|
||||
composite_mode: BloomCompositeMode::EnergyConserving,
|
||||
max_mip_dimension: Self::DEFAULT_MAX_MIP_DIMENSION,
|
||||
uv_offset: Self::DEFAULT_UV_OFFSET,
|
||||
};
|
||||
|
||||
/// A preset that's similar to how older games did bloom.
|
||||
|
@ -131,6 +144,8 @@ impl BloomSettings {
|
|||
threshold_softness: 0.2,
|
||||
},
|
||||
composite_mode: BloomCompositeMode::Additive,
|
||||
max_mip_dimension: Self::DEFAULT_MAX_MIP_DIMENSION,
|
||||
uv_offset: Self::DEFAULT_UV_OFFSET,
|
||||
};
|
||||
|
||||
/// A preset that applies a very strong bloom, and blurs the whole screen.
|
||||
|
@ -144,6 +159,8 @@ impl BloomSettings {
|
|||
threshold_softness: 0.0,
|
||||
},
|
||||
composite_mode: BloomCompositeMode::EnergyConserving,
|
||||
max_mip_dimension: Self::DEFAULT_MAX_MIP_DIMENSION,
|
||||
uv_offset: Self::DEFAULT_UV_OFFSET,
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -213,6 +230,7 @@ impl ExtractComponent for BloomSettings {
|
|||
/ UVec4::new(target_size.x, target_size.y, target_size.x, target_size.y)
|
||||
.as_vec4(),
|
||||
aspect: AspectRatio::from_pixels(size.x, size.y).into(),
|
||||
uv_offset: settings.uv_offset,
|
||||
};
|
||||
|
||||
Some((settings.clone(), uniform))
|
||||
|
|
|
@ -23,7 +23,7 @@ use bevy_render::{
|
|||
|
||||
mod node;
|
||||
|
||||
pub use node::CASNode;
|
||||
pub use node::CasNode;
|
||||
|
||||
/// Applies a contrast adaptive sharpening (CAS) filter to the camera.
|
||||
///
|
||||
|
@ -66,28 +66,28 @@ impl Default for ContrastAdaptiveSharpeningSettings {
|
|||
|
||||
#[derive(Component, Default, Reflect, Clone)]
|
||||
#[reflect(Component)]
|
||||
pub struct DenoiseCAS(bool);
|
||||
pub struct DenoiseCas(bool);
|
||||
|
||||
/// The uniform struct extracted from [`ContrastAdaptiveSharpeningSettings`] attached to a [`Camera`].
|
||||
/// Will be available for use in the CAS shader.
|
||||
#[doc(hidden)]
|
||||
#[derive(Component, ShaderType, Clone)]
|
||||
pub struct CASUniform {
|
||||
pub struct CasUniform {
|
||||
sharpness: f32,
|
||||
}
|
||||
|
||||
impl ExtractComponent for ContrastAdaptiveSharpeningSettings {
|
||||
type QueryData = &'static Self;
|
||||
type QueryFilter = With<Camera>;
|
||||
type Out = (DenoiseCAS, CASUniform);
|
||||
type Out = (DenoiseCas, CasUniform);
|
||||
|
||||
fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
|
||||
if !item.enabled || item.sharpening_strength == 0.0 {
|
||||
return None;
|
||||
}
|
||||
Some((
|
||||
DenoiseCAS(item.denoise),
|
||||
CASUniform {
|
||||
DenoiseCas(item.denoise),
|
||||
CasUniform {
|
||||
// above 1.0 causes extreme artifacts and fireflies
|
||||
sharpness: item.sharpening_strength.clamp(0.0, 1.0),
|
||||
},
|
||||
|
@ -99,9 +99,9 @@ const CONTRAST_ADAPTIVE_SHARPENING_SHADER_HANDLE: Handle<Shader> =
|
|||
Handle::weak_from_u128(6925381244141981602);
|
||||
|
||||
/// Adds Support for Contrast Adaptive Sharpening (CAS).
|
||||
pub struct CASPlugin;
|
||||
pub struct CasPlugin;
|
||||
|
||||
impl Plugin for CASPlugin {
|
||||
impl Plugin for CasPlugin {
|
||||
fn build(&self, app: &mut App) {
|
||||
load_internal_asset!(
|
||||
app,
|
||||
|
@ -113,19 +113,19 @@ impl Plugin for CASPlugin {
|
|||
app.register_type::<ContrastAdaptiveSharpeningSettings>();
|
||||
app.add_plugins((
|
||||
ExtractComponentPlugin::<ContrastAdaptiveSharpeningSettings>::default(),
|
||||
UniformComponentPlugin::<CASUniform>::default(),
|
||||
UniformComponentPlugin::<CasUniform>::default(),
|
||||
));
|
||||
|
||||
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
|
||||
return;
|
||||
};
|
||||
render_app
|
||||
.init_resource::<SpecializedRenderPipelines<CASPipeline>>()
|
||||
.init_resource::<SpecializedRenderPipelines<CasPipeline>>()
|
||||
.add_systems(Render, prepare_cas_pipelines.in_set(RenderSet::Prepare));
|
||||
|
||||
{
|
||||
render_app
|
||||
.add_render_graph_node::<CASNode>(Core3d, Node3d::ContrastAdaptiveSharpening)
|
||||
.add_render_graph_node::<CasNode>(Core3d, Node3d::ContrastAdaptiveSharpening)
|
||||
.add_render_graph_edge(
|
||||
Core3d,
|
||||
Node3d::Tonemapping,
|
||||
|
@ -142,7 +142,7 @@ impl Plugin for CASPlugin {
|
|||
}
|
||||
{
|
||||
render_app
|
||||
.add_render_graph_node::<CASNode>(Core2d, Node2d::ContrastAdaptiveSharpening)
|
||||
.add_render_graph_node::<CasNode>(Core2d, Node2d::ContrastAdaptiveSharpening)
|
||||
.add_render_graph_edge(
|
||||
Core2d,
|
||||
Node2d::Tonemapping,
|
||||
|
@ -163,17 +163,17 @@ impl Plugin for CASPlugin {
|
|||
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
|
||||
return;
|
||||
};
|
||||
render_app.init_resource::<CASPipeline>();
|
||||
render_app.init_resource::<CasPipeline>();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Resource)]
|
||||
pub struct CASPipeline {
|
||||
pub struct CasPipeline {
|
||||
texture_bind_group: BindGroupLayout,
|
||||
sampler: Sampler,
|
||||
}
|
||||
|
||||
impl FromWorld for CASPipeline {
|
||||
impl FromWorld for CasPipeline {
|
||||
fn from_world(render_world: &mut World) -> Self {
|
||||
let render_device = render_world.resource::<RenderDevice>();
|
||||
let texture_bind_group = render_device.create_bind_group_layout(
|
||||
|
@ -184,14 +184,14 @@ impl FromWorld for CASPipeline {
|
|||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
sampler(SamplerBindingType::Filtering),
|
||||
// CAS Settings
|
||||
uniform_buffer::<CASUniform>(true),
|
||||
uniform_buffer::<CasUniform>(true),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
let sampler = render_device.create_sampler(&SamplerDescriptor::default());
|
||||
|
||||
CASPipeline {
|
||||
CasPipeline {
|
||||
texture_bind_group,
|
||||
sampler,
|
||||
}
|
||||
|
@ -199,13 +199,13 @@ impl FromWorld for CASPipeline {
|
|||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
|
||||
pub struct CASPipelineKey {
|
||||
pub struct CasPipelineKey {
|
||||
texture_format: TextureFormat,
|
||||
denoise: bool,
|
||||
}
|
||||
|
||||
impl SpecializedRenderPipeline for CASPipeline {
|
||||
type Key = CASPipelineKey;
|
||||
impl SpecializedRenderPipeline for CasPipeline {
|
||||
type Key = CasPipelineKey;
|
||||
|
||||
fn specialize(&self, key: Self::Key) -> RenderPipelineDescriptor {
|
||||
let mut shader_defs = vec![];
|
||||
|
@ -237,15 +237,15 @@ impl SpecializedRenderPipeline for CASPipeline {
|
|||
fn prepare_cas_pipelines(
|
||||
mut commands: Commands,
|
||||
pipeline_cache: Res<PipelineCache>,
|
||||
mut pipelines: ResMut<SpecializedRenderPipelines<CASPipeline>>,
|
||||
sharpening_pipeline: Res<CASPipeline>,
|
||||
views: Query<(Entity, &ExtractedView, &DenoiseCAS), With<CASUniform>>,
|
||||
mut pipelines: ResMut<SpecializedRenderPipelines<CasPipeline>>,
|
||||
sharpening_pipeline: Res<CasPipeline>,
|
||||
views: Query<(Entity, &ExtractedView, &DenoiseCas), With<CasUniform>>,
|
||||
) {
|
||||
for (entity, view, cas_settings) in &views {
|
||||
let pipeline_id = pipelines.specialize(
|
||||
&pipeline_cache,
|
||||
&sharpening_pipeline,
|
||||
CASPipelineKey {
|
||||
CasPipelineKey {
|
||||
denoise: cas_settings.0,
|
||||
texture_format: if view.hdr {
|
||||
ViewTarget::TEXTURE_FORMAT_HDR
|
||||
|
@ -255,9 +255,9 @@ fn prepare_cas_pipelines(
|
|||
},
|
||||
);
|
||||
|
||||
commands.entity(entity).insert(ViewCASPipeline(pipeline_id));
|
||||
commands.entity(entity).insert(ViewCasPipeline(pipeline_id));
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Component)]
|
||||
pub struct ViewCASPipeline(CachedRenderPipelineId);
|
||||
pub struct ViewCasPipeline(CachedRenderPipelineId);
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::sync::Mutex;
|
||||
|
||||
use crate::contrast_adaptive_sharpening::ViewCASPipeline;
|
||||
use crate::contrast_adaptive_sharpening::ViewCasPipeline;
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_render::{
|
||||
extract_component::{ComponentUniforms, DynamicUniformIndex},
|
||||
|
@ -13,21 +13,21 @@ use bevy_render::{
|
|||
view::{ExtractedView, ViewTarget},
|
||||
};
|
||||
|
||||
use super::{CASPipeline, CASUniform};
|
||||
use super::{CasPipeline, CasUniform};
|
||||
|
||||
pub struct CASNode {
|
||||
pub struct CasNode {
|
||||
query: QueryState<
|
||||
(
|
||||
&'static ViewTarget,
|
||||
&'static ViewCASPipeline,
|
||||
&'static DynamicUniformIndex<CASUniform>,
|
||||
&'static ViewCasPipeline,
|
||||
&'static DynamicUniformIndex<CasUniform>,
|
||||
),
|
||||
With<ExtractedView>,
|
||||
>,
|
||||
cached_bind_group: Mutex<Option<(BufferId, TextureViewId, BindGroup)>>,
|
||||
}
|
||||
|
||||
impl FromWorld for CASNode {
|
||||
impl FromWorld for CasNode {
|
||||
fn from_world(world: &mut World) -> Self {
|
||||
Self {
|
||||
query: QueryState::new(world),
|
||||
|
@ -36,7 +36,7 @@ impl FromWorld for CASNode {
|
|||
}
|
||||
}
|
||||
|
||||
impl Node for CASNode {
|
||||
impl Node for CasNode {
|
||||
fn update(&mut self, world: &mut World) {
|
||||
self.query.update_archetypes(world);
|
||||
}
|
||||
|
@ -49,8 +49,8 @@ impl Node for CASNode {
|
|||
) -> Result<(), NodeRunError> {
|
||||
let view_entity = graph.view_entity();
|
||||
let pipeline_cache = world.resource::<PipelineCache>();
|
||||
let sharpening_pipeline = world.resource::<CASPipeline>();
|
||||
let uniforms = world.resource::<ComponentUniforms<CASUniform>>();
|
||||
let sharpening_pipeline = world.resource::<CasPipeline>();
|
||||
let uniforms = world.resource::<ComponentUniforms<CasUniform>>();
|
||||
|
||||
let Ok((target, pipeline, uniform_index)) = self.query.get_manual(world, view_entity)
|
||||
else {
|
||||
|
|
|
@ -2,6 +2,7 @@ use crate::core_2d::graph::Core2d;
|
|||
use crate::tonemapping::{DebandDither, Tonemapping};
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_reflect::Reflect;
|
||||
use bevy_render::prelude::Msaa;
|
||||
use bevy_render::{
|
||||
camera::{
|
||||
Camera, CameraMainTextureUsages, CameraProjection, CameraRenderGraph,
|
||||
|
@ -35,6 +36,7 @@ pub struct Camera2dBundle {
|
|||
pub tonemapping: Tonemapping,
|
||||
pub deband_dither: DebandDither,
|
||||
pub main_texture_usages: CameraMainTextureUsages,
|
||||
pub msaa: Msaa,
|
||||
}
|
||||
|
||||
impl Default for Camera2dBundle {
|
||||
|
@ -58,6 +60,7 @@ impl Default for Camera2dBundle {
|
|||
tonemapping: Tonemapping::None,
|
||||
deband_dither: DebandDither::Disabled,
|
||||
main_texture_usages: Default::default(),
|
||||
msaa: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -90,6 +93,7 @@ impl Camera2dBundle {
|
|||
tonemapping: Tonemapping::None,
|
||||
deband_dither: DebandDither::Disabled,
|
||||
main_texture_usages: Default::default(),
|
||||
msaa: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,105 @@
|
|||
use crate::core_2d::Opaque2d;
|
||||
use bevy_ecs::{prelude::World, query::QueryItem};
|
||||
use bevy_render::{
|
||||
camera::ExtractedCamera,
|
||||
diagnostic::RecordDiagnostics,
|
||||
render_graph::{NodeRunError, RenderGraphContext, ViewNode},
|
||||
render_phase::{TrackedRenderPass, ViewBinnedRenderPhases},
|
||||
render_resource::{CommandEncoderDescriptor, RenderPassDescriptor, StoreOp},
|
||||
renderer::RenderContext,
|
||||
view::{ViewDepthTexture, ViewTarget},
|
||||
};
|
||||
use bevy_utils::tracing::error;
|
||||
#[cfg(feature = "trace")]
|
||||
use bevy_utils::tracing::info_span;
|
||||
|
||||
use super::AlphaMask2d;
|
||||
|
||||
/// A [`bevy_render::render_graph::Node`] that runs the
|
||||
/// [`Opaque2d`] [`ViewBinnedRenderPhases`] and [`AlphaMask2d`] [`ViewBinnedRenderPhases`]
|
||||
#[derive(Default)]
|
||||
pub struct MainOpaquePass2dNode;
|
||||
impl ViewNode for MainOpaquePass2dNode {
|
||||
type ViewQuery = (
|
||||
&'static ExtractedCamera,
|
||||
&'static ViewTarget,
|
||||
&'static ViewDepthTexture,
|
||||
);
|
||||
|
||||
fn run<'w>(
|
||||
&self,
|
||||
graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext<'w>,
|
||||
(camera, target, depth): QueryItem<'w, Self::ViewQuery>,
|
||||
world: &'w World,
|
||||
) -> Result<(), NodeRunError> {
|
||||
let (Some(opaque_phases), Some(alpha_mask_phases)) = (
|
||||
world.get_resource::<ViewBinnedRenderPhases<Opaque2d>>(),
|
||||
world.get_resource::<ViewBinnedRenderPhases<AlphaMask2d>>(),
|
||||
) else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let diagnostics = render_context.diagnostic_recorder();
|
||||
|
||||
let color_attachments = [Some(target.get_color_attachment())];
|
||||
let depth_stencil_attachment = Some(depth.get_attachment(StoreOp::Store));
|
||||
|
||||
let view_entity = graph.view_entity();
|
||||
let (Some(opaque_phase), Some(alpha_mask_phase)) = (
|
||||
opaque_phases.get(&view_entity),
|
||||
alpha_mask_phases.get(&view_entity),
|
||||
) else {
|
||||
return Ok(());
|
||||
};
|
||||
render_context.add_command_buffer_generation_task(move |render_device| {
|
||||
#[cfg(feature = "trace")]
|
||||
let _main_opaque_pass_2d_span = info_span!("main_opaque_pass_2d").entered();
|
||||
|
||||
// Command encoder setup
|
||||
let mut command_encoder =
|
||||
render_device.create_command_encoder(&CommandEncoderDescriptor {
|
||||
label: Some("main_opaque_pass_2d_command_encoder"),
|
||||
});
|
||||
|
||||
// Render pass setup
|
||||
let render_pass = command_encoder.begin_render_pass(&RenderPassDescriptor {
|
||||
label: Some("main_opaque_pass_2d"),
|
||||
color_attachments: &color_attachments,
|
||||
depth_stencil_attachment,
|
||||
timestamp_writes: None,
|
||||
occlusion_query_set: None,
|
||||
});
|
||||
let mut render_pass = TrackedRenderPass::new(&render_device, render_pass);
|
||||
let pass_span = diagnostics.pass_span(&mut render_pass, "main_opaque_pass_2d");
|
||||
|
||||
if let Some(viewport) = camera.viewport.as_ref() {
|
||||
render_pass.set_camera_viewport(viewport);
|
||||
}
|
||||
|
||||
// Opaque draws
|
||||
if !opaque_phase.is_empty() {
|
||||
#[cfg(feature = "trace")]
|
||||
let _opaque_main_pass_2d_span = info_span!("opaque_main_pass_2d").entered();
|
||||
if let Err(err) = opaque_phase.render(&mut render_pass, world, view_entity) {
|
||||
error!("Error encountered while rendering the 2d opaque phase {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
// Alpha mask draws
|
||||
if !alpha_mask_phase.is_empty() {
|
||||
#[cfg(feature = "trace")]
|
||||
let _alpha_mask_main_pass_2d_span = info_span!("alpha_mask_main_pass_2d").entered();
|
||||
if let Err(err) = alpha_mask_phase.render(&mut render_pass, world, view_entity) {
|
||||
error!("Error encountered while rendering the 2d alpha mask phase {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
pass_span.end(&mut render_pass);
|
||||
drop(render_pass);
|
||||
command_encoder.finish()
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -5,10 +5,11 @@ use bevy_render::{
|
|||
diagnostic::RecordDiagnostics,
|
||||
render_graph::{NodeRunError, RenderGraphContext, ViewNode},
|
||||
render_phase::ViewSortedRenderPhases,
|
||||
render_resource::RenderPassDescriptor,
|
||||
render_resource::{RenderPassDescriptor, StoreOp},
|
||||
renderer::RenderContext,
|
||||
view::ViewTarget,
|
||||
view::{ViewDepthTexture, ViewTarget},
|
||||
};
|
||||
use bevy_utils::tracing::error;
|
||||
#[cfg(feature = "trace")]
|
||||
use bevy_utils::tracing::info_span;
|
||||
|
||||
|
@ -16,13 +17,17 @@ use bevy_utils::tracing::info_span;
|
|||
pub struct MainTransparentPass2dNode {}
|
||||
|
||||
impl ViewNode for MainTransparentPass2dNode {
|
||||
type ViewQuery = (&'static ExtractedCamera, &'static ViewTarget);
|
||||
type ViewQuery = (
|
||||
&'static ExtractedCamera,
|
||||
&'static ViewTarget,
|
||||
&'static ViewDepthTexture,
|
||||
);
|
||||
|
||||
fn run<'w>(
|
||||
&self,
|
||||
graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext<'w>,
|
||||
(camera, target): bevy_ecs::query::QueryItem<'w, Self::ViewQuery>,
|
||||
(camera, target, depth): bevy_ecs::query::QueryItem<'w, Self::ViewQuery>,
|
||||
world: &'w World,
|
||||
) -> Result<(), NodeRunError> {
|
||||
let Some(transparent_phases) =
|
||||
|
@ -46,7 +51,13 @@ impl ViewNode for MainTransparentPass2dNode {
|
|||
let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor {
|
||||
label: Some("main_transparent_pass_2d"),
|
||||
color_attachments: &[Some(target.get_color_attachment())],
|
||||
depth_stencil_attachment: None,
|
||||
// NOTE: For the transparent pass we load the depth buffer. There should be no
|
||||
// need to write to it, but store is set to `true` as a workaround for issue #3776,
|
||||
// https://github.com/bevyengine/bevy/issues/3776
|
||||
// so that wgpu does not clear the depth buffer.
|
||||
// As the opaque and alpha mask passes run first, opaque meshes can occlude
|
||||
// transparent ones.
|
||||
depth_stencil_attachment: Some(depth.get_attachment(StoreOp::Store)),
|
||||
timestamp_writes: None,
|
||||
occlusion_query_set: None,
|
||||
});
|
||||
|
@ -58,7 +69,12 @@ impl ViewNode for MainTransparentPass2dNode {
|
|||
}
|
||||
|
||||
if !transparent_phase.items.is_empty() {
|
||||
transparent_phase.render(&mut render_pass, world, view_entity);
|
||||
#[cfg(feature = "trace")]
|
||||
let _transparent_main_pass_2d_span =
|
||||
info_span!("transparent_main_pass_2d").entered();
|
||||
if let Err(err) = transparent_phase.render(&mut render_pass, world, view_entity) {
|
||||
error!("Error encountered while rendering the transparent 2D phase {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
pass_span.end(&mut render_pass);
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
mod camera_2d;
|
||||
mod main_opaque_pass_2d_node;
|
||||
mod main_transparent_pass_2d_node;
|
||||
|
||||
pub mod graph {
|
||||
|
@ -15,6 +16,7 @@ pub mod graph {
|
|||
pub enum Node2d {
|
||||
MsaaWriteback,
|
||||
StartMainPass,
|
||||
MainOpaquePass,
|
||||
MainTransparentPass,
|
||||
EndMainPass,
|
||||
Bloom,
|
||||
|
@ -30,21 +32,31 @@ pub mod graph {
|
|||
|
||||
use std::ops::Range;
|
||||
|
||||
use bevy_asset::UntypedAssetId;
|
||||
use bevy_utils::HashMap;
|
||||
pub use camera_2d::*;
|
||||
pub use main_opaque_pass_2d_node::*;
|
||||
pub use main_transparent_pass_2d_node::*;
|
||||
|
||||
use bevy_app::{App, Plugin};
|
||||
use bevy_ecs::{entity::EntityHashSet, prelude::*};
|
||||
use bevy_math::FloatOrd;
|
||||
use bevy_render::{
|
||||
camera::Camera,
|
||||
camera::{Camera, ExtractedCamera},
|
||||
extract_component::ExtractComponentPlugin,
|
||||
render_graph::{EmptyNode, RenderGraphApp, ViewNodeRunner},
|
||||
render_phase::{
|
||||
sort_phase_system, CachedRenderPipelinePhaseItem, DrawFunctionId, DrawFunctions, PhaseItem,
|
||||
PhaseItemExtraIndex, SortedPhaseItem, ViewSortedRenderPhases,
|
||||
sort_phase_system, BinnedPhaseItem, CachedRenderPipelinePhaseItem, DrawFunctionId,
|
||||
DrawFunctions, PhaseItem, PhaseItemExtraIndex, SortedPhaseItem, ViewBinnedRenderPhases,
|
||||
ViewSortedRenderPhases,
|
||||
},
|
||||
render_resource::CachedRenderPipelineId,
|
||||
render_resource::{
|
||||
BindGroupId, CachedRenderPipelineId, Extent3d, TextureDescriptor, TextureDimension,
|
||||
TextureFormat, TextureUsages,
|
||||
},
|
||||
renderer::RenderDevice,
|
||||
texture::TextureCache,
|
||||
view::{Msaa, ViewDepthTexture},
|
||||
Extract, ExtractSchedule, Render, RenderApp, RenderSet,
|
||||
};
|
||||
|
||||
|
@ -52,6 +64,8 @@ use crate::{tonemapping::TonemappingNode, upscaling::UpscalingNode};
|
|||
|
||||
use self::graph::{Core2d, Node2d};
|
||||
|
||||
pub const CORE_2D_DEPTH_FORMAT: TextureFormat = TextureFormat::Depth32Float;
|
||||
|
||||
pub struct Core2dPlugin;
|
||||
|
||||
impl Plugin for Core2dPlugin {
|
||||
|
@ -63,17 +77,28 @@ impl Plugin for Core2dPlugin {
|
|||
return;
|
||||
};
|
||||
render_app
|
||||
.init_resource::<DrawFunctions<Opaque2d>>()
|
||||
.init_resource::<DrawFunctions<AlphaMask2d>>()
|
||||
.init_resource::<DrawFunctions<Transparent2d>>()
|
||||
.init_resource::<ViewSortedRenderPhases<Transparent2d>>()
|
||||
.init_resource::<ViewBinnedRenderPhases<Opaque2d>>()
|
||||
.init_resource::<ViewBinnedRenderPhases<AlphaMask2d>>()
|
||||
.add_systems(ExtractSchedule, extract_core_2d_camera_phases)
|
||||
.add_systems(
|
||||
Render,
|
||||
sort_phase_system::<Transparent2d>.in_set(RenderSet::PhaseSort),
|
||||
(
|
||||
sort_phase_system::<Transparent2d>.in_set(RenderSet::PhaseSort),
|
||||
prepare_core_2d_depth_textures.in_set(RenderSet::PrepareResources),
|
||||
),
|
||||
);
|
||||
|
||||
render_app
|
||||
.add_render_sub_graph(Core2d)
|
||||
.add_render_graph_node::<EmptyNode>(Core2d, Node2d::StartMainPass)
|
||||
.add_render_graph_node::<ViewNodeRunner<MainOpaquePass2dNode>>(
|
||||
Core2d,
|
||||
Node2d::MainOpaquePass,
|
||||
)
|
||||
.add_render_graph_node::<ViewNodeRunner<MainTransparentPass2dNode>>(
|
||||
Core2d,
|
||||
Node2d::MainTransparentPass,
|
||||
|
@ -86,6 +111,7 @@ impl Plugin for Core2dPlugin {
|
|||
Core2d,
|
||||
(
|
||||
Node2d::StartMainPass,
|
||||
Node2d::MainOpaquePass,
|
||||
Node2d::MainTransparentPass,
|
||||
Node2d::EndMainPass,
|
||||
Node2d::Tonemapping,
|
||||
|
@ -96,6 +122,177 @@ impl Plugin for Core2dPlugin {
|
|||
}
|
||||
}
|
||||
|
||||
/// Opaque 2D [`BinnedPhaseItem`]s.
|
||||
pub struct Opaque2d {
|
||||
/// The key, which determines which can be batched.
|
||||
pub key: Opaque2dBinKey,
|
||||
/// An entity from which data will be fetched, including the mesh if
|
||||
/// applicable.
|
||||
pub representative_entity: Entity,
|
||||
/// The ranges of instances.
|
||||
pub batch_range: Range<u32>,
|
||||
/// An extra index, which is either a dynamic offset or an index in the
|
||||
/// indirect parameters list.
|
||||
pub extra_index: PhaseItemExtraIndex,
|
||||
}
|
||||
|
||||
/// Data that must be identical in order to batch phase items together.
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct Opaque2dBinKey {
|
||||
/// The identifier of the render pipeline.
|
||||
pub pipeline: CachedRenderPipelineId,
|
||||
/// The function used to draw.
|
||||
pub draw_function: DrawFunctionId,
|
||||
/// The asset that this phase item is associated with.
|
||||
///
|
||||
/// Normally, this is the ID of the mesh, but for non-mesh items it might be
|
||||
/// the ID of another type of asset.
|
||||
pub asset_id: UntypedAssetId,
|
||||
/// The ID of a bind group specific to the material.
|
||||
pub material_bind_group_id: Option<BindGroupId>,
|
||||
}
|
||||
|
||||
impl PhaseItem for Opaque2d {
|
||||
#[inline]
|
||||
fn entity(&self) -> Entity {
|
||||
self.representative_entity
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn draw_function(&self) -> DrawFunctionId {
|
||||
self.key.draw_function
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn batch_range(&self) -> &Range<u32> {
|
||||
&self.batch_range
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn batch_range_mut(&mut self) -> &mut Range<u32> {
|
||||
&mut self.batch_range
|
||||
}
|
||||
|
||||
fn extra_index(&self) -> PhaseItemExtraIndex {
|
||||
self.extra_index
|
||||
}
|
||||
|
||||
fn batch_range_and_extra_index_mut(&mut self) -> (&mut Range<u32>, &mut PhaseItemExtraIndex) {
|
||||
(&mut self.batch_range, &mut self.extra_index)
|
||||
}
|
||||
}
|
||||
|
||||
impl BinnedPhaseItem for Opaque2d {
|
||||
type BinKey = Opaque2dBinKey;
|
||||
|
||||
fn new(
|
||||
key: Self::BinKey,
|
||||
representative_entity: Entity,
|
||||
batch_range: Range<u32>,
|
||||
extra_index: PhaseItemExtraIndex,
|
||||
) -> Self {
|
||||
Opaque2d {
|
||||
key,
|
||||
representative_entity,
|
||||
batch_range,
|
||||
extra_index,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CachedRenderPipelinePhaseItem for Opaque2d {
|
||||
#[inline]
|
||||
fn cached_pipeline(&self) -> CachedRenderPipelineId {
|
||||
self.key.pipeline
|
||||
}
|
||||
}
|
||||
|
||||
/// Alpha mask 2D [`BinnedPhaseItem`]s.
|
||||
pub struct AlphaMask2d {
|
||||
/// The key, which determines which can be batched.
|
||||
pub key: AlphaMask2dBinKey,
|
||||
/// An entity from which data will be fetched, including the mesh if
|
||||
/// applicable.
|
||||
pub representative_entity: Entity,
|
||||
/// The ranges of instances.
|
||||
pub batch_range: Range<u32>,
|
||||
/// An extra index, which is either a dynamic offset or an index in the
|
||||
/// indirect parameters list.
|
||||
pub extra_index: PhaseItemExtraIndex,
|
||||
}
|
||||
|
||||
/// Data that must be identical in order to batch phase items together.
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct AlphaMask2dBinKey {
|
||||
/// The identifier of the render pipeline.
|
||||
pub pipeline: CachedRenderPipelineId,
|
||||
/// The function used to draw.
|
||||
pub draw_function: DrawFunctionId,
|
||||
/// The asset that this phase item is associated with.
|
||||
///
|
||||
/// Normally, this is the ID of the mesh, but for non-mesh items it might be
|
||||
/// the ID of another type of asset.
|
||||
pub asset_id: UntypedAssetId,
|
||||
/// The ID of a bind group specific to the material.
|
||||
pub material_bind_group_id: Option<BindGroupId>,
|
||||
}
|
||||
|
||||
impl PhaseItem for AlphaMask2d {
|
||||
#[inline]
|
||||
fn entity(&self) -> Entity {
|
||||
self.representative_entity
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn draw_function(&self) -> DrawFunctionId {
|
||||
self.key.draw_function
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn batch_range(&self) -> &Range<u32> {
|
||||
&self.batch_range
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn batch_range_mut(&mut self) -> &mut Range<u32> {
|
||||
&mut self.batch_range
|
||||
}
|
||||
|
||||
fn extra_index(&self) -> PhaseItemExtraIndex {
|
||||
self.extra_index
|
||||
}
|
||||
|
||||
fn batch_range_and_extra_index_mut(&mut self) -> (&mut Range<u32>, &mut PhaseItemExtraIndex) {
|
||||
(&mut self.batch_range, &mut self.extra_index)
|
||||
}
|
||||
}
|
||||
|
||||
impl BinnedPhaseItem for AlphaMask2d {
|
||||
type BinKey = AlphaMask2dBinKey;
|
||||
|
||||
fn new(
|
||||
key: Self::BinKey,
|
||||
representative_entity: Entity,
|
||||
batch_range: Range<u32>,
|
||||
extra_index: PhaseItemExtraIndex,
|
||||
) -> Self {
|
||||
AlphaMask2d {
|
||||
key,
|
||||
representative_entity,
|
||||
batch_range,
|
||||
extra_index,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CachedRenderPipelinePhaseItem for AlphaMask2d {
|
||||
#[inline]
|
||||
fn cached_pipeline(&self) -> CachedRenderPipelineId {
|
||||
self.key.pipeline
|
||||
}
|
||||
}
|
||||
|
||||
/// Transparent 2D [`SortedPhaseItem`]s.
|
||||
pub struct Transparent2d {
|
||||
pub sort_key: FloatOrd,
|
||||
pub entity: Entity,
|
||||
|
@ -162,6 +359,8 @@ impl CachedRenderPipelinePhaseItem for Transparent2d {
|
|||
pub fn extract_core_2d_camera_phases(
|
||||
mut commands: Commands,
|
||||
mut transparent_2d_phases: ResMut<ViewSortedRenderPhases<Transparent2d>>,
|
||||
mut opaque_2d_phases: ResMut<ViewBinnedRenderPhases<Opaque2d>>,
|
||||
mut alpha_mask_2d_phases: ResMut<ViewBinnedRenderPhases<AlphaMask2d>>,
|
||||
cameras_2d: Extract<Query<(Entity, &Camera), With<Camera2d>>>,
|
||||
mut live_entities: Local<EntityHashSet>,
|
||||
) {
|
||||
|
@ -174,10 +373,63 @@ pub fn extract_core_2d_camera_phases(
|
|||
|
||||
commands.get_or_spawn(entity);
|
||||
transparent_2d_phases.insert_or_clear(entity);
|
||||
opaque_2d_phases.insert_or_clear(entity);
|
||||
alpha_mask_2d_phases.insert_or_clear(entity);
|
||||
|
||||
live_entities.insert(entity);
|
||||
}
|
||||
|
||||
// Clear out all dead views.
|
||||
transparent_2d_phases.retain(|camera_entity, _| live_entities.contains(camera_entity));
|
||||
opaque_2d_phases.retain(|camera_entity, _| live_entities.contains(camera_entity));
|
||||
alpha_mask_2d_phases.retain(|camera_entity, _| live_entities.contains(camera_entity));
|
||||
}
|
||||
|
||||
pub fn prepare_core_2d_depth_textures(
|
||||
mut commands: Commands,
|
||||
mut texture_cache: ResMut<TextureCache>,
|
||||
render_device: Res<RenderDevice>,
|
||||
transparent_2d_phases: Res<ViewSortedRenderPhases<Transparent2d>>,
|
||||
opaque_2d_phases: Res<ViewBinnedRenderPhases<Opaque2d>>,
|
||||
views_2d: Query<(Entity, &ExtractedCamera, &Msaa), (With<Camera2d>,)>,
|
||||
) {
|
||||
let mut textures = HashMap::default();
|
||||
for (view, camera, msaa) in &views_2d {
|
||||
if !opaque_2d_phases.contains_key(&view) || !transparent_2d_phases.contains_key(&view) {
|
||||
continue;
|
||||
};
|
||||
|
||||
let Some(physical_target_size) = camera.physical_target_size else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let cached_texture = textures
|
||||
.entry(camera.target.clone())
|
||||
.or_insert_with(|| {
|
||||
// The size of the depth texture
|
||||
let size = Extent3d {
|
||||
depth_or_array_layers: 1,
|
||||
width: physical_target_size.x,
|
||||
height: physical_target_size.y,
|
||||
};
|
||||
|
||||
let descriptor = TextureDescriptor {
|
||||
label: Some("view_depth_texture"),
|
||||
size,
|
||||
mip_level_count: 1,
|
||||
sample_count: msaa.samples(),
|
||||
dimension: TextureDimension::D2,
|
||||
format: CORE_2D_DEPTH_FORMAT,
|
||||
usage: TextureUsages::RENDER_ATTACHMENT,
|
||||
view_formats: &[],
|
||||
};
|
||||
|
||||
texture_cache.get(&render_device, descriptor)
|
||||
})
|
||||
.clone();
|
||||
|
||||
commands
|
||||
.entity(view)
|
||||
.insert(ViewDepthTexture::new(cached_texture, Some(0.0)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,6 +4,7 @@ use crate::{
|
|||
};
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_reflect::{Reflect, ReflectDeserialize, ReflectSerialize};
|
||||
use bevy_render::view::Msaa;
|
||||
use bevy_render::{
|
||||
camera::{Camera, CameraMainTextureUsages, CameraRenderGraph, Exposure, Projection},
|
||||
extract_component::ExtractComponent,
|
||||
|
@ -152,6 +153,7 @@ pub struct Camera3dBundle {
|
|||
pub color_grading: ColorGrading,
|
||||
pub exposure: Exposure,
|
||||
pub main_texture_usages: CameraMainTextureUsages,
|
||||
pub msaa: Msaa,
|
||||
}
|
||||
|
||||
// NOTE: ideally Perspective and Orthographic defaults can share the same impl, but sadly it breaks rust's type inference
|
||||
|
@ -171,6 +173,7 @@ impl Default for Camera3dBundle {
|
|||
exposure: Default::default(),
|
||||
main_texture_usages: Default::default(),
|
||||
deband_dither: DebandDither::Enabled,
|
||||
msaa: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,6 +12,7 @@ use bevy_render::{
|
|||
renderer::RenderContext,
|
||||
view::{ViewDepthTexture, ViewTarget, ViewUniformOffset},
|
||||
};
|
||||
use bevy_utils::tracing::error;
|
||||
#[cfg(feature = "trace")]
|
||||
use bevy_utils::tracing::info_span;
|
||||
|
||||
|
@ -95,14 +96,18 @@ impl ViewNode for MainOpaquePass3dNode {
|
|||
if !opaque_phase.is_empty() {
|
||||
#[cfg(feature = "trace")]
|
||||
let _opaque_main_pass_3d_span = info_span!("opaque_main_pass_3d").entered();
|
||||
opaque_phase.render(&mut render_pass, world, view_entity);
|
||||
if let Err(err) = opaque_phase.render(&mut render_pass, world, view_entity) {
|
||||
error!("Error encountered while rendering the opaque phase {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
// Alpha draws
|
||||
if !alpha_mask_phase.is_empty() {
|
||||
#[cfg(feature = "trace")]
|
||||
let _alpha_mask_main_pass_3d_span = info_span!("alpha_mask_main_pass_3d").entered();
|
||||
alpha_mask_phase.render(&mut render_pass, world, view_entity);
|
||||
if let Err(err) = alpha_mask_phase.render(&mut render_pass, world, view_entity) {
|
||||
error!("Error encountered while rendering the alpha mask phase {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
// Skybox draw using a fullscreen triangle
|
||||
|
|
|
@ -9,6 +9,7 @@ use bevy_render::{
|
|||
renderer::RenderContext,
|
||||
view::{ViewDepthTexture, ViewTarget},
|
||||
};
|
||||
use bevy_utils::tracing::error;
|
||||
#[cfg(feature = "trace")]
|
||||
use bevy_utils::tracing::info_span;
|
||||
use std::ops::Range;
|
||||
|
@ -98,7 +99,11 @@ impl ViewNode for MainTransmissivePass3dNode {
|
|||
}
|
||||
|
||||
// render items in range
|
||||
transmissive_phase.render_range(&mut render_pass, world, view_entity, range);
|
||||
if let Err(err) =
|
||||
transmissive_phase.render_range(&mut render_pass, world, view_entity, range)
|
||||
{
|
||||
error!("Error encountered while rendering the transmissive phase {err:?}");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let mut render_pass =
|
||||
|
@ -108,7 +113,9 @@ impl ViewNode for MainTransmissivePass3dNode {
|
|||
render_pass.set_camera_viewport(viewport);
|
||||
}
|
||||
|
||||
transmissive_phase.render(&mut render_pass, world, view_entity);
|
||||
if let Err(err) = transmissive_phase.render(&mut render_pass, world, view_entity) {
|
||||
error!("Error encountered while rendering the transmissive phase {err:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -9,6 +9,7 @@ use bevy_render::{
|
|||
renderer::RenderContext,
|
||||
view::{ViewDepthTexture, ViewTarget},
|
||||
};
|
||||
use bevy_utils::tracing::error;
|
||||
#[cfg(feature = "trace")]
|
||||
use bevy_utils::tracing::info_span;
|
||||
|
||||
|
@ -70,7 +71,9 @@ impl ViewNode for MainTransparentPass3dNode {
|
|||
render_pass.set_camera_viewport(viewport);
|
||||
}
|
||||
|
||||
transparent_phase.render(&mut render_pass, world, view_entity);
|
||||
if let Err(err) = transparent_phase.render(&mut render_pass, world, view_entity) {
|
||||
error!("Error encountered while rendering the transparent phase {err:?}");
|
||||
}
|
||||
|
||||
pass_span.end(&mut render_pass);
|
||||
}
|
||||
|
|
|
@ -584,20 +584,12 @@ pub fn extract_camera_prepass_phase(
|
|||
|
||||
live_entities.insert(entity);
|
||||
|
||||
let mut entity = commands.get_or_spawn(entity);
|
||||
|
||||
if depth_prepass {
|
||||
entity.insert(DepthPrepass);
|
||||
}
|
||||
if normal_prepass {
|
||||
entity.insert(NormalPrepass);
|
||||
}
|
||||
if motion_vector_prepass {
|
||||
entity.insert(MotionVectorPrepass);
|
||||
}
|
||||
if deferred_prepass {
|
||||
entity.insert(DeferredPrepass);
|
||||
}
|
||||
commands
|
||||
.get_or_spawn(entity)
|
||||
.insert_if(DepthPrepass, || depth_prepass)
|
||||
.insert_if(NormalPrepass, || normal_prepass)
|
||||
.insert_if(MotionVectorPrepass, || motion_vector_prepass)
|
||||
.insert_if(DeferredPrepass, || deferred_prepass);
|
||||
}
|
||||
|
||||
opaque_3d_prepass_phases.retain(|entity, _| live_entities.contains(entity));
|
||||
|
@ -610,16 +602,21 @@ pub fn extract_camera_prepass_phase(
|
|||
pub fn prepare_core_3d_depth_textures(
|
||||
mut commands: Commands,
|
||||
mut texture_cache: ResMut<TextureCache>,
|
||||
msaa: Res<Msaa>,
|
||||
render_device: Res<RenderDevice>,
|
||||
opaque_3d_phases: Res<ViewBinnedRenderPhases<Opaque3d>>,
|
||||
alpha_mask_3d_phases: Res<ViewBinnedRenderPhases<AlphaMask3d>>,
|
||||
transmissive_3d_phases: Res<ViewSortedRenderPhases<Transmissive3d>>,
|
||||
transparent_3d_phases: Res<ViewSortedRenderPhases<Transparent3d>>,
|
||||
views_3d: Query<(Entity, &ExtractedCamera, Option<&DepthPrepass>, &Camera3d)>,
|
||||
views_3d: Query<(
|
||||
Entity,
|
||||
&ExtractedCamera,
|
||||
Option<&DepthPrepass>,
|
||||
&Camera3d,
|
||||
&Msaa,
|
||||
)>,
|
||||
) {
|
||||
let mut render_target_usage = HashMap::default();
|
||||
for (view, camera, depth_prepass, camera_3d) in &views_3d {
|
||||
for (view, camera, depth_prepass, camera_3d, _msaa) in &views_3d {
|
||||
if !opaque_3d_phases.contains_key(&view)
|
||||
|| !alpha_mask_3d_phases.contains_key(&view)
|
||||
|| !transmissive_3d_phases.contains_key(&view)
|
||||
|
@ -641,13 +638,13 @@ pub fn prepare_core_3d_depth_textures(
|
|||
}
|
||||
|
||||
let mut textures = HashMap::default();
|
||||
for (entity, camera, _, camera_3d) in &views_3d {
|
||||
for (entity, camera, _, camera_3d, msaa) in &views_3d {
|
||||
let Some(physical_target_size) = camera.physical_target_size else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let cached_texture = textures
|
||||
.entry(camera.target.clone())
|
||||
.entry((camera.target.clone(), msaa))
|
||||
.or_insert_with(|| {
|
||||
// The size of the depth texture
|
||||
let size = Extent3d {
|
||||
|
@ -779,11 +776,8 @@ pub fn prepare_core_3d_transmission_textures(
|
|||
}
|
||||
|
||||
// Disable MSAA and warn if using deferred rendering
|
||||
pub fn check_msaa(
|
||||
mut msaa: ResMut<Msaa>,
|
||||
deferred_views: Query<Entity, (With<Camera>, With<DeferredPrepass>)>,
|
||||
) {
|
||||
if !deferred_views.is_empty() {
|
||||
pub fn check_msaa(mut deferred_views: Query<&mut Msaa, (With<Camera>, With<DeferredPrepass>)>) {
|
||||
for mut msaa in deferred_views.iter_mut() {
|
||||
match *msaa {
|
||||
Msaa::Off => (),
|
||||
_ => {
|
||||
|
@ -799,7 +793,6 @@ pub fn check_msaa(
|
|||
pub fn prepare_prepass_textures(
|
||||
mut commands: Commands,
|
||||
mut texture_cache: ResMut<TextureCache>,
|
||||
msaa: Res<Msaa>,
|
||||
render_device: Res<RenderDevice>,
|
||||
opaque_3d_prepass_phases: Res<ViewBinnedRenderPhases<Opaque3dPrepass>>,
|
||||
alpha_mask_3d_prepass_phases: Res<ViewBinnedRenderPhases<AlphaMask3dPrepass>>,
|
||||
|
@ -808,6 +801,7 @@ pub fn prepare_prepass_textures(
|
|||
views_3d: Query<(
|
||||
Entity,
|
||||
&ExtractedCamera,
|
||||
&Msaa,
|
||||
Has<DepthPrepass>,
|
||||
Has<NormalPrepass>,
|
||||
Has<MotionVectorPrepass>,
|
||||
|
@ -819,8 +813,15 @@ pub fn prepare_prepass_textures(
|
|||
let mut deferred_textures = HashMap::default();
|
||||
let mut deferred_lighting_id_textures = HashMap::default();
|
||||
let mut motion_vectors_textures = HashMap::default();
|
||||
for (entity, camera, depth_prepass, normal_prepass, motion_vector_prepass, deferred_prepass) in
|
||||
&views_3d
|
||||
for (
|
||||
entity,
|
||||
camera,
|
||||
msaa,
|
||||
depth_prepass,
|
||||
normal_prepass,
|
||||
motion_vector_prepass,
|
||||
deferred_prepass,
|
||||
) in &views_3d
|
||||
{
|
||||
if !opaque_3d_prepass_phases.contains_key(&entity)
|
||||
&& !alpha_mask_3d_prepass_phases.contains_key(&entity)
|
||||
|
|
|
@ -11,6 +11,7 @@ use bevy_render::{
|
|||
renderer::RenderContext,
|
||||
view::ViewDepthTexture,
|
||||
};
|
||||
use bevy_utils::tracing::error;
|
||||
#[cfg(feature = "trace")]
|
||||
use bevy_utils::tracing::info_span;
|
||||
|
||||
|
@ -149,14 +150,23 @@ impl ViewNode for DeferredGBufferPrepassNode {
|
|||
{
|
||||
#[cfg(feature = "trace")]
|
||||
let _opaque_prepass_span = info_span!("opaque_deferred_prepass").entered();
|
||||
opaque_deferred_phase.render(&mut render_pass, world, view_entity);
|
||||
if let Err(err) = opaque_deferred_phase.render(&mut render_pass, world, view_entity)
|
||||
{
|
||||
error!("Error encountered while rendering the opaque deferred phase {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
// Alpha masked draws
|
||||
if !alpha_mask_deferred_phase.is_empty() {
|
||||
#[cfg(feature = "trace")]
|
||||
let _alpha_mask_deferred_span = info_span!("alpha_mask_deferred_prepass").entered();
|
||||
alpha_mask_deferred_phase.render(&mut render_pass, world, view_entity);
|
||||
if let Err(err) =
|
||||
alpha_mask_deferred_phase.render(&mut render_pass, world, view_entity)
|
||||
{
|
||||
error!(
|
||||
"Error encountered while rendering the alpha mask deferred phase {err:?}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
drop(render_pass);
|
||||
|
|
|
@ -21,10 +21,12 @@ use bevy_ecs::{
|
|||
component::Component,
|
||||
entity::Entity,
|
||||
query::{QueryItem, With},
|
||||
reflect::ReflectComponent,
|
||||
schedule::IntoSystemConfigs as _,
|
||||
system::{lifetimeless::Read, Commands, Query, Res, ResMut, Resource},
|
||||
world::{FromWorld, World},
|
||||
};
|
||||
use bevy_reflect::{prelude::ReflectDefault, Reflect};
|
||||
use bevy_render::{
|
||||
camera::{PhysicalCameraParameters, Projection},
|
||||
extract_component::{ComponentUniforms, DynamicUniformIndex, UniformComponentPlugin},
|
||||
|
@ -67,7 +69,8 @@ const DOF_SHADER_HANDLE: Handle<Shader> = Handle::weak_from_u128(203186118073921
|
|||
pub struct DepthOfFieldPlugin;
|
||||
|
||||
/// Depth of field settings.
|
||||
#[derive(Component, Clone, Copy)]
|
||||
#[derive(Component, Clone, Copy, Reflect)]
|
||||
#[reflect(Component, Default)]
|
||||
pub struct DepthOfFieldSettings {
|
||||
/// The appearance of the effect.
|
||||
pub mode: DepthOfFieldMode,
|
||||
|
@ -110,7 +113,8 @@ pub struct DepthOfFieldSettings {
|
|||
}
|
||||
|
||||
/// Controls the appearance of the effect.
|
||||
#[derive(Component, Clone, Copy, Default, PartialEq, Debug)]
|
||||
#[derive(Clone, Copy, Default, PartialEq, Debug, Reflect)]
|
||||
#[reflect(Default, PartialEq)]
|
||||
pub enum DepthOfFieldMode {
|
||||
/// A more accurate simulation, in which circles of confusion generate
|
||||
/// "spots" of light.
|
||||
|
@ -195,6 +199,8 @@ impl Plugin for DepthOfFieldPlugin {
|
|||
fn build(&self, app: &mut App) {
|
||||
load_internal_asset!(app, DOF_SHADER_HANDLE, "dof.wgsl", Shader::from_wgsl);
|
||||
|
||||
app.register_type::<DepthOfFieldSettings>();
|
||||
app.register_type::<DepthOfFieldMode>();
|
||||
app.add_plugins(UniformComponentPlugin::<DepthOfFieldUniform>::default());
|
||||
|
||||
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
|
||||
|
@ -515,11 +521,10 @@ impl FromWorld for DepthOfFieldGlobalBindGroupLayout {
|
|||
/// specific to each view.
|
||||
pub fn prepare_depth_of_field_view_bind_group_layouts(
|
||||
mut commands: Commands,
|
||||
view_targets: Query<(Entity, &DepthOfFieldSettings)>,
|
||||
msaa: Res<Msaa>,
|
||||
view_targets: Query<(Entity, &DepthOfFieldSettings, &Msaa)>,
|
||||
render_device: Res<RenderDevice>,
|
||||
) {
|
||||
for (view, dof_settings) in view_targets.iter() {
|
||||
for (view, dof_settings, msaa) in view_targets.iter() {
|
||||
// Create the bind group layout for the passes that take one input.
|
||||
let single_input = render_device.create_bind_group_layout(
|
||||
Some("depth of field bind group layout (single input)"),
|
||||
|
@ -646,16 +651,16 @@ pub fn prepare_depth_of_field_pipelines(
|
|||
mut commands: Commands,
|
||||
pipeline_cache: Res<PipelineCache>,
|
||||
mut pipelines: ResMut<SpecializedRenderPipelines<DepthOfFieldPipeline>>,
|
||||
msaa: Res<Msaa>,
|
||||
global_bind_group_layout: Res<DepthOfFieldGlobalBindGroupLayout>,
|
||||
view_targets: Query<(
|
||||
Entity,
|
||||
&ExtractedView,
|
||||
&DepthOfFieldSettings,
|
||||
&ViewDepthOfFieldBindGroupLayouts,
|
||||
&Msaa,
|
||||
)>,
|
||||
) {
|
||||
for (entity, view, dof_settings, view_bind_group_layouts) in view_targets.iter() {
|
||||
for (entity, view, dof_settings, view_bind_group_layouts, msaa) in view_targets.iter() {
|
||||
let dof_pipeline = DepthOfFieldPipeline {
|
||||
view_bind_group_layouts: view_bind_group_layouts.clone(),
|
||||
global_bind_group_layout: global_bind_group_layout.layout.clone(),
|
||||
|
|
|
@ -52,7 +52,7 @@ pub mod prelude {
|
|||
use crate::{
|
||||
blit::BlitPlugin,
|
||||
bloom::BloomPlugin,
|
||||
contrast_adaptive_sharpening::CASPlugin,
|
||||
contrast_adaptive_sharpening::CasPlugin,
|
||||
core_2d::Core2dPlugin,
|
||||
core_3d::Core3dPlugin,
|
||||
deferred::copy_lighting_id::CopyDeferredLightingIdPlugin,
|
||||
|
@ -97,7 +97,7 @@ impl Plugin for CorePipelinePlugin {
|
|||
UpscalingPlugin,
|
||||
BloomPlugin,
|
||||
FxaaPlugin,
|
||||
CASPlugin,
|
||||
CasPlugin,
|
||||
MotionBlurPlugin,
|
||||
DepthOfFieldPlugin,
|
||||
SmaaPlugin,
|
||||
|
|
|
@ -27,12 +27,13 @@ impl ViewNode for MotionBlurNode {
|
|||
&'static MotionBlurPipelineId,
|
||||
&'static ViewPrepassTextures,
|
||||
&'static MotionBlur,
|
||||
&'static Msaa,
|
||||
);
|
||||
fn run(
|
||||
&self,
|
||||
_graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext,
|
||||
(view_target, pipeline_id, prepass_textures, settings): QueryItem<Self::ViewQuery>,
|
||||
(view_target, pipeline_id, prepass_textures, settings, msaa): QueryItem<Self::ViewQuery>,
|
||||
world: &World,
|
||||
) -> Result<(), NodeRunError> {
|
||||
if settings.samples == 0 || settings.shutter_angle <= 0.0 {
|
||||
|
@ -60,7 +61,6 @@ impl ViewNode for MotionBlurNode {
|
|||
|
||||
let post_process = view_target.post_process_write();
|
||||
|
||||
let msaa = world.resource::<Msaa>();
|
||||
let layout = if msaa.samples() == 1 {
|
||||
&motion_blur_pipeline.layout
|
||||
} else {
|
||||
|
|
|
@ -153,10 +153,9 @@ pub(crate) fn prepare_motion_blur_pipelines(
|
|||
pipeline_cache: Res<PipelineCache>,
|
||||
mut pipelines: ResMut<SpecializedRenderPipelines<MotionBlurPipeline>>,
|
||||
pipeline: Res<MotionBlurPipeline>,
|
||||
msaa: Res<Msaa>,
|
||||
views: Query<(Entity, &ExtractedView), With<MotionBlur>>,
|
||||
views: Query<(Entity, &ExtractedView, &Msaa), With<MotionBlur>>,
|
||||
) {
|
||||
for (entity, view) in &views {
|
||||
for (entity, view, msaa) in &views {
|
||||
let pipeline_id = pipelines.specialize(
|
||||
&pipeline_cache,
|
||||
&pipeline,
|
||||
|
|
|
@ -6,9 +6,11 @@ use crate::{
|
|||
use bevy_app::{App, Plugin};
|
||||
use bevy_color::LinearRgba;
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_ecs::query::QueryItem;
|
||||
use bevy_render::render_graph::{ViewNode, ViewNodeRunner};
|
||||
use bevy_render::{
|
||||
camera::ExtractedCamera,
|
||||
render_graph::{Node, NodeRunError, RenderGraphApp, RenderGraphContext},
|
||||
render_graph::{NodeRunError, RenderGraphApp, RenderGraphContext},
|
||||
renderer::RenderContext,
|
||||
view::{Msaa, ViewTarget},
|
||||
Render, RenderSet,
|
||||
|
@ -30,90 +32,87 @@ impl Plugin for MsaaWritebackPlugin {
|
|||
);
|
||||
{
|
||||
render_app
|
||||
.add_render_graph_node::<MsaaWritebackNode>(Core2d, Node2d::MsaaWriteback)
|
||||
.add_render_graph_node::<ViewNodeRunner<MsaaWritebackNode>>(
|
||||
Core2d,
|
||||
Node2d::MsaaWriteback,
|
||||
)
|
||||
.add_render_graph_edge(Core2d, Node2d::MsaaWriteback, Node2d::StartMainPass);
|
||||
}
|
||||
{
|
||||
render_app
|
||||
.add_render_graph_node::<MsaaWritebackNode>(Core3d, Node3d::MsaaWriteback)
|
||||
.add_render_graph_node::<ViewNodeRunner<MsaaWritebackNode>>(
|
||||
Core3d,
|
||||
Node3d::MsaaWriteback,
|
||||
)
|
||||
.add_render_graph_edge(Core3d, Node3d::MsaaWriteback, Node3d::StartMainPass);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MsaaWritebackNode {
|
||||
cameras: QueryState<(&'static ViewTarget, &'static MsaaWritebackBlitPipeline)>,
|
||||
}
|
||||
#[derive(Default)]
|
||||
pub struct MsaaWritebackNode;
|
||||
|
||||
impl FromWorld for MsaaWritebackNode {
|
||||
fn from_world(world: &mut World) -> Self {
|
||||
Self {
|
||||
cameras: world.query(),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl ViewNode for MsaaWritebackNode {
|
||||
type ViewQuery = (
|
||||
&'static ViewTarget,
|
||||
&'static MsaaWritebackBlitPipeline,
|
||||
&'static Msaa,
|
||||
);
|
||||
|
||||
impl Node for MsaaWritebackNode {
|
||||
fn update(&mut self, world: &mut World) {
|
||||
self.cameras.update_archetypes(world);
|
||||
}
|
||||
|
||||
fn run(
|
||||
fn run<'w>(
|
||||
&self,
|
||||
graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext,
|
||||
world: &World,
|
||||
_graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext<'w>,
|
||||
(target, blit_pipeline_id, msaa): QueryItem<'w, Self::ViewQuery>,
|
||||
world: &'w World,
|
||||
) -> Result<(), NodeRunError> {
|
||||
if *world.resource::<Msaa>() == Msaa::Off {
|
||||
if *msaa == Msaa::Off {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let view_entity = graph.view_entity();
|
||||
if let Ok((target, blit_pipeline_id)) = self.cameras.get_manual(world, view_entity) {
|
||||
let blit_pipeline = world.resource::<BlitPipeline>();
|
||||
let pipeline_cache = world.resource::<PipelineCache>();
|
||||
let Some(pipeline) = pipeline_cache.get_render_pipeline(blit_pipeline_id.0) else {
|
||||
return Ok(());
|
||||
};
|
||||
let blit_pipeline = world.resource::<BlitPipeline>();
|
||||
let pipeline_cache = world.resource::<PipelineCache>();
|
||||
let Some(pipeline) = pipeline_cache.get_render_pipeline(blit_pipeline_id.0) else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// The current "main texture" needs to be bound as an input resource, and we need the "other"
|
||||
// unused target to be the "resolve target" for the MSAA write. Therefore this is the same
|
||||
// as a post process write!
|
||||
let post_process = target.post_process_write();
|
||||
// The current "main texture" needs to be bound as an input resource, and we need the "other"
|
||||
// unused target to be the "resolve target" for the MSAA write. Therefore this is the same
|
||||
// as a post process write!
|
||||
let post_process = target.post_process_write();
|
||||
|
||||
let pass_descriptor = RenderPassDescriptor {
|
||||
label: Some("msaa_writeback"),
|
||||
// The target's "resolve target" is the "destination" in post_process.
|
||||
// We will indirectly write the results to the "destination" using
|
||||
// the MSAA resolve step.
|
||||
color_attachments: &[Some(RenderPassColorAttachment {
|
||||
// If MSAA is enabled, then the sampled texture will always exist
|
||||
view: target.sampled_main_texture_view().unwrap(),
|
||||
resolve_target: Some(post_process.destination),
|
||||
ops: Operations {
|
||||
load: LoadOp::Clear(LinearRgba::BLACK.into()),
|
||||
store: StoreOp::Store,
|
||||
},
|
||||
})],
|
||||
depth_stencil_attachment: None,
|
||||
timestamp_writes: None,
|
||||
occlusion_query_set: None,
|
||||
};
|
||||
let pass_descriptor = RenderPassDescriptor {
|
||||
label: Some("msaa_writeback"),
|
||||
// The target's "resolve target" is the "destination" in post_process.
|
||||
// We will indirectly write the results to the "destination" using
|
||||
// the MSAA resolve step.
|
||||
color_attachments: &[Some(RenderPassColorAttachment {
|
||||
// If MSAA is enabled, then the sampled texture will always exist
|
||||
view: target.sampled_main_texture_view().unwrap(),
|
||||
resolve_target: Some(post_process.destination),
|
||||
ops: Operations {
|
||||
load: LoadOp::Clear(LinearRgba::BLACK.into()),
|
||||
store: StoreOp::Store,
|
||||
},
|
||||
})],
|
||||
depth_stencil_attachment: None,
|
||||
timestamp_writes: None,
|
||||
occlusion_query_set: None,
|
||||
};
|
||||
|
||||
let bind_group = render_context.render_device().create_bind_group(
|
||||
None,
|
||||
&blit_pipeline.texture_bind_group,
|
||||
&BindGroupEntries::sequential((post_process.source, &blit_pipeline.sampler)),
|
||||
);
|
||||
let bind_group = render_context.render_device().create_bind_group(
|
||||
None,
|
||||
&blit_pipeline.texture_bind_group,
|
||||
&BindGroupEntries::sequential((post_process.source, &blit_pipeline.sampler)),
|
||||
);
|
||||
|
||||
let mut render_pass = render_context
|
||||
.command_encoder()
|
||||
.begin_render_pass(&pass_descriptor);
|
||||
let mut render_pass = render_context
|
||||
.command_encoder()
|
||||
.begin_render_pass(&pass_descriptor);
|
||||
|
||||
render_pass.set_pipeline(pipeline);
|
||||
render_pass.set_bind_group(0, &bind_group, &[]);
|
||||
render_pass.draw(0..3, 0..1);
|
||||
}
|
||||
render_pass.set_pipeline(pipeline);
|
||||
render_pass.set_bind_group(0, &bind_group, &[]);
|
||||
render_pass.draw(0..3, 0..1);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -127,10 +126,9 @@ fn prepare_msaa_writeback_pipelines(
|
|||
pipeline_cache: Res<PipelineCache>,
|
||||
mut pipelines: ResMut<SpecializedRenderPipelines<BlitPipeline>>,
|
||||
blit_pipeline: Res<BlitPipeline>,
|
||||
view_targets: Query<(Entity, &ViewTarget, &ExtractedCamera)>,
|
||||
msaa: Res<Msaa>,
|
||||
view_targets: Query<(Entity, &ViewTarget, &ExtractedCamera, &Msaa)>,
|
||||
) {
|
||||
for (entity, view_target, camera) in view_targets.iter() {
|
||||
for (entity, view_target, camera, msaa) in view_targets.iter() {
|
||||
// only do writeback if writeback is enabled for the camera and this isn't the first camera in the target,
|
||||
// as there is nothing to write back for the first camera.
|
||||
if msaa.samples() > 1 && camera.msaa_writeback && camera.sorted_camera_index_for_target > 0
|
||||
|
|
|
@ -9,6 +9,7 @@ use bevy_render::{
|
|||
renderer::RenderContext,
|
||||
view::{ViewDepthTexture, ViewUniformOffset},
|
||||
};
|
||||
use bevy_utils::tracing::error;
|
||||
#[cfg(feature = "trace")]
|
||||
use bevy_utils::tracing::info_span;
|
||||
|
||||
|
@ -125,14 +126,23 @@ impl ViewNode for PrepassNode {
|
|||
{
|
||||
#[cfg(feature = "trace")]
|
||||
let _opaque_prepass_span = info_span!("opaque_prepass").entered();
|
||||
opaque_prepass_phase.render(&mut render_pass, world, view_entity);
|
||||
if let Err(err) = opaque_prepass_phase.render(&mut render_pass, world, view_entity)
|
||||
{
|
||||
error!("Error encountered while rendering the opaque prepass phase {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
// Alpha masked draws
|
||||
if !alpha_mask_prepass_phase.is_empty() {
|
||||
#[cfg(feature = "trace")]
|
||||
let _alpha_mask_prepass_span = info_span!("alpha_mask_prepass").entered();
|
||||
alpha_mask_prepass_phase.render(&mut render_pass, world, view_entity);
|
||||
if let Err(err) =
|
||||
alpha_mask_prepass_phase.render(&mut render_pass, world, view_entity)
|
||||
{
|
||||
error!(
|
||||
"Error encountered while rendering the alpha mask prepass phase {err:?}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Skybox draw using a fullscreen triangle
|
||||
|
|
|
@ -245,10 +245,9 @@ fn prepare_skybox_pipelines(
|
|||
pipeline_cache: Res<PipelineCache>,
|
||||
mut pipelines: ResMut<SpecializedRenderPipelines<SkyboxPipeline>>,
|
||||
pipeline: Res<SkyboxPipeline>,
|
||||
msaa: Res<Msaa>,
|
||||
views: Query<(Entity, &ExtractedView), With<Skybox>>,
|
||||
views: Query<(Entity, &ExtractedView, &Msaa), With<Skybox>>,
|
||||
) {
|
||||
for (entity, view) in &views {
|
||||
for (entity, view, msaa) in &views {
|
||||
let pipeline_id = pipelines.specialize(
|
||||
&pipeline_cache,
|
||||
&pipeline,
|
||||
|
|
|
@ -116,11 +116,10 @@ pub fn prepare_skybox_prepass_pipelines(
|
|||
mut commands: Commands,
|
||||
pipeline_cache: Res<PipelineCache>,
|
||||
mut pipelines: ResMut<SpecializedRenderPipelines<SkyboxPrepassPipeline>>,
|
||||
msaa: Res<Msaa>,
|
||||
pipeline: Res<SkyboxPrepassPipeline>,
|
||||
views: Query<(Entity, Has<NormalPrepass>), (With<Skybox>, With<MotionVectorPrepass>)>,
|
||||
views: Query<(Entity, Has<NormalPrepass>, &Msaa), (With<Skybox>, With<MotionVectorPrepass>)>,
|
||||
) {
|
||||
for (entity, normal_prepass) in &views {
|
||||
for (entity, normal_prepass, msaa) in &views {
|
||||
let pipeline_key = SkyboxPrepassPipelineKey {
|
||||
samples: msaa.samples(),
|
||||
normal_prepass,
|
||||
|
|
|
@ -14,8 +14,8 @@
|
|||
//! To use SMAA, add [`SmaaSettings`] to a [`bevy_render::camera::Camera`]. In a
|
||||
//! pinch, you can simply use the default settings (via the [`Default`] trait)
|
||||
//! for a high-quality, high-performance appearance. When using SMAA, you will
|
||||
//! likely want to turn the default MSAA off by inserting the
|
||||
//! [`bevy_render::view::Msaa::Off`] resource into the [`App`].
|
||||
//! likely want set [`bevy_render::view::Msaa`] to [`bevy_render::view::Msaa::Off`]
|
||||
//! for every camera using SMAA.
|
||||
//!
|
||||
//! Those who have used SMAA in other engines should be aware that Bevy doesn't
|
||||
//! yet support the following more advanced features of SMAA:
|
||||
|
|
|
@ -34,10 +34,11 @@ use bevy_render::{
|
|||
view::{ExtractedView, Msaa, ViewTarget},
|
||||
ExtractSchedule, MainWorld, Render, RenderApp, RenderSet,
|
||||
};
|
||||
use bevy_utils::tracing::warn;
|
||||
|
||||
const TAA_SHADER_HANDLE: Handle<Shader> = Handle::weak_from_u128(656865235226276);
|
||||
|
||||
/// Plugin for temporal anti-aliasing. Disables multisample anti-aliasing (MSAA).
|
||||
/// Plugin for temporal anti-aliasing.
|
||||
///
|
||||
/// See [`TemporalAntiAliasSettings`] for more details.
|
||||
pub struct TemporalAntiAliasPlugin;
|
||||
|
@ -46,8 +47,7 @@ impl Plugin for TemporalAntiAliasPlugin {
|
|||
fn build(&self, app: &mut App) {
|
||||
load_internal_asset!(app, TAA_SHADER_HANDLE, "taa.wgsl", Shader::from_wgsl);
|
||||
|
||||
app.insert_resource(Msaa::Off)
|
||||
.register_type::<TemporalAntiAliasSettings>();
|
||||
app.register_type::<TemporalAntiAliasSettings>();
|
||||
|
||||
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
|
||||
return;
|
||||
|
@ -118,6 +118,8 @@ pub struct TemporalAntiAliasBundle {
|
|||
///
|
||||
/// # Usage Notes
|
||||
///
|
||||
/// Any camera with this component must also disable [`Msaa`] by setting it to [`Msaa::Off`].
|
||||
///
|
||||
/// Requires that you add [`TemporalAntiAliasPlugin`] to your app,
|
||||
/// and add the [`DepthPrepass`], [`MotionVectorPrepass`], and [`TemporalJitter`]
|
||||
/// components to your camera.
|
||||
|
@ -162,17 +164,23 @@ impl ViewNode for TemporalAntiAliasNode {
|
|||
&'static TemporalAntiAliasHistoryTextures,
|
||||
&'static ViewPrepassTextures,
|
||||
&'static TemporalAntiAliasPipelineId,
|
||||
&'static Msaa,
|
||||
);
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
_graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext,
|
||||
(camera, view_target, taa_history_textures, prepass_textures, taa_pipeline_id): QueryItem<
|
||||
(camera, view_target, taa_history_textures, prepass_textures, taa_pipeline_id, msaa): QueryItem<
|
||||
Self::ViewQuery,
|
||||
>,
|
||||
world: &World,
|
||||
) -> Result<(), NodeRunError> {
|
||||
if *msaa != Msaa::Off {
|
||||
warn!("Temporal anti-aliasing requires MSAA to be disabled");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let (Some(pipelines), Some(pipeline_cache)) = (
|
||||
world.get_resource::<TaaPipeline>(),
|
||||
world.get_resource::<PipelineCache>(),
|
||||
|
@ -402,13 +410,13 @@ fn prepare_taa_history_textures(
|
|||
views: Query<(Entity, &ExtractedCamera, &ExtractedView), With<TemporalAntiAliasSettings>>,
|
||||
) {
|
||||
for (entity, camera, view) in &views {
|
||||
if let Some(physical_viewport_size) = camera.physical_viewport_size {
|
||||
if let Some(physical_target_size) = camera.physical_target_size {
|
||||
let mut texture_descriptor = TextureDescriptor {
|
||||
label: None,
|
||||
size: Extent3d {
|
||||
depth_or_array_layers: 1,
|
||||
width: physical_viewport_size.x,
|
||||
height: physical_viewport_size.y,
|
||||
width: physical_target_size.x,
|
||||
height: physical_target_size.y,
|
||||
},
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
|
|
|
@ -17,7 +17,14 @@ impl Plugin for UpscalingPlugin {
|
|||
if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
|
||||
render_app.add_systems(
|
||||
Render,
|
||||
prepare_view_upscaling_pipelines.in_set(RenderSet::Prepare),
|
||||
// This system should probably technically be run *after* all of the other systems
|
||||
// that might modify `PipelineCache` via interior mutability, but for now,
|
||||
// we've chosen to simply ignore the ambiguities out of a desire for a better refactor
|
||||
// and aversion to extensive and intrusive system ordering.
|
||||
// See https://github.com/bevyengine/bevy/issues/14770 for more context.
|
||||
prepare_view_upscaling_pipelines
|
||||
.in_set(RenderSet::Prepare)
|
||||
.ambiguous_with_all(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -84,6 +84,14 @@ impl ViewNode for UpscalingNode {
|
|||
.command_encoder()
|
||||
.begin_render_pass(&pass_descriptor);
|
||||
|
||||
if let Some(camera) = camera {
|
||||
if let Some(viewport) = &camera.viewport {
|
||||
let size = viewport.physical_size;
|
||||
let position = viewport.physical_position;
|
||||
render_pass.set_scissor_rect(position.x, position.y, size.x, size.y);
|
||||
}
|
||||
}
|
||||
|
||||
render_pass.set_pipeline(pipeline);
|
||||
render_pass.set_bind_group(0, bind_group, &[]);
|
||||
render_pass.draw(0..3, 0..1);
|
||||
|
|
|
@ -21,5 +21,5 @@ syn = { version = "2.0", features = ["full"] }
|
|||
workspace = true
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["-Zunstable-options", "--cfg", "docsrs"]
|
||||
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||
all-features = true
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue