Mirror of https://github.com/bevyengine/bevy (synced 2024-11-25 14:10:19 +00:00)
Remove old post_processing example (#8376)
# Objective

- The old post processing example doesn't use the actual post processing features of bevy. It also has some issues with resizing, and it causes some confusion for people because accessing the prepass textures from it is not easy.
- There's already a render to texture example.
- At this point it's mostly obsolete, since the post_process_pass example is more complete and shows the recommended way to do post processing in bevy. It's a bit more complicated, but it's well documented, and I'm working on simplifying it even more.

## Solution

- Remove the old post_processing example.
- Rename post_process_pass to post_processing.

## Reviewer Notes

The diff is really noisy because of the rename, but I didn't change any code in the example.

---------

Co-authored-by: James Liu <contact@jamessliu.com>
This commit is contained in: parent 8df014fbaf, commit c7eaedd6a1
6 changed files with 355 additions and 581 deletions
Cargo.toml (10 changed lines)

@@ -1415,16 +1415,6 @@ name = "post_processing"
 path = "examples/shader/post_processing.rs"
 
 [package.metadata.example.post_processing]
-name = "Post Processing - Render To Texture"
-description = "A custom post processing effect, using two cameras, with one reusing the render texture of the first one"
-category = "Shaders"
-wasm = true
-
-[[example]]
-name = "post_process_pass"
-path = "examples/shader/post_process_pass.rs"
-
-[package.metadata.example.post_process_pass]
 name = "Post Processing - Custom Render Pass"
 description = "A custom post processing effect, using a custom render pass that runs after the main pass"
 category = "Shaders"
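For reference, the example registration that remains after this hunk should look roughly as follows. This is a reconstruction from the kept context lines above plus the implied [[example]] header; it is not itself part of the diff, and lines outside the hunk (such as a wasm = true entry) are not shown here.

[[example]]
name = "post_processing"
path = "examples/shader/post_processing.rs"

[package.metadata.example.post_processing]
name = "Post Processing - Custom Render Pass"
description = "A custom post processing effect, using a custom render pass that runs after the main pass"
category = "Shaders"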
Deleted shader asset (28 lines) — the chromatic aberration material shader that the removed render-to-texture example below loads as "shaders/custom_material_chromatic_aberration.wgsl":

@@ -1,28 +0,0 @@

#import bevy_sprite::mesh2d_view_bindings
#import bevy_pbr::utils

@group(1) @binding(0)
var texture: texture_2d<f32>;

@group(1) @binding(1)
var our_sampler: sampler;

@fragment
fn fragment(
    @builtin(position) position: vec4<f32>,
    #import bevy_sprite::mesh2d_vertex_output
) -> @location(0) vec4<f32> {
    // Get screen position with coordinates from 0 to 1
    let uv = coords_to_viewport_uv(position.xy, view.viewport);
    let offset_strength = 0.02;

    // Sample each color channel with an arbitrary shift
    var output_color = vec4<f32>(
        textureSample(texture, our_sampler, uv + vec2<f32>(offset_strength, -offset_strength)).r,
        textureSample(texture, our_sampler, uv + vec2<f32>(-offset_strength, 0.0)).g,
        textureSample(texture, our_sampler, uv + vec2<f32>(0.0, offset_strength)).b,
        1.0
    );

    return output_color;
}
examples/README.md

@@ -280,8 +280,7 @@ Example | Description
 [Material - GLSL](../examples/shader/shader_material_glsl.rs) | A shader that uses the GLSL shading language
 [Material - Screenspace Texture](../examples/shader/shader_material_screenspace_texture.rs) | A shader that samples a texture with view-independent UV coordinates
 [Material Prepass](../examples/shader/shader_prepass.rs) | A shader that uses the various textures generated by the prepass
-[Post Processing - Custom Render Pass](../examples/shader/post_process_pass.rs) | A custom post processing effect, using a custom render pass that runs after the main pass
-[Post Processing - Render To Texture](../examples/shader/post_processing.rs) | A custom post processing effect, using two cameras, with one reusing the render texture of the first one
+[Post Processing - Custom Render Pass](../examples/shader/post_processing.rs) | A custom post processing effect, using a custom render pass that runs after the main pass
 [Shader Defs](../examples/shader/shader_defs.rs) | A shader that uses "shaders defs" (a bevy tool to selectively toggle parts of a shader)
 [Texture Binding Array (Bindless Textures)](../examples/shader/texture_binding_array.rs) | A shader that shows how to bind and sample multiple textures as a binding array (a.k.a. bindless textures).
examples/shader/post_process_pass.rs — the entire file (392 lines) is deleted. Per the PR description, its code moves unchanged to examples/shader/post_processing.rs (next hunk):

@@ -1,392 +0,0 @@

//! This example shows how to create a custom render pass that runs after the main pass
//! and reads the texture generated by the main pass.
//!
//! The example shader is a very simple implementation of chromatic aberration.
//!
//! This is a fairly low level example and assumes some familiarity with rendering concepts and wgpu.

use bevy::{
    core_pipeline::{
        clear_color::ClearColorConfig, core_3d,
        fullscreen_vertex_shader::fullscreen_shader_vertex_state,
    },
    prelude::*,
    render::{
        extract_component::{
            ComponentUniforms, ExtractComponent, ExtractComponentPlugin, UniformComponentPlugin,
        },
        render_graph::{Node, NodeRunError, RenderGraphApp, RenderGraphContext},
        render_resource::{
            BindGroupDescriptor, BindGroupEntry, BindGroupLayout, BindGroupLayoutDescriptor,
            BindGroupLayoutEntry, BindingResource, BindingType, CachedRenderPipelineId,
            ColorTargetState, ColorWrites, FragmentState, MultisampleState, Operations,
            PipelineCache, PrimitiveState, RenderPassColorAttachment, RenderPassDescriptor,
            RenderPipelineDescriptor, Sampler, SamplerBindingType, SamplerDescriptor, ShaderStages,
            ShaderType, TextureFormat, TextureSampleType, TextureViewDimension,
        },
        renderer::{RenderContext, RenderDevice},
        texture::BevyDefault,
        view::{ExtractedView, ViewTarget},
        RenderApp,
    },
};

fn main() {
    App::new()
        .add_plugins(DefaultPlugins.set(AssetPlugin {
            // Hot reloading the shader works correctly
            watch_for_changes: true,
            ..default()
        }))
        .add_plugin(PostProcessPlugin)
        .add_systems(Startup, setup)
        .add_systems(Update, (rotate, update_settings))
        .run();
}

/// It is generally encouraged to set up post processing effects as a plugin
struct PostProcessPlugin;
impl Plugin for PostProcessPlugin {
    fn build(&self, app: &mut App) {
        app
            // The settings will be a component that lives in the main world but will
            // be extracted to the render world every frame.
            // This makes it possible to control the effect from the main world.
            // This plugin will take care of extracting it automatically.
            // It's important to derive [`ExtractComponent`] on [`PostProcessSettings`]
            // for this plugin to work correctly.
            .add_plugin(ExtractComponentPlugin::<PostProcessSettings>::default())
            // The settings will also be the data used in the shader.
            // This plugin will prepare the component for the GPU by creating a uniform buffer
            // and writing the data to that buffer every frame.
            .add_plugin(UniformComponentPlugin::<PostProcessSettings>::default());

        // We need to get the render app from the main app
        let Ok(render_app) = app.get_sub_app_mut(RenderApp) else {
            return;
        };

        render_app
            // Initialize the pipeline
            .init_resource::<PostProcessPipeline>()
            // Bevy's renderer uses a render graph which is a collection of nodes in a directed acyclic graph.
            // It currently runs on each view/camera and executes each node in the specified order.
            // It will make sure that any node that needs a dependency from another node
            // only runs when that dependency is done.
            //
            // Each node can execute arbitrary work, but it generally runs at least one render pass.
            // A node only has access to the render world, so if you need data from the main world
            // you need to extract it manually or with the plugin like above.
            // Add a [`Node`] to the [`RenderGraph`].
            // The Node needs to impl FromWorld.
            .add_render_graph_node::<PostProcessNode>(
                // Specify the name of the graph; in this case we want the graph for 3d
                core_3d::graph::NAME,
                // It also needs the name of the node
                PostProcessNode::NAME,
            )
            .add_render_graph_edges(
                core_3d::graph::NAME,
                // Specify the node ordering.
                // This will automatically create all required node edges to enforce the given ordering.
                &[
                    core_3d::graph::node::TONEMAPPING,
                    PostProcessNode::NAME,
                    core_3d::graph::node::END_MAIN_PASS_POST_PROCESSING,
                ],
            );
    }
}

/// The post process node used for the render graph
struct PostProcessNode {
    // The node needs a query to gather data from the ECS in order to do its rendering,
    // but it's not a normal system so we need to define it manually.
    query: QueryState<&'static ViewTarget, With<ExtractedView>>,
}

impl PostProcessNode {
    pub const NAME: &str = "post_process";
}

impl FromWorld for PostProcessNode {
    fn from_world(world: &mut World) -> Self {
        Self {
            query: QueryState::new(world),
        }
    }
}

impl Node for PostProcessNode {
    // This will run every frame before the run() method.
    // The important difference is that `self` is `mut` here.
    fn update(&mut self, world: &mut World) {
        // Since this is not a system we need to update the query manually.
        // This is mostly boilerplate. There are plans to remove this in the future.
        // For now, you can just copy it.
        self.query.update_archetypes(world);
    }

    // Runs the node logic.
    // This is where you encode draw commands.
    //
    // This will run on every view on which the graph is running. If you don't want your effect to run on every camera,
    // you'll need to make sure you have a marker component to identify which camera(s) should run the effect.
    fn run(
        &self,
        graph_context: &mut RenderGraphContext,
        render_context: &mut RenderContext,
        world: &World,
    ) -> Result<(), NodeRunError> {
        // Get the entity of the view for the render graph where this node is running
        let view_entity = graph_context.view_entity();

        // We get the data we need from the world based on the view entity passed to the node.
        // The data is the query that was defined earlier in the [`PostProcessNode`].
        let Ok(view_target) = self.query.get_manual(world, view_entity) else {
            return Ok(());
        };

        // Get the pipeline resource that contains the global data we need to create the render pipeline
        let post_process_pipeline = world.resource::<PostProcessPipeline>();

        // The pipeline cache is a cache of all previously created pipelines.
        // It is required to avoid creating a new pipeline each frame, which is expensive due to shader compilation.
        let pipeline_cache = world.resource::<PipelineCache>();

        // Get the pipeline from the cache
        let Some(pipeline) = pipeline_cache.get_render_pipeline(post_process_pipeline.pipeline_id) else {
            return Ok(());
        };

        // Get the settings uniform binding
        let settings_uniforms = world.resource::<ComponentUniforms<PostProcessSettings>>();
        let Some(settings_binding) = settings_uniforms.uniforms().binding() else {
            return Ok(());
        };

        // This will start a new "post process write", obtaining two texture
        // views from the view target - a `source` and a `destination`.
        // `source` is the "current" main texture and you _must_ write into
        // `destination` because calling `post_process_write()` on the
        // [`ViewTarget`] will internally flip the [`ViewTarget`]'s main
        // texture to the `destination` texture. Failing to do so will cause
        // the current main texture information to be lost.
        let post_process = view_target.post_process_write();

        // The bind_group gets created each frame.
        //
        // Normally, you would create a bind_group in the Queue set, but this doesn't work with the post_process_write().
        // The reason it doesn't work is because each post_process_write will alternate the source/destination.
        // The only way to have the correct source/destination for the bind_group is to make sure you get it during the node execution.
        let bind_group = render_context
            .render_device()
            .create_bind_group(&BindGroupDescriptor {
                label: Some("post_process_bind_group"),
                layout: &post_process_pipeline.layout,
                // It's important for this to match the BindGroupLayout defined in the PostProcessPipeline
                entries: &[
                    BindGroupEntry {
                        binding: 0,
                        // Make sure to use the source view
                        resource: BindingResource::TextureView(post_process.source),
                    },
                    BindGroupEntry {
                        binding: 1,
                        // Use the sampler created for the pipeline
                        resource: BindingResource::Sampler(&post_process_pipeline.sampler),
                    },
                    BindGroupEntry {
                        binding: 2,
                        // Set the settings binding
                        resource: settings_binding.clone(),
                    },
                ],
            });

        // Begin the render pass
        let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor {
            label: Some("post_process_pass"),
            color_attachments: &[Some(RenderPassColorAttachment {
                // We need to specify the post process destination view here
                // to make sure we write to the appropriate texture.
                view: post_process.destination,
                resolve_target: None,
                ops: Operations::default(),
            })],
            depth_stencil_attachment: None,
        });

        // This is mostly just wgpu boilerplate for drawing a fullscreen triangle,
        // using the pipeline/bind_group created above
        render_pass.set_render_pipeline(pipeline);
        render_pass.set_bind_group(0, &bind_group, &[]);
        render_pass.draw(0..3, 0..1);

        Ok(())
    }
}

// This contains global data used by the render pipeline. This will be created once on startup.
#[derive(Resource)]
struct PostProcessPipeline {
    layout: BindGroupLayout,
    sampler: Sampler,
    pipeline_id: CachedRenderPipelineId,
}

impl FromWorld for PostProcessPipeline {
    fn from_world(world: &mut World) -> Self {
        let render_device = world.resource::<RenderDevice>();

        // We need to define the bind group layout used for our pipeline
        let layout = render_device.create_bind_group_layout(&BindGroupLayoutDescriptor {
            label: Some("post_process_bind_group_layout"),
            entries: &[
                // The screen texture
                BindGroupLayoutEntry {
                    binding: 0,
                    visibility: ShaderStages::FRAGMENT,
                    ty: BindingType::Texture {
                        sample_type: TextureSampleType::Float { filterable: true },
                        view_dimension: TextureViewDimension::D2,
                        multisampled: false,
                    },
                    count: None,
                },
                // The sampler that will be used to sample the screen texture
                BindGroupLayoutEntry {
                    binding: 1,
                    visibility: ShaderStages::FRAGMENT,
                    ty: BindingType::Sampler(SamplerBindingType::Filtering),
                    count: None,
                },
                // The settings uniform that will control the effect
                BindGroupLayoutEntry {
                    binding: 2,
                    visibility: ShaderStages::FRAGMENT,
                    ty: BindingType::Buffer {
                        ty: bevy::render::render_resource::BufferBindingType::Uniform,
                        has_dynamic_offset: false,
                        min_binding_size: None,
                    },
                    count: None,
                },
            ],
        });

        // We can create the sampler here since it won't change at runtime and doesn't depend on the view
        let sampler = render_device.create_sampler(&SamplerDescriptor::default());

        // Get the shader handle
        let shader = world
            .resource::<AssetServer>()
            .load("shaders/post_process_pass.wgsl");

        let pipeline_id = world
            .resource_mut::<PipelineCache>()
            // This will add the pipeline to the cache and queue its creation
            .queue_render_pipeline(RenderPipelineDescriptor {
                label: Some("post_process_pipeline".into()),
                layout: vec![layout.clone()],
                // This will setup a fullscreen triangle for the vertex state
                vertex: fullscreen_shader_vertex_state(),
                fragment: Some(FragmentState {
                    shader,
                    shader_defs: vec![],
                    // Make sure this matches the entry point of your shader.
                    // It can be anything as long as it matches here and in the shader.
                    entry_point: "fragment".into(),
                    targets: vec![Some(ColorTargetState {
                        format: TextureFormat::bevy_default(),
                        blend: None,
                        write_mask: ColorWrites::ALL,
                    })],
                }),
                // All of the following properties are not important for this effect, so just use the default values.
                // This struct doesn't have the Default trait implemented because not all fields can have a default value.
                primitive: PrimitiveState::default(),
                depth_stencil: None,
                multisample: MultisampleState::default(),
                push_constant_ranges: vec![],
            });

        Self {
            layout,
            sampler,
            pipeline_id,
        }
    }
}

// This is the component that will get passed to the shader
#[derive(Component, Default, Clone, Copy, ExtractComponent, ShaderType)]
struct PostProcessSettings {
    intensity: f32,
}

/// Set up a simple 3D scene
fn setup(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    // camera
    commands.spawn((
        Camera3dBundle {
            transform: Transform::from_translation(Vec3::new(0.0, 0.0, 5.0))
                .looking_at(Vec3::default(), Vec3::Y),
            camera_3d: Camera3d {
                clear_color: ClearColorConfig::Custom(Color::WHITE),
                ..default()
            },
            ..default()
        },
        // Add the setting to the camera.
        // This component is also used to determine on which camera to run the post processing effect.
        PostProcessSettings { intensity: 0.02 },
    ));

    // cube
    commands.spawn((
        PbrBundle {
            mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })),
            material: materials.add(Color::rgb(0.8, 0.7, 0.6).into()),
            transform: Transform::from_xyz(0.0, 0.5, 0.0),
            ..default()
        },
        Rotates,
    ));
    // light
    commands.spawn(PointLightBundle {
        transform: Transform::from_translation(Vec3::new(0.0, 0.0, 10.0)),
        ..default()
    });
}

#[derive(Component)]
struct Rotates;

/// Rotates any entity with the `Rotates` component around the x and z axes
fn rotate(time: Res<Time>, mut query: Query<&mut Transform, With<Rotates>>) {
    for mut transform in &mut query {
        transform.rotate_x(0.55 * time.delta_seconds());
        transform.rotate_z(0.15 * time.delta_seconds());
    }
}

// Change the intensity over time to show that the effect is controlled from the main world
fn update_settings(mut settings: Query<&mut PostProcessSettings>, time: Res<Time>) {
    for mut setting in &mut settings {
        let mut intensity = time.elapsed_seconds().sin();
        // Make it loop periodically
        intensity = intensity.sin();
        // Remap it to 0..1 because the intensity can't be negative
        intensity = intensity * 0.5 + 0.5;
        // Scale it to a more reasonable level
        intensity *= 0.015;

        // Set the intensity. This will then be extracted to the render world and uploaded to the gpu automatically.
        setting.intensity = intensity;
    }
}
examples/shader/post_processing.rs — rewritten as part of the rename (188 lines removed, 393 added):

@@ -1,188 +1,393 @@

Removed — the old two-camera, render-to-texture version of the example:

//! A custom post processing effect, using two cameras, with one reusing the render texture of the first one.
//! Here a chromatic aberration is applied to a 3d scene containing a rotating cube.
//! This example is useful to implement your own post-processing effect such as
//! edge detection, blur, pixelization, vignette... and countless others.

use bevy::{
    core_pipeline::clear_color::ClearColorConfig,
    prelude::*,
    reflect::TypeUuid,
    render::{
        camera::RenderTarget,
        render_resource::{
            AsBindGroup, Extent3d, ShaderRef, TextureDescriptor, TextureDimension, TextureFormat,
            TextureUsages,
        },
        texture::BevyDefault,
        view::RenderLayers,
    },
    sprite::{Material2d, Material2dPlugin, MaterialMesh2dBundle},
};

fn main() {
    App::new()
        .add_plugins(DefaultPlugins)
        .add_plugin(Material2dPlugin::<PostProcessingMaterial>::default())
        .add_systems(Startup, setup)
        .add_systems(Update, main_camera_cube_rotator_system)
        .run();
}

/// Marks the first camera cube (rendered to a texture).
#[derive(Component)]
struct MainCube;

fn setup(
    mut commands: Commands,
    windows: Query<&Window>,
    mut meshes: ResMut<Assets<Mesh>>,
    mut post_processing_materials: ResMut<Assets<PostProcessingMaterial>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
    mut images: ResMut<Assets<Image>>,
) {
    // This assumes we only have a single window
    let window = windows.single();

    let size = Extent3d {
        width: window.resolution.physical_width(),
        height: window.resolution.physical_height(),
        ..default()
    };

    // This is the texture that will be rendered to.
    let mut image = Image {
        texture_descriptor: TextureDescriptor {
            label: None,
            size,
            dimension: TextureDimension::D2,
            format: TextureFormat::bevy_default(),
            mip_level_count: 1,
            sample_count: 1,
            usage: TextureUsages::TEXTURE_BINDING
                | TextureUsages::COPY_DST
                | TextureUsages::RENDER_ATTACHMENT,
            view_formats: &[],
        },
        ..default()
    };

    // fill image.data with zeroes
    image.resize(size);

    let image_handle = images.add(image);

    let cube_handle = meshes.add(Mesh::from(shape::Cube { size: 4.0 }));
    let cube_material_handle = materials.add(StandardMaterial {
        base_color: Color::rgb(0.8, 0.7, 0.6),
        reflectance: 0.02,
        unlit: false,
        ..default()
    });

    // The cube that will be rendered to the texture.
    commands.spawn((
        PbrBundle {
            mesh: cube_handle,
            material: cube_material_handle,
            transform: Transform::from_translation(Vec3::new(0.0, 0.0, 1.0)),
            ..default()
        },
        MainCube,
    ));

    // Light
    // NOTE: Currently lights are ignoring render layers - see https://github.com/bevyengine/bevy/issues/3462
    commands.spawn(PointLightBundle {
        transform: Transform::from_translation(Vec3::new(0.0, 0.0, 10.0)),
        ..default()
    });

    // Main camera, first to render
    commands.spawn((
        Camera3dBundle {
            camera_3d: Camera3d {
                clear_color: ClearColorConfig::Custom(Color::WHITE),
                ..default()
            },
            camera: Camera {
                target: RenderTarget::Image(image_handle.clone()),
                ..default()
            },
            transform: Transform::from_translation(Vec3::new(0.0, 0.0, 15.0))
                .looking_at(Vec3::default(), Vec3::Y),
            ..default()
        },
        // Disable UI rendering for the first pass camera. This prevents double rendering of UI at
        // the cost of rendering the UI without any post processing effects.
        UiCameraConfig { show_ui: false },
    ));

    // This specifies the layer used for the post processing camera, which will be attached to the post processing camera and 2d quad.
    let post_processing_pass_layer = RenderLayers::layer((RenderLayers::TOTAL_LAYERS - 1) as u8);

    let quad_handle = meshes.add(Mesh::from(shape::Quad::new(Vec2::new(
        size.width as f32,
        size.height as f32,
    ))));

    // This material has the texture that has been rendered.
    let material_handle = post_processing_materials.add(PostProcessingMaterial {
        source_image: image_handle,
    });

    // Post processing 2d quad, with material using the render texture done by the main camera, with a custom shader.
    commands.spawn((
        MaterialMesh2dBundle {
            mesh: quad_handle.into(),
            material: material_handle,
            transform: Transform {
                translation: Vec3::new(0.0, 0.0, 1.5),
                ..default()
            },
            ..default()
        },
        post_processing_pass_layer,
    ));

    // The post-processing pass camera.
    commands.spawn((
        Camera2dBundle {
            camera: Camera {
                // renders after the first main camera which has default value: 0.
                order: 1,
                ..default()
            },
            ..Camera2dBundle::default()
        },
        post_processing_pass_layer,
    ));
}

/// Rotates the cube rendered by the main camera
fn main_camera_cube_rotator_system(
    time: Res<Time>,
    mut query: Query<&mut Transform, With<MainCube>>,
) {
    for mut transform in &mut query {
        transform.rotate_x(0.55 * time.delta_seconds());
        transform.rotate_z(0.15 * time.delta_seconds());
    }
}

// The region below declares the custom material that handles the post processing effect

/// Our custom post processing material
#[derive(AsBindGroup, TypeUuid, Clone)]
#[uuid = "bc2f08eb-a0fb-43f1-a908-54871ea597d5"]
struct PostProcessingMaterial {
    /// In this example, this image will be the result of the main camera.
    #[texture(0)]
    #[sampler(1)]
    source_image: Handle<Image>,
}

impl Material2d for PostProcessingMaterial {
    fn fragment_shader() -> ShaderRef {
        "shaders/custom_material_chromatic_aberration.wgsl".into()
    }
}

Added (393 lines) — the content of the former examples/shader/post_process_pass.rs reproduced in the previous hunk, unchanged except that the shader is now loaded from "shaders/post_processing.wgsl" instead of "shaders/post_process_pass.wgsl".