Revert portion of commit that accidentally included TAA PR changes

This (partially) reverts commit fd58c77f82.
This commit is contained in:
Marco Buono 2023-10-19 23:51:07 -03:00
parent 27faa49cdf
commit 4ed21000e3
2 changed files with 99 additions and 66 deletions

View file

@@ -35,7 +35,7 @@ use bevy_render::{
ExtractSchedule, MainWorld, Render, RenderApp, RenderSet,
};
pub mod draw_3d_graph {
mod draw_3d_graph {
pub mod node {
/// Label for the TAA render node.
pub const TAA: &str = "taa";
@@ -71,10 +71,7 @@ impl Plugin for TemporalAntiAliasPlugin {
prepare_taa_history_textures.in_set(RenderSet::PrepareResources),
),
)
.add_render_graph_node::<ViewNodeRunner<TemporalAntiAliasNode>>(
CORE_3D,
draw_3d_graph::node::TAA,
)
.add_render_graph_node::<ViewNodeRunner<TAANode>>(CORE_3D, draw_3d_graph::node::TAA)
.add_render_graph_edges(
CORE_3D,
&[
@@ -113,13 +110,14 @@ pub struct TemporalAntiAliasBundle {
/// # Tradeoffs
///
/// Pros:
/// * Filters more types of aliasing than MSAA, such as textures and singular bright pixels (specular aliasing)
/// * Cost scales with screen/view resolution, unlike MSAA which scales with number of triangles
/// * Greatly increases the quality of stochastic rendering techniques such as SSAO, certain shadow map sampling methods, etc
/// * Filters more types of aliasing than MSAA, such as textures and singular bright pixels
/// * Greatly increases the quality of stochastic rendering techniques such as SSAO, shadow mapping, etc
///
/// Cons:
/// * Chance of "ghosting" - ghostly trails left behind moving objects
/// * Thin geometry, lighting detail, or texture lines may flicker noisily or disappear
/// * Thin geometry, lighting detail, or texture lines may flicker or disappear
/// * Slightly blurs the image, leading to a softer look (using an additional sharpening pass can reduce this)
///
/// Because TAA blends past frames with the current frame, when the frames differ too much
/// (such as with fast moving objects or camera cuts), ghosting artifacts may occur.
@@ -132,7 +130,7 @@ pub struct TemporalAntiAliasBundle {
/// and add the [`DepthPrepass`], [`MotionVectorPrepass`], and [`TemporalJitter`]
/// components to your camera.
///
/// Currently cannot be used with [`bevy_render::camera::OrthographicProjection`].
/// Cannot be used with [`bevy_render::camera::OrthographicProjection`].
///
/// Currently does not support skinned meshes and morph targets.
/// There will probably be ghosting artifacts if used with them.
@@ -153,7 +151,7 @@ pub struct TemporalAntiAliasSettings {
/// representative of the current frame, such as in sudden camera cuts.
///
/// After setting this to true, it will automatically be toggled
/// back to false at the end of the frame.
/// back to false after one frame.
pub reset: bool,
}
@@ -164,15 +162,15 @@ impl Default for TemporalAntiAliasSettings {
}
#[derive(Default)]
pub struct TemporalAntiAliasNode;
struct TAANode;
impl ViewNode for TemporalAntiAliasNode {
impl ViewNode for TAANode {
type ViewQuery = (
&'static ExtractedCamera,
&'static ViewTarget,
&'static TemporalAntiAliasHistoryTextures,
&'static TAAHistoryTextures,
&'static ViewPrepassTextures,
&'static TemporalAntiAliasPipelineId,
&'static TAAPipelineId,
);
fn run(
@@ -469,7 +467,7 @@ fn prepare_taa_jitter_and_mip_bias(
}
#[derive(Component)]
pub struct TemporalAntiAliasHistoryTextures {
struct TAAHistoryTextures {
write: CachedTexture,
read: CachedTexture,
}
@@ -509,12 +507,12 @@ fn prepare_taa_history_textures(
let history_2_texture = texture_cache.get(&render_device, texture_descriptor);
let textures = if frame_count.0 % 2 == 0 {
TemporalAntiAliasHistoryTextures {
TAAHistoryTextures {
write: history_1_texture,
read: history_2_texture,
}
} else {
TemporalAntiAliasHistoryTextures {
TAAHistoryTextures {
write: history_2_texture,
read: history_1_texture,
}
@@ -526,7 +524,7 @@ fn prepare_taa_history_textures(
}
#[derive(Component)]
pub struct TemporalAntiAliasPipelineId(CachedRenderPipelineId);
struct TAAPipelineId(CachedRenderPipelineId);
fn prepare_taa_pipelines(
mut commands: Commands,
@@ -548,8 +546,6 @@ fn prepare_taa_pipelines(
pipelines.specialize(&pipeline_cache, &pipeline, pipeline_key);
}
commands
.entity(entity)
.insert(TemporalAntiAliasPipelineId(pipeline_id));
commands.entity(entity).insert(TAAPipelineId(pipeline_id));
}
}

View file

@@ -1,10 +1,17 @@
// References:
// https://www.elopezr.com/temporal-aa-and-the-quest-for-the-holy-trail
// https://alextardif.com/TAA.html
// http://behindthepixels.io/assets/files/TemporalAA.pdf
// http://leiy.cc/publications/TAA/TAA_EG2020_Talk.pdf
// https://advances.realtimerendering.com/s2014/index.html#_HIGH-QUALITY_TEMPORAL_SUPERSAMPLING
// Controls how much to blend between the current and past samples
// Lower numbers = less of the current sample and more of the past sample = more smoothing
// Values chosen empirically
const DEFAULT_HISTORY_BLEND_RATE: f32 = 0.1; // Default blend rate to use when no confidence in history
const MIN_HISTORY_BLEND_RATE: f32 = 0.015; // Minimum blend rate allowed, to ensure at least some of the current sample is used
#import bevy_core_pipeline::fullscreen_vertex_shader
@group(0) @binding(0) var view_target: texture_2d<f32>;
@group(0) @binding(1) var history: texture_2d<f32>;
@group(0) @binding(2) var motion_vectors: texture_2d<f32>;
@@ -17,7 +24,7 @@ struct Output {
@location(1) history: vec4<f32>,
};
// TAA is ideally applied after tonemapping (if not tonemapping in the main pass), but before post processing
// TAA is ideally applied after tonemapping, but before post processing
// Post processing wants to go before tonemapping, which conflicts
// Solution: Put TAA before tonemapping, tonemap TAA input, apply TAA, invert-tonemap TAA output
// https://advances.realtimerendering.com/s2014/index.html#_HIGH-QUALITY_TEMPORAL_SUPERSAMPLING, slide 20
@@ -58,11 +65,11 @@ fn clip_towards_aabb_center(history_color: vec3<f32>, current_color: vec3<f32>,
}
fn sample_history(u: f32, v: f32) -> vec3<f32> {
return textureSampleLevel(history, linear_sampler, vec2(u, v), 0.0).rgb;
return textureSample(history, linear_sampler, vec2(u, v)).rgb;
}
fn sample_view_target(uv: vec2<f32>) -> vec3<f32> {
var sample = textureSampleLevel(view_target, nearest_sampler, uv, 0.0).rgb;
var sample = textureSample(view_target, nearest_sampler, uv).rgb;
#ifdef TONEMAP
sample = tonemap(sample);
#endif
@@ -74,42 +81,50 @@ fn taa(@location(0) uv: vec2<f32>) -> Output {
let texture_size = vec2<f32>(textureDimensions(view_target));
let texel_size = 1.0 / texture_size;
// Loop over 3x3 neighborhood of the pre-TAA rendered texture
// https://alextardif.com/TAA.html
var current_color = vec3(0.0);
var moment_1 = vec3(0.0);
var moment_2 = vec3(0.0);
var closest_depth = 0.0;
// Fetch the current sample
let original_color = textureSample(view_target, nearest_sampler, uv);
var current_color = original_color.rgb;
#ifdef TONEMAP
current_color = tonemap(current_color);
#endif
#ifndef RESET
// Pick the closest motion_vector from 5 samples (reduces aliasing on the edges of moving entities)
// https://advances.realtimerendering.com/s2014/index.html#_HIGH-QUALITY_TEMPORAL_SUPERSAMPLING, slide 27
let offset = texel_size * 2.0;
let d_uv_tl = uv + vec2(-offset.x, offset.y);
let d_uv_tr = uv + vec2(offset.x, offset.y);
let d_uv_bl = uv + vec2(-offset.x, -offset.y);
let d_uv_br = uv + vec2(offset.x, -offset.y);
var closest_uv = uv;
var weights = array(0.05556, 0.88889, 0.05556);
for (var x = -1.0; x <= 1.0; x += 1.0) {
for (var y = -1.0; y <= 1.0; y += 1.0) {
let sample_uv = uv + (vec2(x, y) * texel_size);
let sample = sample_view_target(sample_uv);
// Apply Mitchell-Netravali kernel over the jittered 3x3 neighborhood to reduce softness
let weight = weights[u32(x + 1.0)] * weights[u32(y + 1.0)];
current_color += sample * weight;
// Calculate first and second color moments for use with variance clipping
moment_1 += sample;
moment_2 += sample * sample;
// Find closest pixel to take motion vectors from (reduces aliasing on the edges of moving entities)
let sample_depth = textureSampleLevel(depth, nearest_sampler, sample_uv, 0.0);
if sample_depth > closest_depth {
closest_depth = sample_depth;
closest_uv = sample_uv;
}
}
let d_tl = textureSample(depth, nearest_sampler, d_uv_tl);
let d_tr = textureSample(depth, nearest_sampler, d_uv_tr);
var closest_depth = textureSample(depth, nearest_sampler, uv);
let d_bl = textureSample(depth, nearest_sampler, d_uv_bl);
let d_br = textureSample(depth, nearest_sampler, d_uv_br);
if d_tl > closest_depth {
closest_uv = d_uv_tl;
closest_depth = d_tl;
}
if d_tr > closest_depth {
closest_uv = d_uv_tr;
closest_depth = d_tr;
}
if d_bl > closest_depth {
closest_uv = d_uv_bl;
closest_depth = d_bl;
}
if d_br > closest_depth {
closest_uv = d_uv_br;
}
let closest_motion_vector = textureSample(motion_vectors, nearest_sampler, closest_uv).rg;
// Reproject to find the equivalent sample from the past
// Uses 5-sample Catmull-Rom filtering (reduces blurriness)
// Catmull-Rom filtering: https://gist.github.com/TheRealMJP/c83b8c0f46b63f3a88a5986f4fa982b1
// Ignoring corners: https://www.activision.com/cdn/research/Dynamic_Temporal_Antialiasing_and_Upsampling_in_Call_of_Duty_v4.pdf#page=68
// Technically we should renormalize the weights since we're skipping the corners, but it's basically the same result
let history_uv = uv - textureSampleLevel(motion_vectors, nearest_sampler, closest_uv, 0.0).rg;
let history_uv = uv - closest_motion_vector;
let sample_position = history_uv * texture_size;
let texel_center = floor(sample_position - 0.5) + 0.5;
let f = sample_position - texel_center;
@@ -130,30 +145,52 @@ fn taa(@location(0) uv: vec2<f32>) -> Output {
// Constrain past sample with 3x3 YCoCg variance clipping (reduces ghosting)
// YCoCg: https://advances.realtimerendering.com/s2014/index.html#_HIGH-QUALITY_TEMPORAL_SUPERSAMPLING, slide 33
// Variance clipping: https://developer.download.nvidia.com/gameworks/events/GDC2016/msalvi_temporal_supersampling.pdf
let s_tl = sample_view_target(uv + vec2(-texel_size.x, texel_size.y));
let s_tm = sample_view_target(uv + vec2( 0.0, texel_size.y));
let s_tr = sample_view_target(uv + vec2( texel_size.x, texel_size.y));
let s_ml = sample_view_target(uv + vec2(-texel_size.x, 0.0));
let s_mm = RGB_to_YCoCg(current_color);
let s_mr = sample_view_target(uv + vec2( texel_size.x, 0.0));
let s_bl = sample_view_target(uv + vec2(-texel_size.x, -texel_size.y));
let s_bm = sample_view_target(uv + vec2( 0.0, -texel_size.y));
let s_br = sample_view_target(uv + vec2( texel_size.x, -texel_size.y));
let moment_1 = s_tl + s_tm + s_tr + s_ml + s_mm + s_mr + s_bl + s_bm + s_br;
let moment_2 = (s_tl * s_tl) + (s_tm * s_tm) + (s_tr * s_tr) + (s_ml * s_ml) + (s_mm * s_mm) + (s_mr * s_mr) + (s_bl * s_bl) + (s_bm * s_bm) + (s_br * s_br);
let mean = moment_1 / 9.0;
let variance = (moment_2 / 9.0) - (mean * mean);
let std_deviation = sqrt(max(variance, vec3(0.0)));
history_color = clip_towards_aabb_center(history_color, current_color, mean - std_deviation, mean + std_deviation);
history_color = RGB_to_YCoCg(history_color);
history_color = clip_towards_aabb_center(history_color, s_mm, mean - std_deviation, mean + std_deviation);
history_color = YCoCg_to_RGB(history_color);
// Use more of the history if it's been visible for a few frames (reduces noise)
var accumulated_samples = textureSampleLevel(history, nearest_sampler, history_uv, 0.0).a;
// If the history_uv is pointing off-screen, reset accumulated sample count
accumulated_samples *= f32(all(saturate(history_uv) == history_uv));
#ifdef RESET
accumulated_samples = 0.0;
#endif
accumulated_samples = max(accumulated_samples + 1.0, 8.0);
// How confident we are that the history is representative of the current frame
var history_confidence = textureSample(history, nearest_sampler, uv).a;
let pixel_motion_vector = abs(closest_motion_vector) * texture_size;
if pixel_motion_vector.x < 0.01 && pixel_motion_vector.y < 0.01 {
// Increment when pixels are not moving
history_confidence += 10.0;
} else {
// Else reset
history_confidence = 1.0;
}
// Blend current and past sample
current_color = mix(history_color, current_color, 1.0 / accumulated_samples);
// Use more of the history if we're confident in it (reduces noise when there is no motion)
// https://hhoppe.com/supersample.pdf, section 4.1
let current_color_factor = clamp(1.0 / history_confidence, MIN_HISTORY_BLEND_RATE, DEFAULT_HISTORY_BLEND_RATE);
current_color = mix(history_color, current_color, current_color_factor);
#endif // #ifndef RESET
// Write output to history and view target
var out: Output;
out.history = vec4(current_color, accumulated_samples);
current_color = YCoCg_to_RGB(current_color);
#ifdef RESET
let history_confidence = 1.0 / MIN_HISTORY_BLEND_RATE;
#endif
out.history = vec4(current_color, history_confidence);
#ifdef TONEMAP
current_color = reverse_tonemap(current_color);
#endif
out.view_target = vec4(current_color, 1.0);
out.view_target = vec4(current_color, original_color.a);
return out;
}