Fix error/typo in SMAA shader (#14338)

# Objective

- Actually use the value assigned to `d_xz`, as in [the original SMAA
implementation](https://github.com/iryoku/smaa/blob/master/SMAA.hlsl#L960).
That the value was assigned but never read was likely a mistake made when
converting the shader from HLSL to WGSL (see the sketch below)
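
For context, a plausible account of how the typo crept in (my inference, not stated in the PR): HLSL allows assigning to a vector swizzle, but WGSL does not, so the port had to bind the search result to a named temporary and then read it back component by component, and that read-back was missed. A minimal sketch:

```wgsl
// HLSL can write two components at once through a swizzle, roughly:
//     d.xz = SMAASearchDiag2(...);
// WGSL forbids assigning to a swizzle, so the port binds the result to a
// named temporary instead:
let d_xz = search_diag_2(tex_coord, vec2(-1.0, -1.0), &end);
// ...which then has to be read back explicitly (`d_xz.x`, `d_xz.y`)
// everywhere the HLSL read `d.x` or `d.z`. That read-back is the step
// the port missed.
```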

## Solution

- Use `d_xz.x` and `d_xz.y` instead of `d.x` and `d.z`, as illustrated below
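
Concretely, the change in both branches (lines excerpted from the diff below, with comments added to spell out the effect):

```wgsl
// Before: `d.x` and `d.z` re-read `d`'s pre-existing components, so the
// result of the preceding `search_diag_2` call is silently discarded.
d = vec4(d.x, d_yw.x, d.z, d_yw.y);

// After: the diagonal search distances bound to `d_xz` are actually used.
d = vec4(d_xz.x, d_yw.x, d_xz.y, d_yw.y);
```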

## Testing

- Quickly tested on Windows 11 with Rust `1.79.0` (`x86_64-pc-windows-gnu`)
and the latest NVIDIA drivers. The app runs with SMAA enabled and everything
seems to work as intended
- I didn't observe any major visual difference between this and the
previous version, but this version should be more correct, as it matches
the original SMAA implementation

The change to the SMAA shader:

```diff
@@ -757,10 +757,10 @@ fn calculate_diag_weights(tex_coord: vec2<f32>, e: vec2<f32>, subsample_indices:
     let d_xz = search_diag_2(tex_coord, vec2(-1.0, -1.0), &end);
     if (textureSampleLevel(edges_texture, edges_sampler, tex_coord, 0.0, vec2(1, 0)).r > 0.0) {
         let d_yw = search_diag_2(tex_coord, vec2(1.0, 1.0), &end);
-        d = vec4(d.x, d_yw.x, d.z, d_yw.y);
+        d = vec4(d_xz.x, d_yw.x, d_xz.y, d_yw.y);
         d.y += f32(end.y > 0.9);
     } else {
-        d = vec4(d.x, 0.0, d.z, 0.0);
+        d = vec4(d_xz.x, 0.0, d_xz.y, 0.0);
     }
     if (d.x + d.y > 2.0) { // d.x + d.y + 1 > 3
```