diff options
author | Clément Foucault <foucault.clem@gmail.com> | 2020-11-05 19:57:19 +0300 |
---|---|---|
committer | Clément Foucault <foucault.clem@gmail.com> | 2020-11-05 20:00:27 +0300 |
commit | aae60f0fecf74c93d8c6e8a5e33ffd4fb6c76a3a (patch) | |
tree | ea3d92528c40431d6a70cc678c4a64948eabca30 | |
parent | 15eec7f8b985852219e842ef434f80b6cf9ed165 (diff) |
Fix T81752 EEVEE: Camera Motion Blur is not blending steps properly
This was due to an improper calculation of the velocity factor and an
error in the camera data swapping between the two steps.
-rw-r--r-- | source/blender/draw/engines/eevee/eevee_motion_blur.c | 2 | ||||
-rw-r--r-- | source/blender/draw/engines/eevee/shaders/effect_velocity_resolve_frag.glsl | 15 |
2 files changed, 10 insertions, 7 deletions
diff --git a/source/blender/draw/engines/eevee/eevee_motion_blur.c b/source/blender/draw/engines/eevee/eevee_motion_blur.c index f60c2661cb0..fe3705f1fbb 100644 --- a/source/blender/draw/engines/eevee/eevee_motion_blur.c +++ b/source/blender/draw/engines/eevee/eevee_motion_blur.c @@ -490,7 +490,7 @@ void EEVEE_motion_blur_swap_data(EEVEE_Data *vedata) BLI_assert((effects->enabled_effects & EFFECT_MOTION_BLUR) != 0); /* Camera Data. */ - effects->motion_blur.camera[MB_PREV] = effects->motion_blur.camera[MB_CURR]; + effects->motion_blur.camera[MB_PREV] = effects->motion_blur.camera[MB_NEXT]; /* Object Data. */ for (BLI_ghashIterator_init(&ghi, effects->motion_blur.object); diff --git a/source/blender/draw/engines/eevee/shaders/effect_velocity_resolve_frag.glsl b/source/blender/draw/engines/eevee/shaders/effect_velocity_resolve_frag.glsl index 145939cefb2..9182171bab4 100644 --- a/source/blender/draw/engines/eevee/shaders/effect_velocity_resolve_frag.glsl +++ b/source/blender/draw/engines/eevee/shaders/effect_velocity_resolve_frag.glsl @@ -13,16 +13,19 @@ void main() { /* Extract pixel motion vector from camera movement. 
*/ ivec2 texel = ivec2(gl_FragCoord.xy); - vec2 uv = gl_FragCoord.xy / vec2(textureSize(depthBuffer, 0).xy); + vec2 uv_curr = gl_FragCoord.xy / vec2(textureSize(depthBuffer, 0).xy); float depth = texelFetch(depthBuffer, texel, 0).r; - vec3 world_position = project_point(currViewProjMatrixInv, vec3(uv, depth) * 2.0 - 1.0); - vec2 uv_prev = project_point(prevViewProjMatrix, world_position).xy * 0.5 + 0.5; - vec2 uv_next = project_point(nextViewProjMatrix, world_position).xy * 0.5 + 0.5; + uv_curr = uv_curr * 2.0 - 1.0; + depth = depth * 2.0 - 1.0; - outData.xy = uv_prev - uv; - outData.zw = uv_next - uv; + vec3 world_position = project_point(currViewProjMatrixInv, vec3(uv_curr, depth)); + vec2 uv_prev = project_point(prevViewProjMatrix, world_position).xy; + vec2 uv_next = project_point(nextViewProjMatrix, world_position).xy; + + outData.xy = uv_prev - uv_curr; + outData.zw = uv_next - uv_curr; /* Encode to unsigned normalized 16bit texture. */ outData = outData * 0.5 + 0.5; |