Welcome to mirror list, hosted at ThFree Co, Russian Federation.

git.blender.org/blender.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorClément Foucault <foucault.clem@gmail.com>2021-04-12 17:34:11 +0300
committerClément Foucault <foucault.clem@gmail.com>2021-04-12 22:34:08 +0300
commita4ae2b91c9dd41d06728bf66ea4a3ec320f5c904 (patch)
tree891af3e0e416f96456bd0646e7172699359017b2 /source/blender
parent0cd48960379e0acfecd00e95ce111f777a53d076 (diff)
EEVEE: Motion Blur: Add back post process motion blur
This is almost the same thing as the old implementation. Differences: - We clamp the motion vectors to their maximum when sampling the velocity buffer. - Velocity rendering (and data manager) is separated from motion blur. This allows outputting the motion vector render pass and, in the future, using motion vectors to reproject older frames. - Vector render pass support (only if motion blur is disabled, just like cycles). - Velocity tiles are computed in one pass (simpler code, less CPU overhead, less VRAM usage, maybe a bit slower but imperceptible (< 0.3ms)). - Two velocity passes are output, one for motion blur fx (applied per shading view) and one for the vector pass. This could be optimized further in the future. - No current support for deformation & hair (to come).
Diffstat (limited to 'source/blender')
-rw-r--r--source/blender/draw/CMakeLists.txt9
-rw-r--r--source/blender/draw/engines/eevee/eevee_camera.hh14
-rw-r--r--source/blender/draw/engines/eevee/eevee_engine.c1
-rw-r--r--source/blender/draw/engines/eevee/eevee_film.hh1
-rw-r--r--source/blender/draw/engines/eevee/eevee_instance.hh17
-rw-r--r--source/blender/draw/engines/eevee/eevee_motion_blur.hh311
-rw-r--r--source/blender/draw/engines/eevee/eevee_renderpasses.hh14
-rw-r--r--source/blender/draw/engines/eevee/eevee_sampling.hh2
-rw-r--r--source/blender/draw/engines/eevee/eevee_shader.hh32
-rw-r--r--source/blender/draw/engines/eevee/eevee_shader_shared.hh36
-rw-r--r--source/blender/draw/engines/eevee/eevee_velocity.hh412
-rw-r--r--source/blender/draw/engines/eevee/eevee_view.hh67
-rw-r--r--source/blender/draw/engines/eevee/eevee_wrapper.hh33
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_camera_velocity_frag.glsl52
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_film_lib.glsl4
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_motion_blur_gather_frag.glsl220
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_motion_blur_lib.glsl18
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_motion_blur_tiles_dilate_frag.glsl113
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_motion_blur_tiles_flatten_frag.glsl51
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_object_forward_frag.glsl4
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_object_velocity_frag.glsl43
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_object_velocity_lib.glsl8
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_object_velocity_mesh_vert.glsl40
-rw-r--r--source/blender/draw/engines/eevee/shaders/eevee_velocity_lib.glsl38
24 files changed, 1395 insertions, 145 deletions
diff --git a/source/blender/draw/CMakeLists.txt b/source/blender/draw/CMakeLists.txt
index 5c07c94ffc6..5deb8620de8 100644
--- a/source/blender/draw/CMakeLists.txt
+++ b/source/blender/draw/CMakeLists.txt
@@ -177,6 +177,7 @@ set(LIB
)
data_to_c_simple(engines/eevee/shaders/eevee_camera_lib.glsl SRC)
+data_to_c_simple(engines/eevee/shaders/eevee_camera_velocity_frag.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_depth_of_field_accumulator_lib.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_depth_of_field_bokeh_lut_frag.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_depth_of_field_filter_frag.glsl SRC)
@@ -196,6 +197,10 @@ data_to_c_simple(engines/eevee/shaders/eevee_depth_of_field_tiles_flatten_frag.g
data_to_c_simple(engines/eevee/shaders/eevee_film_filter_frag.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_film_lib.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_film_resolve_frag.glsl SRC)
+data_to_c_simple(engines/eevee/shaders/eevee_motion_blur_gather_frag.glsl SRC)
+data_to_c_simple(engines/eevee/shaders/eevee_motion_blur_lib.glsl SRC)
+data_to_c_simple(engines/eevee/shaders/eevee_motion_blur_tiles_dilate_frag.glsl SRC)
+data_to_c_simple(engines/eevee/shaders/eevee_motion_blur_tiles_flatten_frag.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_object_deferred_frag.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_object_depth_colored_frag.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_object_depth_peeled_frag.glsl SRC)
@@ -206,7 +211,11 @@ data_to_c_simple(engines/eevee/shaders/eevee_object_gpencil_vert.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_object_hair_vert.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_object_lib.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_object_mesh_vert.glsl SRC)
+data_to_c_simple(engines/eevee/shaders/eevee_object_velocity_frag.glsl SRC)
+data_to_c_simple(engines/eevee/shaders/eevee_object_velocity_lib.glsl SRC)
+data_to_c_simple(engines/eevee/shaders/eevee_object_velocity_mesh_vert.glsl SRC)
data_to_c_simple(engines/eevee/shaders/eevee_sampling_lib.glsl SRC)
+data_to_c_simple(engines/eevee/shaders/eevee_velocity_lib.glsl SRC)
data_to_c_simple(engines/eevee/eevee_shader_shared.hh SRC)
diff --git a/source/blender/draw/engines/eevee/eevee_camera.hh b/source/blender/draw/engines/eevee/eevee_camera.hh
index 8da894030ac..5844df861ee 100644
--- a/source/blender/draw/engines/eevee/eevee_camera.hh
+++ b/source/blender/draw/engines/eevee/eevee_camera.hh
@@ -141,8 +141,6 @@ class Camera {
bool has_changed_ = true;
/** Detects wrong usage. */
bool synced_ = false;
- /** Last sample we synced with. Avoid double sync. */
- uint64_t last_sample_ = 0;
/** Original object of the camera. */
Object *camera_original_ = nullptr;
/** Evaluated camera object. Only valid after sync. */
@@ -180,7 +178,8 @@ class Camera {
data.type = DRW_view_is_persp_get(drw_view) ? CAMERA_PERSP : CAMERA_ORTHO;
}
- /* Sync early to detect changes. This is ok since we avoid double sync later. */
+ /* TODO Avoid double sync in viewport. */
+ /* Sync early to detect changes. */
this->sync(drw_view);
/* Detect changes in parameters. */
@@ -195,15 +194,6 @@ class Camera {
const Scene *scene = DEG_get_evaluated_scene(depsgraph_);
object_eval_ = DEG_get_evaluated_object(depsgraph_, camera_original_);
- uint64_t sample = sampling_.sample_get();
- if (last_sample_ != sample || !synced_) {
- last_sample_ = sample;
- }
- else {
- /* Avoid double sync. */
- return;
- }
-
CameraData &data = data_[data_id_];
data.filter_size = scene->r.gauss;
diff --git a/source/blender/draw/engines/eevee/eevee_engine.c b/source/blender/draw/engines/eevee/eevee_engine.c
index eafd53b8ba3..464687c7734 100644
--- a/source/blender/draw/engines/eevee/eevee_engine.c
+++ b/source/blender/draw/engines/eevee/eevee_engine.c
@@ -109,6 +109,7 @@ static void eevee_render_update_passes(RenderEngine *engine, Scene *scene, ViewL
CHECK_PASS_LEGACY(Z, SOCK_FLOAT, 1, "Z");
CHECK_PASS_LEGACY(MIST, SOCK_FLOAT, 1, "Z");
CHECK_PASS_LEGACY(NORMAL, SOCK_VECTOR, 3, "XYZ");
+ CHECK_PASS_LEGACY(VECTOR, SOCK_RGBA, 4, "RGBA");
CHECK_PASS_LEGACY(SHADOW, SOCK_RGBA, 3, "RGB");
CHECK_PASS_LEGACY(AO, SOCK_RGBA, 3, "RGB");
CHECK_PASS_LEGACY(DIFFUSE_COLOR, SOCK_RGBA, 3, "RGB");
diff --git a/source/blender/draw/engines/eevee/eevee_film.hh b/source/blender/draw/engines/eevee/eevee_film.hh
index b78a0fa82ee..bf7843d4453 100644
--- a/source/blender/draw/engines/eevee/eevee_film.hh
+++ b/source/blender/draw/engines/eevee/eevee_film.hh
@@ -52,6 +52,7 @@ static eGPUTextureFormat to_gpu_texture_format(eFilmDataType film_type)
default:
case FILM_DATA_COLOR_LOG:
case FILM_DATA_COLOR:
+ case FILM_DATA_MOTION:
case FILM_DATA_VEC4:
return GPU_RGBA16F;
case FILM_DATA_FLOAT:
diff --git a/source/blender/draw/engines/eevee/eevee_instance.hh b/source/blender/draw/engines/eevee/eevee_instance.hh
index d4a6a0439dd..0d0492b81fd 100644
--- a/source/blender/draw/engines/eevee/eevee_instance.hh
+++ b/source/blender/draw/engines/eevee/eevee_instance.hh
@@ -49,8 +49,10 @@ class Instance {
MainView main_view_;
/** Point of view in the scene. Can be init from viewport or camera object. */
Camera camera_;
+ /** Velocity module containing motion data. */
+ Velocity velocity_;
/** Motion blur data. */
- MotionBlur motion_blur_;
+ MotionBlurModule motion_blur_;
/** Lookdev own lightweight instance. May not be allocated. */
// Lookdev *lookdev_ = nullptr;
@@ -69,10 +71,11 @@ class Instance {
Instance(ShaderModule &shared_shaders)
: render_passes_(shared_shaders, camera_, sampling_),
shaders_(shared_shaders),
- shading_passes_(shared_shaders),
- main_view_(shared_shaders, shading_passes_, camera_, sampling_),
+ shading_passes_(shared_shaders, camera_, velocity_),
+ main_view_(shared_shaders, shading_passes_, camera_, sampling_, motion_blur_),
camera_(sampling_),
- motion_blur_(sampling_){};
+ velocity_(),
+ motion_blur_(camera_, sampling_, velocity_){};
~Instance(){};
/**
@@ -116,10 +119,11 @@ class Instance {
}
sampling_.init(scene_);
- motion_blur_.init(scene_, render, depsgraph_);
camera_.init(render_, depsgraph_, camera_object, drw_view_);
+ motion_blur_.init(scene_, render, depsgraph_);
render_passes_.init(scene_, render_layer, v3d_, output_res, output_rect);
main_view_.init(scene_, output_res);
+ velocity_.init(camera_, render_, depsgraph_, render_passes_);
}
/**
@@ -141,6 +145,7 @@ class Instance {
switch (ob->type) {
case OB_MESH:
shading_passes_.opaque.surface_add(ob, nullptr, 0);
+ shading_passes_.velocity.mesh_add(ob);
break;
default:
@@ -157,7 +162,7 @@ class Instance {
void end_sync(void)
{
- motion_blur_.end_sync();
+ velocity_.end_sync();
}
void render_sync(void)
diff --git a/source/blender/draw/engines/eevee/eevee_motion_blur.hh b/source/blender/draw/engines/eevee/eevee_motion_blur.hh
index a1f8d3858c5..735bd48e6fe 100644
--- a/source/blender/draw/engines/eevee/eevee_motion_blur.hh
+++ b/source/blender/draw/engines/eevee/eevee_motion_blur.hh
@@ -54,61 +54,21 @@
#include "eevee_sampling.hh"
#include "eevee_shader_shared.hh"
+#include "eevee_velocity.hh"
namespace blender::eevee {
-class MotionBlur {
- public:
- struct ViewStep {
- CameraData cam_data;
- };
+/* -------------------------------------------------------------------- */
+/** \name MotionBlur
+ *
+ * Common module. Manages timesteps evaluations and accumulation Motion blur.
+ * \{ */
+class MotionBlurModule {
private:
- enum eStep {
- STEP_PREVIOUS = 0,
- STEP_NEXT = 1,
- STEP_CURRENT = 2,
- };
-
- struct ObjectSteps {
- mat4 obmats[3];
- };
-
- struct HairSteps {
- /** Boolean used as uniform to disable deformation motion-blur just before drawing. */
- int use_deform;
- /** Position buffer for time = t +/- step. */
- struct GPUVertBuf *hair_pos[2] = {nullptr};
- /** Buffer Texture of the corresponding VBO. */
- struct GPUTexture *hair_pos_tx[2] = {nullptr};
- };
-
- struct GeometrySteps : public ObjectSteps {
- /** Boolean used as uniform to disable deformation motion-blur just before drawing. */
- int use_deform;
- /** Batch for time = t. */
- struct GPUBatch *batch = nullptr;
- /** Vbo for time = t +/- step. */
- struct GPUVertBuf *vbo[2] = {nullptr};
- /** Hair motion steps for particle systems. */
- Vector<HairSteps> psys;
- };
-
- /** Unique key to identify each object in the hashmap. */
- struct ObjectKey {
- /** Original Object or source object for duplis. */
- struct Object *ob = nullptr;
- /** Original Parent object for duplis. */
- struct Object *parent = nullptr;
- /** Dupli objects recursive unique identifier */
- int id[8] = {0}; /* MAX_DUPLI_RECUR */
- };
-
- ViewStep camera_steps[3];
- Map<ObjectKey, GeometrySteps *> geom_steps_;
- Map<ObjectKey, HairSteps *> hair_steps_;
-
Sampling &sampling_;
+ Velocity &velocity_;
+ Camera &camera_;
RenderEngine *engine_;
Depsgraph *depsgraph_;
@@ -127,17 +87,17 @@ class MotionBlur {
/** Copy of scene settings. */
int motion_blur_position_;
float motion_blur_shutter_;
+ float motion_blur_fx_depth_scale_;
- /** */
- bool use_fx_motion_blur = false;
bool enabled_ = false;
+ float motion_blur_fx_enabled_ = false;
- eStep step_type_ = STEP_CURRENT;
int step_id_ = 0;
public:
- MotionBlur(Sampling &sampling) : sampling_(sampling){};
- ~MotionBlur(){};
+ MotionBlurModule(Camera &camera, Sampling &sampling, Velocity &velocity)
+ : sampling_(sampling), velocity_(velocity), camera_(camera){};
+ ~MotionBlurModule(){};
void init(const Scene *scene, RenderEngine *engine, Depsgraph *depsgraph)
{
@@ -148,6 +108,7 @@ class MotionBlur {
enabled_ = false;
}
if (!enabled_) {
+ motion_blur_fx_enabled_ = false;
return;
}
@@ -173,28 +134,20 @@ class MotionBlur {
time = this->shutter_time_to_scene_time(time);
}
- // use_fx_motion_blur = scene->eevee.motion_blur_max > 0.0f;
- use_fx_motion_blur = false;
+ motion_blur_fx_enabled_ = scene->eevee.motion_blur_max > 0.5f;
step_id_ = 1;
engine_ = engine;
depsgraph_ = depsgraph;
- if (use_fx_motion_blur) {
+ if (motion_blur_fx_enabled_) {
/* A bit weird but we have to sync the first 2 steps here because the step()
- * function is only called. */
- this->step_sync(time_steps_[0], STEP_PREVIOUS);
- this->step_sync(time_steps_[2], STEP_NEXT);
- }
- this->set_time(time_steps_[1]);
- }
-
- /* Runs once per center time step. */
- void end_sync(void)
- {
- if (!enabled_) {
- return;
+ * function is only called rendering a sample. */
+ velocity_.step_sync(Velocity::STEP_PREVIOUS, camera_, engine_, depsgraph_, time_steps_[0]);
+ velocity_.step_sync(Velocity::STEP_NEXT, camera_, engine_, depsgraph_, time_steps_[2]);
}
+ float frame_time = time_steps_[1];
+ DRW_render_set_time(engine_, depsgraph_, floorf(frame_time), fractf(frame_time));
}
/* Runs after rendering a sample. */
@@ -212,51 +165,22 @@ class MotionBlur {
BLI_assert(time_steps_.size() > step_id_ + 2);
step_id_ += 2;
- if (use_fx_motion_blur) {
- this->step_swap();
- this->step_sync(time_steps_[step_id_ + 1], STEP_NEXT);
+ if (motion_blur_fx_enabled_) {
+ velocity_.step_swap();
+ velocity_.step_sync(
+ Velocity::STEP_NEXT, camera_, engine_, depsgraph_, time_steps_[step_id_ + 1]);
}
- step_type_ = STEP_CURRENT;
- this->set_time(time_steps_[step_id_]);
+ float frame_time = time_steps_[step_id_];
+ DRW_render_set_time(engine_, depsgraph_, floorf(frame_time), fractf(frame_time));
}
}
- private:
- /* Gather motion data from all objects in the scene. */
- static void step_object_sync(void *motion_blur_,
- Object *ob,
- RenderEngine *UNUSED(engine),
- Depsgraph *UNUSED(depsgraph))
+ bool blur_fx_enabled_get(void) const
{
- MotionBlur &mb = *reinterpret_cast<MotionBlur *>(motion_blur_);
- (void)mb;
-
- switch (ob->type) {
- case OB_MESH:
- break;
-
- default:
- break;
- }
- }
-
- void step_sync(float time, eStep step)
- {
- step_type_ = step;
- this->set_time(time);
- DRW_render_object_iter(this, engine_, depsgraph_, MotionBlur::step_object_sync);
- }
-
- /* Swaps next frame data */
- void step_swap()
- {
- }
-
- void set_time(float time)
- {
- DRW_render_set_time(engine_, depsgraph_, floorf(time), fractf(time));
+ return motion_blur_fx_enabled_;
}
+ private:
float shutter_time_to_scene_time(float time)
{
switch (motion_blur_position_) {
@@ -279,4 +203,177 @@ class MotionBlur {
}
};
+/** \} */
+
+/* -------------------------------------------------------------------- */
+/** \name MotionBlur
+ *
+ * Per view fx module. Perform a motion blur using the result of the velocity pass.
+ * \{ */
+
+class MotionBlur {
+ private:
+ ShaderModule &shaders_;
+ Sampling &sampling_;
+ MotionBlurModule &mb_module_;
+
+ StringRefNull view_name_;
+
+ /** Textures from pool. Not owned. */
+ GPUTexture *tiles_tx_ = nullptr;
+ GPUTexture *tiles_dilated_tx_ = nullptr;
+ /** Input texture. Not owned. */
+ GPUTexture *input_velocity_tx_ = nullptr;
+ GPUTexture *input_color_tx_ = nullptr;
+ GPUTexture *input_depth_tx_ = nullptr;
+ /** Passes. Not owned. */
+ DRWPass *tiles_flatten_ps_ = nullptr;
+ DRWPass *tiles_dilate_ps_ = nullptr;
+ DRWPass *gather_ps_ = nullptr;
+ /** Framebuffers. Owned. */
+ GPUFrameBuffer *tiles_flatten_fb_ = nullptr;
+ GPUFrameBuffer *tiles_dilate_fb_ = nullptr;
+ GPUFrameBuffer *gather_fb_ = nullptr;
+
+ StructBuffer<MotionBlurData> data_;
+
+ bool enabled_;
+
+ public:
+ MotionBlur(ShaderModule &shaders,
+ Sampling &sampling,
+ MotionBlurModule &mb_module,
+ StringRefNull view_name)
+ : shaders_(shaders), sampling_(sampling), mb_module_(mb_module), view_name_(view_name){};
+
+ ~MotionBlur()
+ {
+ GPU_FRAMEBUFFER_FREE_SAFE(tiles_flatten_fb_);
+ GPU_FRAMEBUFFER_FREE_SAFE(tiles_dilate_fb_);
+ GPU_FRAMEBUFFER_FREE_SAFE(gather_fb_);
+ }
+
+ void init(const Scene *scene)
+ {
+ data_.blur_max = scene->eevee.motion_blur_max;
+ data_.depth_scale = scene->eevee.motion_blur_depth_scale;
+ enabled_ = ((scene->eevee.flag & SCE_EEVEE_MOTION_BLUR_ENABLED) != 0) &&
+ (data_.blur_max > 0.5f);
+ }
+
+ void sync(int extent[2])
+ {
+ if (!enabled_) {
+ return;
+ }
+
+ DrawEngineType *owner = (DrawEngineType *)view_name_.c_str();
+ eGPUSamplerState no_filter = GPU_SAMPLER_DEFAULT;
+
+ uint res[2] = {divide_ceil_u(extent[0], MB_TILE_DIVISOR),
+ divide_ceil_u(extent[1], MB_TILE_DIVISOR)};
+
+ {
+ /* Create max velocity tiles in 2 passes. One for X and one for Y */
+ DRW_PASS_CREATE(tiles_flatten_ps_, DRW_STATE_WRITE_COLOR);
+ GPUShader *sh = shaders_.static_shader_get(MOTION_BLUR_TILE_FLATTEN);
+ DRWShadingGroup *grp = DRW_shgroup_create(sh, tiles_flatten_ps_);
+ DRW_shgroup_uniform_texture_ref_ex(grp, "velocity_tx", &input_velocity_tx_, no_filter);
+ DRW_shgroup_uniform_block(grp, "motion_blur_block", data_.ubo_get());
+ DRW_shgroup_call_procedural_triangles(grp, NULL, 1);
+
+ tiles_tx_ = DRW_texture_pool_query_2d(UNPACK2(res), GPU_RGBA16F, owner);
+
+ GPU_framebuffer_ensure_config(&tiles_flatten_fb_,
+ {
+ GPU_ATTACHMENT_NONE,
+ GPU_ATTACHMENT_TEXTURE(tiles_tx_),
+ });
+ }
+ {
+ /* Expand max tiles by keeping the max tile in each tile neighborhood. */
+ DRW_PASS_CREATE(tiles_dilate_ps_, DRW_STATE_WRITE_COLOR);
+ GPUShader *sh = shaders_.static_shader_get(MOTION_BLUR_TILE_DILATE);
+ DRWShadingGroup *grp = DRW_shgroup_create(sh, tiles_dilate_ps_);
+ DRW_shgroup_uniform_texture_ref_ex(grp, "tiles_tx", &tiles_tx_, no_filter);
+ DRW_shgroup_uniform_block(grp, "motion_blur_block", data_.ubo_get());
+ DRW_shgroup_call_procedural_triangles(grp, NULL, 1);
+
+ tiles_dilated_tx_ = DRW_texture_pool_query_2d(UNPACK2(res), GPU_RGBA16F, owner);
+
+ GPU_framebuffer_ensure_config(&tiles_dilate_fb_,
+ {
+ GPU_ATTACHMENT_NONE,
+ GPU_ATTACHMENT_TEXTURE(tiles_dilated_tx_),
+ });
+ }
+ {
+ data_.target_size_inv[0] = 1.0f / extent[0];
+ data_.target_size_inv[1] = 1.0f / extent[1];
+
+ /* Do the motion blur gather algorithm. */
+ DRW_PASS_CREATE(gather_ps_, DRW_STATE_WRITE_COLOR);
+ GPUShader *sh = shaders_.static_shader_get(MOTION_BLUR_GATHER);
+ DRWShadingGroup *grp = DRW_shgroup_create(sh, gather_ps_);
+ DRW_shgroup_uniform_block(grp, "sampling_block", sampling_.ubo_get());
+ DRW_shgroup_uniform_block(grp, "motion_blur_block", data_.ubo_get());
+ DRW_shgroup_uniform_texture_ref(grp, "color_tx", &input_color_tx_);
+ DRW_shgroup_uniform_texture_ref(grp, "depth_tx", &input_depth_tx_);
+ DRW_shgroup_uniform_texture_ref_ex(grp, "velocity_tx", &input_velocity_tx_, no_filter);
+ DRW_shgroup_uniform_texture_ref_ex(grp, "tiles_tx", &tiles_dilated_tx_, no_filter);
+
+ DRW_shgroup_call_procedural_triangles(grp, NULL, 1);
+ }
+
+ data_.is_viewport = !DRW_state_is_image_render();
+ data_.push_update();
+ }
+
+ void render(GPUTexture *depth_tx,
+ GPUTexture *velocity_tx,
+ GPUTexture **input_tx,
+ GPUTexture **output_tx)
+ {
+ if (!enabled_) {
+ return;
+ }
+
+ input_color_tx_ = *input_tx;
+ input_depth_tx_ = depth_tx;
+ input_velocity_tx_ = velocity_tx;
+
+ DRW_stats_group_start("Motion Blur");
+
+ GPU_framebuffer_bind(tiles_flatten_fb_);
+ DRW_draw_pass(tiles_flatten_ps_);
+
+ for (int max_blur = data_.blur_max; max_blur > 0; max_blur -= MB_TILE_DIVISOR) {
+ GPU_framebuffer_bind(tiles_dilate_fb_);
+ DRW_draw_pass(tiles_dilate_ps_);
+ SWAP(GPUTexture *, tiles_tx_, tiles_dilated_tx_);
+ SWAP(GPUFrameBuffer *, tiles_flatten_fb_, tiles_dilate_fb_);
+ }
+ /* Swap again so result is in tiles_dilated_tx_. */
+ SWAP(GPUTexture *, tiles_tx_, tiles_dilated_tx_);
+ SWAP(GPUFrameBuffer *, tiles_flatten_fb_, tiles_dilate_fb_);
+
+ GPU_framebuffer_ensure_config(&gather_fb_,
+ {
+ GPU_ATTACHMENT_NONE,
+ GPU_ATTACHMENT_TEXTURE(*output_tx),
+ });
+
+ GPU_framebuffer_bind(gather_fb_);
+ DRW_draw_pass(gather_ps_);
+
+ DRW_stats_group_end();
+
+ /* Swap buffers so that next effect has the right input. */
+ *input_tx = *output_tx;
+ *output_tx = input_color_tx_;
+ }
+};
+
+/** \} */
+
} // namespace blender::eevee
diff --git a/source/blender/draw/engines/eevee/eevee_renderpasses.hh b/source/blender/draw/engines/eevee/eevee_renderpasses.hh
index e2cf867bf99..d6b956092df 100644
--- a/source/blender/draw/engines/eevee/eevee_renderpasses.hh
+++ b/source/blender/draw/engines/eevee/eevee_renderpasses.hh
@@ -37,6 +37,7 @@ enum eRenderPassBit {
RENDERPASS_COMBINED = (1 << 0),
RENDERPASS_DEPTH = (1 << 1),
RENDERPASS_NORMAL = (1 << 2),
+ RENDERPASS_VECTOR = (1 << 3),
/** Used for iterator. */
RENDERPASS_MAX,
};
@@ -49,6 +50,7 @@ static eRenderPassBit to_render_passes_bits(int i_rpasses)
SET_FLAG_FROM_TEST(rpasses, i_rpasses & SCE_PASS_COMBINED, RENDERPASS_COMBINED);
SET_FLAG_FROM_TEST(rpasses, i_rpasses & SCE_PASS_Z, RENDERPASS_DEPTH);
SET_FLAG_FROM_TEST(rpasses, i_rpasses & SCE_PASS_NORMAL, RENDERPASS_NORMAL);
+ SET_FLAG_FROM_TEST(rpasses, i_rpasses & SCE_PASS_VECTOR, RENDERPASS_VECTOR);
return rpasses;
}
@@ -61,6 +63,8 @@ static const char *to_render_passes_name(eRenderPassBit rpass)
return RE_PASSNAME_Z;
case RENDERPASS_NORMAL:
return RE_PASSNAME_NORMAL;
+ case RENDERPASS_VECTOR:
+ return RE_PASSNAME_VECTOR;
default:
BLI_assert(0);
return "";
@@ -76,6 +80,8 @@ static eFilmDataType to_render_passes_data_type(eRenderPassBit rpass, const bool
return FILM_DATA_DEPTH;
case RENDERPASS_NORMAL:
return FILM_DATA_NORMAL;
+ case RENDERPASS_VECTOR:
+ return FILM_DATA_MOTION;
default:
BLI_assert(0);
return FILM_DATA_COLOR;
@@ -94,6 +100,7 @@ class RenderPasses {
Film *combined = nullptr;
Film *depth = nullptr;
Film *normal = nullptr;
+ Film *vector = nullptr;
Vector<Film *> aovs;
private:
@@ -111,6 +118,7 @@ class RenderPasses {
delete combined;
delete depth;
delete normal;
+ delete vector;
}
void init(const Scene *scene,
@@ -121,6 +129,10 @@ class RenderPasses {
{
if (render_layer) {
enabled_passes_ = to_render_passes_bits(render_layer->passflag);
+ /* Cannot output motion vectors when using motion blur. */
+ if (scene->eevee.flag & SCE_EEVEE_MOTION_BLUR_ENABLED) {
+ enabled_passes_ &= ~RENDERPASS_VECTOR;
+ }
}
else {
BLI_assert(v3d);
@@ -220,6 +232,8 @@ class RenderPasses {
return depth;
case RENDERPASS_NORMAL:
return normal;
+ case RENDERPASS_VECTOR:
+ return vector;
default:
BLI_assert(0);
return combined;
diff --git a/source/blender/draw/engines/eevee/eevee_sampling.hh b/source/blender/draw/engines/eevee/eevee_sampling.hh
index 643c5182c77..9f8bebca9b5 100644
--- a/source/blender/draw/engines/eevee/eevee_sampling.hh
+++ b/source/blender/draw/engines/eevee/eevee_sampling.hh
@@ -122,6 +122,8 @@ class Sampling {
BLI_halton_2d(primes, offset, sample_, r);
data_.dimensions[SAMPLING_FILTER_U][0] = r[0];
data_.dimensions[SAMPLING_FILTER_V][0] = r[1];
+ /* TODO decorelate. */
+ data_.dimensions[SAMPLING_TIME][0] = r[0];
}
{
double r[2], offset[2];
diff --git a/source/blender/draw/engines/eevee/eevee_shader.hh b/source/blender/draw/engines/eevee/eevee_shader.hh
index 4147f50e336..758edf49a04 100644
--- a/source/blender/draw/engines/eevee/eevee_shader.hh
+++ b/source/blender/draw/engines/eevee/eevee_shader.hh
@@ -39,6 +39,7 @@ extern char datatoc_common_math_lib_glsl[];
extern char datatoc_common_view_lib_glsl[];
extern char datatoc_eevee_camera_lib_glsl[];
+extern char datatoc_eevee_camera_velocity_frag_glsl[];
extern char datatoc_eevee_depth_of_field_accumulator_lib_glsl[];
extern char datatoc_eevee_depth_of_field_bokeh_lut_frag_glsl[];
extern char datatoc_eevee_depth_of_field_downsample_frag_glsl[];
@@ -59,10 +60,18 @@ extern char datatoc_eevee_depth_of_field_tiles_flatten_frag_glsl[];
extern char datatoc_eevee_film_filter_frag_glsl[];
extern char datatoc_eevee_film_lib_glsl[];
extern char datatoc_eevee_film_resolve_frag_glsl[];
+extern char datatoc_eevee_motion_blur_gather_frag_glsl[];
+extern char datatoc_eevee_motion_blur_lib_glsl[];
+extern char datatoc_eevee_motion_blur_tiles_dilate_frag_glsl[];
+extern char datatoc_eevee_motion_blur_tiles_flatten_frag_glsl[];
extern char datatoc_eevee_object_forward_frag_glsl[];
extern char datatoc_eevee_object_lib_glsl[];
extern char datatoc_eevee_object_mesh_vert_glsl[];
+extern char datatoc_eevee_object_velocity_frag_glsl[];
+extern char datatoc_eevee_object_velocity_lib_glsl[];
+extern char datatoc_eevee_object_velocity_mesh_vert_glsl[];
extern char datatoc_eevee_sampling_lib_glsl[];
+extern char datatoc_eevee_velocity_lib_glsl[];
extern char datatoc_eevee_shader_shared_hh[];
@@ -98,6 +107,13 @@ enum eShaderType {
MESH, /* TEST */
+ MOTION_BLUR_GATHER,
+ MOTION_BLUR_TILE_DILATE,
+ MOTION_BLUR_TILE_FLATTEN,
+
+ VELOCITY_CAMERA,
+ VELOCITY_MESH,
+
MAX_SHADER_TYPE,
};
@@ -134,11 +150,14 @@ class ShaderModule {
DRW_SHADER_LIB_ADD(shader_lib_, common_view_lib);
DRW_SHADER_LIB_ADD(shader_lib_, eevee_sampling_lib);
DRW_SHADER_LIB_ADD(shader_lib_, eevee_camera_lib);
+ DRW_SHADER_LIB_ADD(shader_lib_, eevee_velocity_lib);
DRW_SHADER_LIB_ADD(shader_lib_, eevee_depth_of_field_lib);
DRW_SHADER_LIB_ADD(shader_lib_, eevee_depth_of_field_accumulator_lib);
DRW_SHADER_LIB_ADD(shader_lib_, eevee_depth_of_field_scatter_lib);
DRW_SHADER_LIB_ADD(shader_lib_, eevee_film_lib);
+ DRW_SHADER_LIB_ADD(shader_lib_, eevee_motion_blur_lib);
DRW_SHADER_LIB_ADD(shader_lib_, eevee_object_lib);
+ DRW_SHADER_LIB_ADD(shader_lib_, eevee_object_velocity_lib);
/* Meh ¯\_(ツ)_/¯. */
char *datatoc_nullptr_glsl = nullptr;
@@ -236,6 +255,17 @@ class ShaderModule {
SHADER_FULLSCREEN(DOF_TILES_FLATTEN, eevee_depth_of_field_tiles_flatten_frag);
SHADER(MESH, eevee_object_mesh_vert, nullptr, eevee_object_forward_frag, nullptr);
+ SHADER_FULLSCREEN(MOTION_BLUR_GATHER, eevee_motion_blur_gather_frag);
+ SHADER_FULLSCREEN(MOTION_BLUR_TILE_DILATE, eevee_motion_blur_tiles_dilate_frag);
+ SHADER_FULLSCREEN(MOTION_BLUR_TILE_FLATTEN, eevee_motion_blur_tiles_flatten_frag);
+
+ SHADER(VELOCITY_MESH,
+ eevee_object_velocity_mesh_vert,
+ nullptr,
+ eevee_object_velocity_frag,
+ nullptr);
+ SHADER_FULLSCREEN(VELOCITY_CAMERA, eevee_camera_velocity_frag);
+
#undef SHADER
#undef SHADER_FULLSCREEN
@@ -280,7 +310,7 @@ class ShaderModule {
std::string enum_preprocess(const char *input)
{
std::string output = "";
- /* Not failure safe but this is only ran on static data. */
+ /* Not failure safe but this only runs on static data. */
const char *cursor = input;
while ((cursor = strstr(cursor, "enum "))) {
output += StringRef(input, cursor - input);
diff --git a/source/blender/draw/engines/eevee/eevee_shader_shared.hh b/source/blender/draw/engines/eevee/eevee_shader_shared.hh
index d8f92ed644a..41a31adab0b 100644
--- a/source/blender/draw/engines/eevee/eevee_shader_shared.hh
+++ b/source/blender/draw/engines/eevee/eevee_shader_shared.hh
@@ -157,7 +157,8 @@ enum eFilmDataType : uint32_t {
/** No VEC3 because GPU_RGB16F is not a renderable format. */
FILM_DATA_VEC4 = 4u,
FILM_DATA_NORMAL = 5u,
- FILM_DATA_DEPTH = 6u
+ FILM_DATA_DEPTH = 6u,
+ FILM_DATA_MOTION = 7u
};
struct FilmData {
@@ -266,6 +267,39 @@ static float circle_to_polygon_angle(float sides_count, float theta)
/** \} */
+/* -------------------------------------------------------------------- */
+/** \name Velocity
+ * \{ */
+
+struct VelocityObjectData {
+ mat4 next_object_mat;
+ mat4 prev_object_mat;
+};
+BLI_STATIC_ASSERT_ALIGN(VelocityObjectData, 16)
+
+/** \} */
+
+/* -------------------------------------------------------------------- */
+/** \name Motion Blur
+ * \{ */
+
+#define MB_TILE_DIVISOR 32
+
+struct MotionBlurData {
+ /** Motion vector lengths are clamped to this maximum. A value of 0 means effect is bypassed. */
+ float blur_max;
+ /** Depth scaling factor. Avoid bluring background behind moving objects. */
+ float depth_scale;
+ /** As the name suggests. Used to avoid a division in the sampling. */
+ vec2 target_size_inv;
+ /** Viewport motion blur only blurs using previous frame vectors. */
+ bool is_viewport;
+ int pad0_, pad1_, pad2_;
+};
+BLI_STATIC_ASSERT_ALIGN(MotionBlurData, 16)
+
+/** \} */
+
#ifdef __cplusplus
# undef bool
} // namespace blender::eevee
diff --git a/source/blender/draw/engines/eevee/eevee_velocity.hh b/source/blender/draw/engines/eevee/eevee_velocity.hh
new file mode 100644
index 00000000000..24271931c98
--- /dev/null
+++ b/source/blender/draw/engines/eevee/eevee_velocity.hh
@@ -0,0 +1,412 @@
+/*
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ * Copyright 2021, Blender Foundation.
+ */
+
+/** \file
+ * \ingroup eevee
+ *
+ * The velocity pass outputs motion vectors to use for either
+ * temporal re-projection or motion blur.
+ *
+ * It is the module that tracks the objects between frames updates.
+ */
+
+#pragma once
+
+#include "BKE_duplilist.h"
+#include "BKE_object.h"
+#include "BLI_map.hh"
+#include "DEG_depsgraph_query.h"
+#include "DNA_rigidbody_types.h"
+#include "GPU_framebuffer.h"
+
+#include "eevee_renderpasses.hh"
+#include "eevee_shader.hh"
+#include "eevee_shader_shared.hh"
+#include "eevee_wrapper.hh"
+
+namespace blender::eevee {
+
+/* -------------------------------------------------------------------- */
+/** \name ObjectKey
+ *
+ * Unique key to be able to match object across frame updates.
+ * \{ */
+
+/** Unique key to identify each object in the hashmap. */
+struct ObjectKey {
+ /** Original Object or source object for duplis. */
+ Object *ob;
+ /** Original Parent object for duplis. */
+ Object *parent;
+ /** Dupli objects recursive unique identifier */
+ int id[8]; /* MAX_DUPLI_RECUR */
+ /** If object uses particle system hair. */
+ bool use_particle_hair;
+
+ ObjectKey(Object *ob_,
+ Object *parent_,
+ int id_[8], /* MAX_DUPLI_RECUR */
+ bool use_particle_hair_)
+ : ob(ob_), parent(parent_), use_particle_hair(use_particle_hair_)
+ {
+ if (id_) {
+ memcpy(id, id_, sizeof(id));
+ }
+ else {
+ memset(id, 0, sizeof(id));
+ }
+ }
+
+ ObjectKey(Object *ob, DupliObject *dupli, Object *parent)
+ : ObjectKey(ob, parent, dupli ? dupli->persistent_id : nullptr, false){};
+
+ ObjectKey(Object *ob)
+ : ObjectKey(ob, DRW_object_get_dupli(ob), DRW_object_get_dupli_parent(ob)){};
+
+ uint64_t hash(void) const
+ {
+ uint64_t hash = BLI_ghashutil_ptrhash(ob);
+ hash = BLI_ghashutil_combine_hash(hash, BLI_ghashutil_ptrhash(parent));
+ for (int i = 0; i < MAX_DUPLI_RECUR; i++) {
+ if (id[i] != 0) {
+ hash = BLI_ghashutil_combine_hash(hash, BLI_ghashutil_inthash(id[i]));
+ }
+ else {
+ break;
+ }
+ }
+ return hash;
+ }
+
+ bool operator<(const ObjectKey &k) const
+ {
+ if (ob != k.ob) {
+ return (ob < k.ob);
+ }
+ if (parent != k.parent) {
+ return (parent < k.parent);
+ }
+ if (use_particle_hair != k.use_particle_hair) {
+ return (use_particle_hair < k.use_particle_hair);
+ }
+ return memcmp(id, k.id, sizeof(id)) < 0;
+ }
+
+ bool operator==(const ObjectKey &k) const
+ {
+ if (ob != k.ob) {
+ return false;
+ }
+ if (parent != k.parent) {
+ return false;
+ }
+ if (use_particle_hair != k.use_particle_hair) {
+ return false;
+ }
+ return memcmp(id, k.id, sizeof(id)) == 0;
+ }
+};
+
+/** \} */
+
+/* -------------------------------------------------------------------- */
+/** \name Velocity
+ *
+ * Container for scene velocity data.
+ * \{ */
+
+using VelocityObjectBuf = StructBuffer<VelocityObjectData>;
+
+class Velocity {
+ public:
+ enum eStep {
+ STEP_PREVIOUS = 0,
+ STEP_NEXT = 1,
+ STEP_CURRENT = 2,
+ };
+
+ /** Map an object key to a velocity data. */
+ Map<ObjectKey, VelocityObjectBuf *> objects_steps;
+ struct {
+ /** Copies of camera data. One for previous and one for next time step. */
+ StructBuffer<CameraData> prev, next;
+ } camera_step;
+
+ private:
+ Scene *scene_;
+ eStep step_;
+
+ /** True if velocity is computed for viewport. */
+ bool is_viewport_;
+
+ public:
+ Velocity(){};
+
+ ~Velocity()
+ {
+ for (VelocityObjectBuf *data : objects_steps.values()) {
+ delete data;
+ }
+ }
+
+ void init(Camera &camera,
+ RenderEngine *engine,
+ Depsgraph *depsgraph,
+ const RenderPasses &rpasses)
+ {
+ is_viewport_ = !DRW_state_is_image_render() && !DRW_state_is_opengl_render();
+
+ if (is_viewport_) {
+ /* For viewport we sync when object is evaluated and we swap at init time.
+ * Note that we sync the camera in init so we need camera to have been synced beforehand.
+ * Use next step to store the current position. This one will become the previous step after
+ * next swapping. */
+ step_ = STEP_NEXT;
+ step_swap();
+      /* TODO(fclem) we should garbage collect the ids that get removed. */
+ step_camera_sync(camera);
+ }
+
+ if (engine && (rpasses.vector != nullptr)) {
+ /* No motion blur and the vector pass was requested. Do the step sync here. */
+ Scene *scene = DEG_get_evaluated_scene(depsgraph);
+ float initial_time = scene->r.cfra + scene->r.subframe;
+ step_sync(STEP_PREVIOUS, camera, engine, depsgraph, initial_time - 1.0f);
+ step_sync(STEP_NEXT, camera, engine, depsgraph, initial_time + 1.0f);
+ DRW_render_set_time(engine, depsgraph, floorf(initial_time), fractf(initial_time));
+ }
+ }
+
+ void step_sync(
+ eStep step, Camera &camera, RenderEngine *engine, Depsgraph *depsgraph, float time)
+ {
+ DRW_render_set_time(engine, depsgraph, floorf(time), fractf(time));
+ step_ = step;
+ scene_ = DEG_get_evaluated_scene(depsgraph);
+ step_camera_sync(camera);
+ DRW_render_object_iter(this, engine, depsgraph, Velocity::step_object_sync);
+ }
+
+ void step_camera_sync(Camera &camera)
+ {
+ if (!is_viewport_) {
+ /* No need to sync since it is already. */
+ camera.sync(nullptr);
+ }
+
+ if (step_ == STEP_NEXT) {
+ camera_step.next = camera.data_get();
+ }
+ else if (step_ == STEP_PREVIOUS) {
+ camera_step.prev = camera.data_get();
+ }
+ }
+
+ /* Gather motion data from all objects in the scene. */
+ static void step_object_sync(void *velocity_,
+ Object *ob,
+ RenderEngine *UNUSED(engine),
+ Depsgraph *UNUSED(depsgraph))
+ {
+ Velocity &velocity = *reinterpret_cast<Velocity *>(velocity_);
+
+ if (!velocity.object_has_velocity(ob) && !velocity.object_is_deform(ob)) {
+ return;
+ }
+
+ auto data = velocity.objects_steps.lookup_or_add_cb(ObjectKey(ob),
+ []() { return new VelocityObjectBuf(); });
+
+ if (velocity.step_ == STEP_NEXT) {
+ copy_m4_m4(data->next_object_mat, ob->obmat);
+ }
+ else if (velocity.step_ == STEP_PREVIOUS) {
+ copy_m4_m4(data->prev_object_mat, ob->obmat);
+ }
+ }
+
+ /* Moves next frame data to previous frame data. Nullify next frame data. */
+ void step_swap(void)
+ {
+ for (VelocityObjectBuf *data : objects_steps.values()) {
+ copy_m4_m4(data->prev_object_mat, data->next_object_mat);
+      /* Important: This lets us know if the object is missing from the next time step. */
+ zero_m4(data->next_object_mat);
+ }
+ camera_step.prev = static_cast<CameraData>(camera_step.next);
+ }
+
+ /* This is the end of the current frame sync. Not the step_sync. */
+ void end_sync(void)
+ {
+ for (VelocityObjectBuf *data : objects_steps.values()) {
+ data->push_update();
+ }
+ camera_step.prev.push_update();
+ camera_step.next.push_update();
+ }
+
+ private:
+ bool object_has_velocity(const Object *ob)
+ {
+#if 0
+ RigidBodyOb *rbo = ob->rigidbody_object;
+ /* Active rigidbody objects only, as only those are affected by sim. */
+ const bool has_rigidbody = (rbo && (rbo->type == RBO_TYPE_ACTIVE));
+ /* For now we assume dupli objects are moving. */
+ const bool is_dupli = (ob->base_flag & BASE_FROM_DUPLI) != 0;
+ const bool object_moves = is_dupli || has_rigidbody || BKE_object_moves_in_time(ob, true);
+#else
+ UNUSED_VARS(ob);
+ /* BKE_object_moves_in_time does not work in some cases.
+ * Better detect non moving object after evaluation. */
+ const bool object_moves = true;
+#endif
+ return object_moves;
+ }
+
+ bool object_is_deform(const Object *ob)
+ {
+ RigidBodyOb *rbo = ob->rigidbody_object;
+ /* Active rigidbody objects only, as only those are affected by sim. */
+ const bool has_rigidbody = (rbo && (rbo->type == RBO_TYPE_ACTIVE));
+ const bool is_deform = BKE_object_is_deform_modified(scene_, (Object *)ob) ||
+ (has_rigidbody && (rbo->flag & RBO_FLAG_USE_DEFORM) != 0);
+
+ return is_deform;
+ }
+};
+
+/** \} */
+
+/* -------------------------------------------------------------------- */
+/** \name VelocityPass
+ *
+ * Draws velocity data from Velocity module to a framebuffer / texture.
+ * \{ */
+
+class VelocityPass {
+ private:
+ ShaderModule &shaders_;
+ Camera &camera_;
+ Velocity &velocity_;
+
+ DRWPass *object_ps_ = nullptr;
+ DRWPass *camera_ps_ = nullptr;
+
+ /** Shading groups from object_ps_ */
+ DRWShadingGroup *mesh_grp_;
+
+ /** Reference only. Not owned. */
+ GPUTexture *depth_tx_;
+
+ public:
+ VelocityPass(ShaderModule &shaders, Camera &camera, Velocity &velocity)
+ : shaders_(shaders), camera_(camera), velocity_(velocity){};
+
+ void sync(void)
+ {
+ {
+ /* Outputs camera motion vector. */
+ /* TODO(fclem) Ideally, we should run this only where the motion vectors were not written.
+ * But without imageLoadStore, we cannot do that without another buffer. */
+ DRWState state = DRW_STATE_WRITE_COLOR;
+ DRW_PASS_CREATE(camera_ps_, state);
+ GPUShader *sh = shaders_.static_shader_get(VELOCITY_CAMERA);
+ DRWShadingGroup *grp = DRW_shgroup_create(sh, camera_ps_);
+ DRW_shgroup_uniform_texture_ref(grp, "depth_tx", &depth_tx_);
+ DRW_shgroup_uniform_block(grp, "camera_prev_block", velocity_.camera_step.prev.ubo_get());
+ DRW_shgroup_uniform_block(grp, "camera_next_block", velocity_.camera_step.next.ubo_get());
+ DRW_shgroup_uniform_block(grp, "camera_curr_block", camera_.ubo_get());
+ DRW_shgroup_call_procedural_triangles(grp, nullptr, 1);
+ }
+ {
+ /* Animated objects are rendered and output the correct motion vector. */
+ DRWState state = DRW_STATE_WRITE_COLOR | DRW_STATE_DEPTH_EQUAL;
+ DRW_PASS_CREATE(object_ps_, state);
+ {
+ GPUShader *sh = shaders_.static_shader_get(VELOCITY_MESH);
+ DRWShadingGroup *grp = mesh_grp_ = DRW_shgroup_create(sh, object_ps_);
+ DRW_shgroup_uniform_block(grp, "camera_prev_block", velocity_.camera_step.prev.ubo_get());
+ DRW_shgroup_uniform_block(grp, "camera_next_block", velocity_.camera_step.next.ubo_get());
+ DRW_shgroup_uniform_block(grp, "camera_curr_block", camera_.ubo_get());
+ }
+ }
+ }
+
+ void mesh_add(Object *ob)
+ {
+ VelocityObjectBuf **data_ptr = velocity_.objects_steps.lookup_ptr(ObjectKey(ob));
+
+ if (data_ptr == nullptr) {
+ return;
+ }
+
+ VelocityObjectBuf *data = *data_ptr;
+
+ GPUBatch *geom = DRW_cache_object_surface_get(ob);
+ if (geom == NULL) {
+ return;
+ }
+
+ /* Fill missing matrices if the object was hidden in previous or next frame. */
+ if (is_zero_m4(data->prev_object_mat)) {
+ copy_m4_m4(data->prev_object_mat, ob->obmat);
+ }
+ if (is_zero_m4(data->next_object_mat)) {
+ copy_m4_m4(data->next_object_mat, ob->obmat);
+ }
+
+ // if (mb_geom->use_deform) {
+ // /* Keep to modify later (after init). */
+ // mb_geom->batch = geom;
+ // }
+
+    /* Avoid drawing objects that have no motion since object_moves is always true. */
+ if (/* !mb_geom->use_deform && */ /* Object deformation can happen without transform. */
+ equals_m4m4(data->prev_object_mat, ob->obmat) &&
+ equals_m4m4(data->next_object_mat, ob->obmat)) {
+ return;
+ }
+
+ /* TODO(fclem) Use the same layout as modelBlock from draw so we can reuse the same offset and
+ * avoid the overhead of 1 shading group and one UBO per object. */
+ DRWShadingGroup *grp = DRW_shgroup_create_sub(mesh_grp_);
+ DRW_shgroup_uniform_block(grp, "object_block", data->ubo_get());
+ DRW_shgroup_call(grp, geom, ob);
+ }
+
+ void render(GPUTexture *depth_tx, GPUFrameBuffer *velocity_only_fb, GPUFrameBuffer *velocity_fb)
+ {
+ depth_tx_ = depth_tx;
+
+ DRW_stats_group_start("Velocity");
+
+ GPU_framebuffer_bind(velocity_only_fb);
+ DRW_draw_pass(camera_ps_);
+
+ GPU_framebuffer_bind(velocity_fb);
+ DRW_draw_pass(object_ps_);
+
+ DRW_stats_group_end();
+ }
+};
+
+/** \} */
+
+} // namespace blender::eevee
diff --git a/source/blender/draw/engines/eevee/eevee_view.hh b/source/blender/draw/engines/eevee/eevee_view.hh
index 4f281ee74b2..44399a610d8 100644
--- a/source/blender/draw/engines/eevee/eevee_view.hh
+++ b/source/blender/draw/engines/eevee/eevee_view.hh
@@ -39,6 +39,7 @@
#include "eevee_depth_of_field.hh"
#include "eevee_renderpasses.hh"
#include "eevee_shader.hh"
+#include "eevee_velocity.hh"
namespace blender::eevee {
@@ -81,13 +82,16 @@ class ShadingPasses {
public:
// BackgroundShadingPass background;
DeferredPass opaque;
+ VelocityPass velocity;
public:
- ShadingPasses(ShaderModule &shaders) : opaque(shaders){};
+ ShadingPasses(ShaderModule &shaders, Camera &camera, Velocity &velocity)
+ : opaque(shaders), velocity(shaders, camera, velocity){};
void sync()
{
opaque.sync();
+ velocity.sync();
}
};
@@ -106,15 +110,20 @@ class ShadingView {
ShadingPasses &shading_passes_;
/** Associated camera. */
const Camera &camera_;
- /** Depth of field module. */
+ /** Post-fx modules. */
DepthOfField dof_;
+ MotionBlur mb_;
/** Owned resources. */
GPUFrameBuffer *view_fb_ = nullptr;
+ GPUFrameBuffer *velocity_fb_ = nullptr;
+ GPUFrameBuffer *velocity_only_fb_ = nullptr;
/** Draw resources. Not owned. */
GPUTexture *combined_tx_ = nullptr;
GPUTexture *depth_tx_ = nullptr;
GPUTexture *postfx_tx_ = nullptr;
+ GPUTexture *velocity_camera_tx_ = nullptr;
+ GPUTexture *velocity_view_tx_ = nullptr;
/** Main views is created from the camera (or is from the viewport). It is not jittered. */
DRWView *main_view_ = nullptr;
@@ -138,23 +147,28 @@ class ShadingView {
ShadingPasses &shading_passes,
Sampling &sampling,
const Camera &camera,
+ MotionBlurModule &mb_module,
const char *name,
const float (*face_matrix)[4])
: sampling_(sampling),
shading_passes_(shading_passes),
camera_(camera),
dof_(shaders, sampling, name),
+ mb_(shaders, sampling, mb_module, name),
name_(name),
face_matrix_(face_matrix){};
~ShadingView()
{
- GPU_framebuffer_free(view_fb_);
+ GPU_FRAMEBUFFER_FREE_SAFE(view_fb_);
+ GPU_FRAMEBUFFER_FREE_SAFE(velocity_fb_);
+ GPU_FRAMEBUFFER_FREE_SAFE(velocity_only_fb_);
}
void init(const Scene *scene)
{
dof_.init(scene);
+ mb_.init(scene);
}
void sync(int render_extent_[2])
@@ -200,6 +214,7 @@ class ShadingView {
render_view_ = DRW_view_create_sub(main_view_, viewmat_p, winmat_p);
dof_.sync(camera_, winmat_p, extent_);
+ mb_.sync(extent_);
{
/* Query temp textures and create framebuffers. */
@@ -211,12 +226,31 @@ class ShadingView {
combined_tx_ = DRW_texture_pool_query_2d(UNPACK2(extent_), GPU_RGBA16F, owner);
/* TODO(fclem) Only allocate if needed. */
postfx_tx_ = DRW_texture_pool_query_2d(UNPACK2(extent_), GPU_RGBA16F, owner);
+ /* TODO(fclem) Only allocate if needed. RG16F when only doing reprojection. */
+ velocity_camera_tx_ = DRW_texture_pool_query_2d(UNPACK2(extent_), GPU_RGBA16F, owner);
+ /* TODO(fclem) Only allocate if needed. RG16F when only doing motion blur post fx in
+ * panoramic camera. */
+ velocity_view_tx_ = DRW_texture_pool_query_2d(UNPACK2(extent_), GPU_RGBA16F, owner);
GPU_framebuffer_ensure_config(&view_fb_,
{
GPU_ATTACHMENT_TEXTURE(depth_tx_),
GPU_ATTACHMENT_TEXTURE(combined_tx_),
});
+
+ GPU_framebuffer_ensure_config(&velocity_fb_,
+ {
+ GPU_ATTACHMENT_TEXTURE(depth_tx_),
+ GPU_ATTACHMENT_TEXTURE(velocity_camera_tx_),
+ GPU_ATTACHMENT_TEXTURE(velocity_view_tx_),
+ });
+
+ GPU_framebuffer_ensure_config(&velocity_only_fb_,
+ {
+ GPU_ATTACHMENT_NONE,
+ GPU_ATTACHMENT_TEXTURE(velocity_camera_tx_),
+ GPU_ATTACHMENT_TEXTURE(velocity_view_tx_),
+ });
}
}
@@ -237,6 +271,12 @@ class ShadingView {
GPU_framebuffer_clear_color_depth(view_fb_, color, 1.0f);
shading_passes_.opaque.render();
+ shading_passes_.velocity.render(depth_tx_, velocity_only_fb_, velocity_fb_);
+
+ if (render_passes.vector) {
+ render_passes.vector->accumulate(velocity_camera_tx_, sub_view_);
+ }
+
GPUTexture *final_radiance_tx = render_post(combined_tx_);
if (render_passes.combined) {
@@ -252,9 +292,12 @@ class ShadingView {
GPUTexture *render_post(GPUTexture *input_tx)
{
+ GPUTexture *velocity_tx = (velocity_view_tx_ != nullptr) ? velocity_view_tx_ :
+ velocity_camera_tx_;
GPUTexture *output_tx = postfx_tx_;
/* Swapping is done internally. Actual output is set to the next input. */
dof_.render(depth_tx_, &input_tx, &output_tx);
+ mb_.render(depth_tx_, velocity_tx, &input_tx, &output_tx);
return input_tx;
}
@@ -295,14 +338,18 @@ class MainView {
int render_extent_[2];
public:
- MainView(ShaderModule &shaders, ShadingPasses &shpasses, Camera &cam, Sampling &sampling)
+ MainView(ShaderModule &shaders,
+ ShadingPasses &shpasses,
+ Camera &cam,
+ Sampling &sampling,
+ MotionBlurModule &mb_module)
: shading_views_({
- ShadingView(shaders, shpasses, sampling, cam, "posX_view", cubeface_mat[0]),
- ShadingView(shaders, shpasses, sampling, cam, "negX_view", cubeface_mat[1]),
- ShadingView(shaders, shpasses, sampling, cam, "posY_view", cubeface_mat[2]),
- ShadingView(shaders, shpasses, sampling, cam, "negY_view", cubeface_mat[3]),
- ShadingView(shaders, shpasses, sampling, cam, "posZ_view", cubeface_mat[4]),
- ShadingView(shaders, shpasses, sampling, cam, "negZ_view", cubeface_mat[5]),
+ ShadingView(shaders, shpasses, sampling, cam, mb_module, "posX_view", cubeface_mat[0]),
+ ShadingView(shaders, shpasses, sampling, cam, mb_module, "negX_view", cubeface_mat[1]),
+ ShadingView(shaders, shpasses, sampling, cam, mb_module, "posY_view", cubeface_mat[2]),
+ ShadingView(shaders, shpasses, sampling, cam, mb_module, "negY_view", cubeface_mat[3]),
+ ShadingView(shaders, shpasses, sampling, cam, mb_module, "posZ_view", cubeface_mat[4]),
+ ShadingView(shaders, shpasses, sampling, cam, mb_module, "negZ_view", cubeface_mat[5]),
})
{
}
diff --git a/source/blender/draw/engines/eevee/eevee_wrapper.hh b/source/blender/draw/engines/eevee/eevee_wrapper.hh
index 9617bbd9af0..6806847ac76 100644
--- a/source/blender/draw/engines/eevee/eevee_wrapper.hh
+++ b/source/blender/draw/engines/eevee/eevee_wrapper.hh
@@ -42,7 +42,7 @@ class StructArrayBuffer {
public:
StructArrayBuffer()
{
- ubo_ = GPU_uniformbuf_create_ex(sizeof(data_), nullptr, STRINGIFY(T));
+ ubo_ = GPU_uniformbuf_create_ex(sizeof(data_), nullptr, "StructArrayBuffer");
}
~StructArrayBuffer()
{
@@ -76,32 +76,53 @@ class StructArrayBuffer {
BLI_assert(index < len);
return data_[index];
}
+
+ /**
+ * Iterator
+ */
+ const T *begin() const
+ {
+ return data_;
+ }
+ const T *end() const
+ {
+ return data_ + len;
+ }
+
+ T *begin()
+ {
+ return data_;
+ }
+ T *end()
+ {
+ return data_ + len;
+ }
};
/** Simpler version where data is not an array. */
template<typename T> class StructBuffer : public T {
private:
- GPUUniformBuf *ubo;
+ GPUUniformBuf *ubo_;
public:
StructBuffer()
{
- ubo = GPU_uniformbuf_create_ex(sizeof(T), nullptr, STRINGIFY(T));
+ ubo_ = GPU_uniformbuf_create_ex(sizeof(T), nullptr, "StructBuffer");
}
~StructBuffer()
{
- GPU_uniformbuf_free(ubo);
+ DRW_UBO_FREE_SAFE(ubo_);
}
void push_update(void)
{
T *data = static_cast<T *>(this);
- GPU_uniformbuf_update(ubo, data);
+ GPU_uniformbuf_update(ubo_, data);
}
const GPUUniformBuf *ubo_get(void) const
{
- return ubo;
+ return ubo_;
}
StructBuffer<T> &operator=(const T &other)
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_camera_velocity_frag.glsl b/source/blender/draw/engines/eevee/shaders/eevee_camera_velocity_frag.glsl
new file mode 100644
index 00000000000..c94be67b542
--- /dev/null
+++ b/source/blender/draw/engines/eevee/shaders/eevee_camera_velocity_frag.glsl
@@ -0,0 +1,52 @@
+
+/**
+ * Extract two 2D screen space velocity vector from depth buffer.
+ * Note that the offsets are in camera uv space, not view uv space.
+ * xy: Previous position > Current position
+ * zw: Current position > Next position
+ **/
+
+#pragma BLENDER_REQUIRE(common_math_lib.glsl)
+#pragma BLENDER_REQUIRE(common_view_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_shader_shared.hh)
+
+layout(std140) uniform camera_prev_block
+{
+ CameraData cam_prev;
+};
+
+layout(std140) uniform camera_curr_block
+{
+ CameraData cam_curr;
+};
+
+layout(std140) uniform camera_next_block
+{
+ CameraData cam_next;
+};
+
+uniform sampler2D depth_tx;
+
+in vec4 uvcoordsvar;
+
+layout(location = 0) out vec4 out_velocity_camera;
+layout(location = 1) out vec4 out_velocity_view;
+
+void main(void)
+{
+ float depth = textureLod(depth_tx, uvcoordsvar.xy, 0.0).r;
+
+ vec3 P = get_world_space_from_depth(uvcoordsvar.xy, depth);
+ vec3 P_prev = P, P_next = P;
+
+ if (depth == 1.0) {
+ /* Background case. Only compute rotation velocity. */
+ vec3 V = -cameraVec(P);
+ P_prev = cam_prev.viewinv[3].xyz + V;
+ P_next = cam_next.viewinv[3].xyz + V;
+ }
+
+ compute_velocity(
+ P_prev, P, P_next, cam_prev, cam_curr, cam_next, out_velocity_camera, out_velocity_view);
+}
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_film_lib.glsl b/source/blender/draw/engines/eevee/shaders/eevee_film_lib.glsl
index b3420d31380..d9d7187d25a 100644
--- a/source/blender/draw/engines/eevee/shaders/eevee_film_lib.glsl
+++ b/source/blender/draw/engines/eevee/shaders/eevee_film_lib.glsl
@@ -25,6 +25,10 @@ vec4 film_data_encode(FilmData film, vec4 data, float weight)
else if (film.data_type == FILM_DATA_DEPTH) {
/* TODO(fclem) Depth should be converted to radial depth in panoramic projection. */
}
+ else if (film.data_type == FILM_DATA_MOTION) {
+ /* Motion vectors are in camera uv space. But final motion vectors are in pixel units. */
+ data *= film.uv_scale_inv.xyxy;
+ }
if (film_is_color_data(film)) {
data *= weight;
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_gather_frag.glsl b/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_gather_frag.glsl
new file mode 100644
index 00000000000..9b73f2d2a10
--- /dev/null
+++ b/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_gather_frag.glsl
@@ -0,0 +1,220 @@
+
+/**
+ * Perform two gather blur in the 2 motion blur directions
+ * Based on:
+ * A Fast and Stable Feature-Aware Motion Blur Filter
+ * by Jean-Philippe Guertin, Morgan McGuire, Derek Nowrouzezahrai
+ *
+ * With modification from the presentation:
+ * Next Generation Post Processing in Call of Duty Advanced Warfare
+ * by Jorge Jimenez
+ */
+
+#pragma BLENDER_REQUIRE(common_view_lib.glsl)
+#pragma BLENDER_REQUIRE(common_math_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_motion_blur_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_sampling_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_shader_shared.hh)
+
+layout(std140) uniform sampling_block
+{
+ SamplingData sampling;
+};
+
+layout(std140) uniform motion_blur_block
+{
+ MotionBlurData mb;
+};
+
+uniform sampler2D color_tx;
+uniform sampler2D depth_tx;
+uniform sampler2D velocity_tx;
+uniform sampler2D tiles_tx;
+
+in vec4 uvcoordsvar;
+
+out vec4 out_color;
+
+const int gather_sample_count = 8;
+
+vec2 spread_compare(float center_motion_length, float sample_motion_length, float offset_length)
+{
+ return saturate(vec2(center_motion_length, sample_motion_length) - offset_length + 1.0);
+}
+
+vec2 depth_compare(float center_depth, float sample_depth)
+{
+ return saturate(0.5 + vec2(-mb.depth_scale, mb.depth_scale) * (sample_depth - center_depth));
+}
+
+/* Kill contribution if not going in the same direction. */
+float dir_compare(vec2 offset, vec2 sample_motion, float sample_motion_length)
+{
+ if (sample_motion_length < 0.5) {
+ return 1.0;
+ }
+ return (dot(offset, sample_motion) > 0.0) ? 1.0 : 0.0;
+}
+
+/* Return background (x) and foreground (y) weights. */
+vec2 sample_weights(float center_depth,
+ float sample_depth,
+ float center_motion_length,
+ float sample_motion_length,
+ float offset_length)
+{
+ /* Classify foreground/background. */
+ vec2 depth_weight = depth_compare(center_depth, sample_depth);
+ /* Weight if sample is overlapping or under the center pixel. */
+ vec2 spread_weight = spread_compare(center_motion_length, sample_motion_length, offset_length);
+ return depth_weight * spread_weight;
+}
+
+void gather_sample(vec2 screen_uv,
+ float center_depth,
+ float center_motion_len,
+ vec2 offset,
+ float offset_len,
+ const bool next,
+ inout vec4 accum,
+ inout vec4 accum_bg,
+ inout vec3 w_accum)
+{
+ vec2 sample_uv = screen_uv - offset * mb.target_size_inv;
+ vec2 sample_motion = sample_velocity(mb, velocity_tx, sample_uv, next);
+ float sample_motion_len = length(sample_motion);
+ float sample_depth = texture(depth_tx, sample_uv).r;
+ vec4 sample_color = textureLod(color_tx, sample_uv, 0.0);
+
+ /* Meh, a quirk of the motion vector pass... */
+ sample_motion = (next) ? -sample_motion : sample_motion;
+
+ sample_depth = get_view_z_from_depth(sample_depth);
+
+ vec3 weights;
+ weights.xy = sample_weights(
+ center_depth, sample_depth, center_motion_len, sample_motion_len, offset_len);
+ weights.z = dir_compare(offset, sample_motion, sample_motion_len);
+ weights.xy *= weights.z;
+
+ accum += sample_color * weights.y;
+ accum_bg += sample_color * weights.x;
+ w_accum += weights;
+}
+
+void gather_blur(vec2 screen_uv,
+ vec2 center_motion,
+ float center_depth,
+ vec2 max_motion,
+ float ofs,
+ const bool next,
+ inout vec4 accum,
+ inout vec4 accum_bg,
+ inout vec3 w_accum)
+{
+ float center_motion_len = length(center_motion);
+ float max_motion_len = length(max_motion);
+
+ /* Tile boundaries randomization can fetch a tile where there is less motion than this pixel.
+ * Fix this by overriding the max_motion. */
+ if (max_motion_len < center_motion_len) {
+ max_motion_len = center_motion_len;
+ max_motion = center_motion;
+ }
+
+ if (max_motion_len < 0.5) {
+ return;
+ }
+
+ int i;
+ float t, inc = 1.0 / float(gather_sample_count);
+ for (i = 0, t = ofs * inc; i < gather_sample_count; i++, t += inc) {
+ gather_sample(screen_uv,
+ center_depth,
+ center_motion_len,
+ max_motion * t,
+ max_motion_len * t,
+ next,
+ accum,
+ accum_bg,
+ w_accum);
+ }
+
+ if (center_motion_len < 0.5) {
+ return;
+ }
+
+ for (i = 0, t = ofs * inc; i < gather_sample_count; i++, t += inc) {
+ /* Also sample in center motion direction.
+ * Allow recovering motion where there is conflicting
+ * motion between foreground and background. */
+ gather_sample(screen_uv,
+ center_depth,
+ center_motion_len,
+ center_motion * t,
+ center_motion_len * t,
+ next,
+ accum,
+ accum_bg,
+ w_accum);
+ }
+}
+
+void main()
+{
+ vec2 uv = uvcoordsvar.xy;
+
+ /* Data of the center pixel of the gather (target). */
+ float center_depth = get_view_z_from_depth(texture(depth_tx, uv).r);
+ vec4 center_motion = sample_velocity(mb, velocity_tx, ivec2(gl_FragCoord.xy));
+
+ vec4 center_color = textureLod(color_tx, uv, 0.0);
+
+ float noise_offset = sampling_rng_1D_get(sampling, SAMPLING_TIME);
+ /** TODO(fclem) Blue noise. */
+ vec2 rand = vec2(interlieved_gradient_noise(gl_FragCoord.xy, 0, noise_offset),
+ interlieved_gradient_noise(gl_FragCoord.xy, 1, noise_offset));
+
+ /* Randomize tile boundary to avoid ugly discontinuities. Randomize 1/4th of the tile.
+ * Note this randomize only in one direction but in practice it's enough. */
+ rand.x = rand.x * 2.0 - 1.0;
+ ivec2 tile = ivec2(gl_FragCoord.xy + rand.x * float(MB_TILE_DIVISOR) * 0.25) / MB_TILE_DIVISOR;
+ tile = clamp(tile, ivec2(0), textureSize(tiles_tx, 0) - 1);
+ vec4 max_motion = texelFetch(tiles_tx, tile, 0);
+
+ /* First (center) sample: time = T */
+ /* x: Background, y: Foreground, z: dir. */
+ vec3 w_accum = vec3(0.0, 0.0, 1.0);
+ vec4 accum_bg = vec4(0.0);
+ vec4 accum = vec4(0.0);
+ /* First linear gather. time = [T - delta, T] */
+ gather_blur(
+ uv, center_motion.xy, center_depth, max_motion.xy, rand.y, false, accum, accum_bg, w_accum);
+ /* Second linear gather. time = [T, T + delta] */
+ gather_blur(
+ uv, -center_motion.zw, center_depth, -max_motion.zw, rand.y, true, accum, accum_bg, w_accum);
+
+#if 1 /* Own addition. Not present in reference implementation. */
+ /* Avoid division by 0.0. */
+ float w = 1.0 / (50.0 * float(gather_sample_count) * 4.0);
+ accum_bg += center_color * w;
+ w_accum.x += w;
+ /* NOTE: In Jimenez's presentation, they used center sample.
+ * We use background color as it contains more information for foreground
+ * elements that have not enough weights.
+ * Yield better blur in complex motion. */
+ center_color = accum_bg / w_accum.x;
+#endif
+ /* Merge background. */
+ accum += accum_bg;
+ w_accum.y += w_accum.x;
+ /* Balance accumulation for failed samples.
+ * We replace the missing foreground by the background. */
+ float blend_fac = saturate(1.0 - w_accum.y / w_accum.z);
+ out_color = (accum / w_accum.z) + center_color * blend_fac;
+
+#if 0 /* For debugging. */
+ out_color.rgb = out_color.ggg;
+ out_color.rg += max_motion.xy;
+#endif
+}
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_lib.glsl b/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_lib.glsl
new file mode 100644
index 00000000000..f88b8dee5d7
--- /dev/null
+++ b/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_lib.glsl
@@ -0,0 +1,18 @@
+
+#pragma BLENDER_REQUIRE(eevee_shader_shared.hh)
+
+/* Fetch motion vectors converted from uv space to pixel space. Both overloads assume
+ * velocity_tx has the same resolution as the target post-fx framebuffer. */
+vec4 sample_velocity(MotionBlurData mb, sampler2D velocity_tx, ivec2 texel)
+{
+  vec4 velocity = texelFetch(velocity_tx, texel, 0);
+  velocity *= vec2(textureSize(velocity_tx, 0)).xyxy;
+  velocity = (mb.is_viewport) ? velocity.xyxy : velocity; /* Viewport has only the previous step. */
+  return velocity;
+}
+vec2 sample_velocity(MotionBlurData mb, sampler2D velocity_tx, vec2 uv, const bool next)
+{
+  vec4 velocity = texture(velocity_tx, uv);
+  velocity *= vec2(textureSize(velocity_tx, 0)).xyxy;
+  return (next && !mb.is_viewport) ? velocity.zw : velocity.xy;
+}
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_tiles_dilate_frag.glsl b/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_tiles_dilate_frag.glsl
new file mode 100644
index 00000000000..786d6131eaa
--- /dev/null
+++ b/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_tiles_dilate_frag.glsl
@@ -0,0 +1,113 @@
+
+/**
+ * Samples a 3x3 tile neighborhood to find potentially intersecting motions.
+ * Outputs the largest intersecting motion vector in the neighborhood.
+ *
+ * Based on:
+ * A Fast and Stable Feature-Aware Motion Blur Filter
+ * by Jean-Philippe Guertin, Morgan McGuire, Derek Nowrouzezahrai
+ *
+ * Adapted from G3D Innovation Engine implementation.
+ */
+
+#pragma BLENDER_REQUIRE(common_math_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_shader_shared.hh)
+
+layout(std140) uniform motion_blur_block
+{
+ MotionBlurData mb;
+};
+
+uniform sampler2D tiles_tx;
+
+layout(location = 0) out vec4 out_max_motion;
+
+bool neighbor_affect_this_tile(ivec2 offset, vec2 velocity)
+{
+ /* Manhattan distance to the tiles, which is used for
+ * differentiating corners versus middle blocks */
+ float displacement = float(abs(offset.x) + abs(offset.y));
+ /**
+ * Relative sign on each axis of the offset compared
+ * to the velocity for that tile. In order for a tile
+ * to affect the center tile, it must have a
+ * neighborhood velocity in which x and y both have
+ * identical or both have opposite signs relative to
+ * offset. If the offset coordinate is zero then
+ * velocity is irrelevant.
+ */
+ vec2 point = sign(offset * velocity);
+
+ float dist = (point.x + point.y);
+ /**
+ * Here's an example of the logic for this code.
+ * In this diagram, the upper-left tile has offset = (-1, -1).
+ * V1 is velocity = (1, -2). point in this case = (-1, 1), and therefore dist = 0,
+ * so the upper-left tile does not affect the center.
+ *
+ * Now, look at another case. V2 = (-1, -2). point = (1, 1), so dist = 2 and the tile
+ * does affect the center.
+ *
+ * V2(-1,-2) V1(1, -2)
+ * \ /
+ * \ /
+ * \/___ ____ ____
+ * (-1, -1)| | | |
+ * |____|____|____|
+ * | | | |
+ * |____|____|____|
+ * | | | |
+ * |____|____|____|
+ */
+ return (abs(dist) == displacement);
+}
+
+/**
+ * Only gather neighborhood velocity into tiles that could be affected by it.
+ * In the general case, only six of the eight neighbors contribute:
+ *
+ * This tile can't possibly be affected by the center one
+ * |
+ * v
+ * ____ ____ ____
+ * | | ///|/// |
+ * |____|////|//__|
+ * | |////|/ |
+ * |___/|////|____|
+ * | //|////| | <--- This tile can't possibly be affected by the center one
+ * |_///|///_|____|
+ */
+void main()
+{
+ ivec2 tile = ivec2(gl_FragCoord.xy);
+ ivec2 texture_bounds = textureSize(tiles_tx, 0) - 1;
+
+ out_max_motion = vec4(0.0);
+ float max_motion_len_sqr_prev = -1.0;
+ float max_motion_len_sqr_next = -1.0;
+
+ ivec2 offset = ivec2(0);
+ for (offset.y = -1; offset.y <= 1; ++offset.y) {
+ for (offset.x = -1; offset.x <= 1; ++offset.x) {
+ ivec2 sample_tile = clamp(tile + offset, ivec2(0), texture_bounds);
+ vec4 motion = texelFetch(tiles_tx, sample_tile, 0);
+
+ float motion_len_sqr_prev = len_squared(motion.xy);
+ float motion_len_sqr_next = len_squared(motion.zw);
+
+ if (motion_len_sqr_prev > max_motion_len_sqr_prev) {
+ if (neighbor_affect_this_tile(offset, motion.xy)) {
+ max_motion_len_sqr_prev = motion_len_sqr_prev;
+ out_max_motion.xy = motion.xy;
+ }
+ }
+
+ if (motion_len_sqr_next > max_motion_len_sqr_next) {
+ if (neighbor_affect_this_tile(offset, motion.zw)) {
+ max_motion_len_sqr_next = motion_len_sqr_next;
+ out_max_motion.zw = motion.zw;
+ }
+ }
+ }
+ }
+}
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_tiles_flatten_frag.glsl b/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_tiles_flatten_frag.glsl
new file mode 100644
index 00000000000..0cf380d6191
--- /dev/null
+++ b/source/blender/draw/engines/eevee/shaders/eevee_motion_blur_tiles_flatten_frag.glsl
@@ -0,0 +1,51 @@
+
+/**
+ * Shaders that down-sample velocity buffer into squared tile of MB_TILE_DIVISOR pixels wide.
+ * Outputs the largest motion vector in the tile area.
+ *
+ * Based on:
+ * A Fast and Stable Feature-Aware Motion Blur Filter
+ * by Jean-Philippe Guertin, Morgan McGuire, Derek Nowrouzezahrai
+ *
+ * Adapted from G3D Innovation Engine implementation.
+ */
+
+#pragma BLENDER_REQUIRE(common_math_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_motion_blur_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_shader_shared.hh)
+
+layout(std140) uniform motion_blur_block
+{
+ MotionBlurData mb;
+};
+
+uniform sampler2D velocity_tx;
+
+layout(location = 0) out vec4 out_max_motion;
+
+void main()
+{
+  ivec2 texture_bounds = textureSize(velocity_tx, 0) - 1;
+  ivec2 tile_co = ivec2(gl_FragCoord.xy);
+
+  /* Initialize the output explicitly: every write below is guarded by a comparison, and a NaN
+   * velocity sample makes all comparisons false, which would leave the output undefined.
+   * This also matches the dilate pass, which initializes its output the same way. */
+  out_max_motion = vec4(0.0);
+
+  float max_motion_len_sqr_prev = -1.0;
+  float max_motion_len_sqr_next = -1.0;
+  /* Scan the MB_TILE_DIVISOR x MB_TILE_DIVISOR pixel footprint of this tile. */
+  for (int x = 0; x < MB_TILE_DIVISOR; x++) {
+    for (int y = 0; y < MB_TILE_DIVISOR; y++) {
+      ivec2 sample_texel = tile_co * MB_TILE_DIVISOR + ivec2(x, y);
+      /* Clamp to edge to avoid sampling outside the velocity buffer on border tiles. */
+      vec4 motion = sample_velocity(mb, velocity_tx, min(sample_texel, texture_bounds));
+
+      float motion_len_sqr_prev = len_squared(motion.xy);
+      float motion_len_sqr_next = len_squared(motion.zw);
+
+      /* Track the largest previous-frame (xy) and next-frame (zw) motions independently. */
+      if (motion_len_sqr_prev > max_motion_len_sqr_prev) {
+        max_motion_len_sqr_prev = motion_len_sqr_prev;
+        out_max_motion.xy = motion.xy;
+      }
+      if (motion_len_sqr_next > max_motion_len_sqr_next) {
+        max_motion_len_sqr_next = motion_len_sqr_next;
+        out_max_motion.zw = motion.zw;
+      }
+    }
+  }
+}
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_object_forward_frag.glsl b/source/blender/draw/engines/eevee/shaders/eevee_object_forward_frag.glsl
index 202b011f172..61a4bb4418e 100644
--- a/source/blender/draw/engines/eevee/shaders/eevee_object_forward_frag.glsl
+++ b/source/blender/draw/engines/eevee/shaders/eevee_object_forward_frag.glsl
@@ -14,12 +14,12 @@ void main(void)
g_surf = init_from_interp();
/* Prevent precision issues on unit coordinates. */
- vec3 p = (g_surf.P + 0.000001) * 0.999999;
+ vec3 p = (g_surf.P * 10.0 + 0.000001) * 0.999999;
int xi = int(abs(floor(p.x)));
int yi = int(abs(floor(p.y)));
int zi = int(abs(floor(p.z)));
bool check = ((mod(xi, 2) == mod(yi, 2)) == bool(mod(zi, 2)));
- outRadiance = vec4(vec3(g_surf.N.z * 0.5 + 0.5) * mix(0.5, 0.8, check), 1.0);
+ outRadiance = vec4(vec3(g_surf.N.z * 0.5 + 0.5) * mix(0.2, 0.8, check), 1.0);
outTransmittance = vec4(0.0, 0.0, 0.0, 1.0);
} \ No newline at end of file
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_object_velocity_frag.glsl b/source/blender/draw/engines/eevee/shaders/eevee_object_velocity_frag.glsl
new file mode 100644
index 00000000000..37266335fd4
--- /dev/null
+++ b/source/blender/draw/engines/eevee/shaders/eevee_object_velocity_frag.glsl
@@ -0,0 +1,43 @@
+
+/**
+ * Output two 2D screen space velocity vector from object motion.
+ * There is a separate output for view and camera vectors.
+ * Camera vectors are used for reprojections and view vectors are used for motion blur fx.
+ * xy: Previous position > Current position
+ * zw: Current position > Next position
+ **/
+
+#pragma BLENDER_REQUIRE(common_math_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_object_velocity_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_shader_shared.hh)
+
+layout(std140) uniform camera_prev_block
+{
+ CameraData cam_prev;
+};
+
+layout(std140) uniform camera_curr_block
+{
+ CameraData cam_curr;
+};
+
+layout(std140) uniform camera_next_block
+{
+ CameraData cam_next;
+};
+
+layout(location = 0) out vec4 out_velocity_camera;
+layout(location = 1) out vec4 out_velocity_view;
+
+void main(void)
+{
+  /* Project the interpolated previous/current/next world-space positions with their
+   * respective camera data. Writes both outputs: camera-space velocity (vector render
+   * pass / reprojection) and view-space velocity (motion blur fx). */
+  compute_velocity(interp.P_prev,
+                   interp.P,
+                   interp.P_next,
+                   cam_prev,
+                   cam_curr,
+                   cam_next,
+                   out_velocity_camera,
+                   out_velocity_view);
+}
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_object_velocity_lib.glsl b/source/blender/draw/engines/eevee/shaders/eevee_object_velocity_lib.glsl
new file mode 100644
index 00000000000..3740a4a5761
--- /dev/null
+++ b/source/blender/draw/engines/eevee/shaders/eevee_object_velocity_lib.glsl
@@ -0,0 +1,8 @@
+
+/* World-space surface positions at the current, next and previous motion steps,
+ * written by the velocity vertex shader and interpolated for the fragment stage. */
+IN_OUT MeshDataInterface
+{
+  vec3 P;
+  vec3 P_next;
+  vec3 P_prev;
+}
+interp;
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_object_velocity_mesh_vert.glsl b/source/blender/draw/engines/eevee/shaders/eevee_object_velocity_mesh_vert.glsl
new file mode 100644
index 00000000000..bf78b6f04c1
--- /dev/null
+++ b/source/blender/draw/engines/eevee/shaders/eevee_object_velocity_mesh_vert.glsl
@@ -0,0 +1,40 @@
+
+#pragma BLENDER_REQUIRE(common_view_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_object_velocity_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_shader_shared.hh)
+
+layout(std140) uniform object_block
+{
+ VelocityObjectData velocity;
+};
+
+uniform int data_offset;
+
+in vec3 pos;
+in vec3 prv;
+in vec3 nxt;
+
+vec3 velocity_object_to_world_prev(VelocityObjectData data, vec3 prev_pos, vec3 current_pos)
+{
+  /* The deformation flag is packed into the last element of the next matrix to save space:
+   * 0.0 means per-vertex deformation motion data is available. */
+  bool use_deform = (data.next_object_mat[3][3] == 0.0);
+  vec3 local_pos = use_deform ? prev_pos : current_pos;
+  return transform_point(data.prev_object_mat, local_pos);
+}
+
+vec3 velocity_object_to_world_next(VelocityObjectData data, vec3 next_pos, vec3 current_pos)
+{
+  /* The deformation flag is packed into the last element of the next matrix to save space:
+   * 0.0 means per-vertex deformation motion data is available. */
+  mat4 mat_next = data.next_object_mat;
+  bool use_deform = (mat_next[3][3] == 0.0);
+  /* Restore a valid homogeneous matrix before transforming. */
+  mat_next[3][3] = 1.0;
+  return transform_point(mat_next, use_deform ? next_pos : current_pos);
+}
+
+void main(void)
+{
+  /* World-space positions at the three motion steps. The prev/next helpers fall back to
+   * the current local position when no deformation data is encoded in the matrices. */
+  interp.P = point_object_to_world(pos);
+  interp.P_prev = velocity_object_to_world_prev(velocity, prv, pos);
+  interp.P_next = velocity_object_to_world_next(velocity, nxt, pos);
+
+  /* Rasterize at the current frame's position. */
+  gl_Position = point_world_to_ndc(interp.P);
+} \ No newline at end of file
diff --git a/source/blender/draw/engines/eevee/shaders/eevee_velocity_lib.glsl b/source/blender/draw/engines/eevee/shaders/eevee_velocity_lib.glsl
new file mode 100644
index 00000000000..766cb9b8cce
--- /dev/null
+++ b/source/blender/draw/engines/eevee/shaders/eevee_velocity_lib.glsl
@@ -0,0 +1,38 @@
+
+#pragma BLENDER_REQUIRE(common_view_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_shader_shared.hh)
+#pragma BLENDER_REQUIRE(eevee_camera_lib.glsl)
+
+/**
+ * Compute screen-space velocity vectors from three world-space positions and their
+ * corresponding camera states.
+ * xy: previous > current motion, zw: current > next motion (in UV space).
+ * velocity_camera: full camera projection, used for the vector pass / reprojection.
+ * velocity_view: per shading view projection, used for the motion blur fx.
+ */
+void compute_velocity(vec3 P_prev,
+                      vec3 P,
+                      vec3 P_next,
+                      CameraData cam_prev,
+                      CameraData cam_curr,
+                      CameraData cam_next,
+                      out vec4 velocity_camera,
+                      out vec4 velocity_view)
+{
+  /* Project each position with the camera data of its own time step. */
+  vec2 prev_uv, curr_uv, next_uv;
+  prev_uv = camera_uv_from_world(cam_prev, P_prev);
+  curr_uv = camera_uv_from_world(cam_curr, P);
+  next_uv = camera_uv_from_world(cam_next, P_next);
+
+  velocity_camera.xy = prev_uv - curr_uv;
+  velocity_camera.zw = curr_uv - next_uv;
+
+  if (is_panoramic(cam_curr.type)) {
+    /* This path is only used with panoramic projections. Since the views always have
+     * the same 45° aperture angle, we can safely reuse the projection matrix.
+     * NOTE: prev/next use the bound ProjectionMatrix with per-step view matrices, while
+     * the current step uses the bound ViewProjectionMatrix directly. */
+    prev_uv = transform_point(ProjectionMatrix, transform_point(cam_prev.viewmat, P_prev)).xy;
+    curr_uv = transform_point(ViewProjectionMatrix, P).xy;
+    next_uv = transform_point(ProjectionMatrix, transform_point(cam_next.viewmat, P_next)).xy;
+
+    velocity_view.xy = prev_uv - curr_uv;
+    velocity_view.zw = curr_uv - next_uv;
+    /* Convert NDC velocity to UV velocity */
+    velocity_view *= 0.5;
+  }
+  else {
+    /* For non-panoramic projections the view velocity matches the camera velocity. */
+    velocity_view = velocity_camera;
+  }
+}