
git.blender.org/blender.git
author     Clément Foucault <foucault.clem@gmail.com>  2022-05-18 23:12:07 +0300
committer  Clément Foucault <foucault.clem@gmail.com>  2022-05-19 00:01:08 +0300
commit     0fcfc4cc5be839da4cbd5aa84443b6954e6ebbf4 (patch)
tree       17cf8e13d2550b37ab586286cf2cdbc6b05a744a /source/blender/draw
parent     33c5adba627b60d8a7504704bc5e9c80d7bd5ff0 (diff)
EEVEE-Next: Add Velocity module
This module allows tracking of object and geometry data across time. This commit adds no user-visible changes.

It works in both viewport (*) and render mode, gives correct motion for any camera projection type, and is compatible with displacement (**).

It is a huge improvement over the old EEVEE velocity, which was only used for motion blur and was only available in render mode. It also improves performance, as animated objects no longer need to be rendered a third time. The code is also much cleaner: no GPUVertBuf duplication, no GPUBatch amendment, no special cases for different geometry types, no DRWShadingGroup per object, no double buffering of velocity.

The module is still a work in progress, as the final output may still be flawed.

(*): Viewport support is already working, but there might be some cases where mapping will fail, for instance if topology changes but not the vertex count.

(**): Displacement does not contribute to motion vectors. Surfaces using displacement will have the same motion vectors as if they were not displaced.
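The indirection mentioned above (no GPUVertBuf duplication, no DRWShadingGroup per object) works by giving each draw-manager resource id an index entry that points into per-time-step buffers. The following is a minimal, self-contained C++ sketch of that lookup; it is not part of the patch, and the names are stand-ins for VelocityObjectIndex, object_steps and indirection_buf from the files below.

/* Sketch only: per-object offsets into per-time-step matrix buffers,
 * looked up through the draw-manager resource id. */
#include <array>
#include <cstdint>
#include <vector>

enum eVelocityStep { STEP_PREVIOUS = 0, STEP_NEXT = 1, STEP_CURRENT = 2 };

struct Mat4 {
  float m[4][4];
};

struct ObjectVelocityIndexSketch {
  std::array<int, 3> ofs = {-1, -1, -1}; /* One offset per eVelocityStep; -1 means "no data". */
};

/* One matrix buffer per time step; one index entry per draw resource id. */
std::array<std::vector<Mat4>, 3> object_steps;
std::vector<ObjectVelocityIndexSketch> indirection;

/* CPU analogue of how a shader resolves an object's matrix for a given step. */
const Mat4 *object_matrix_at_step(uint32_t resource_id, eVelocityStep step)
{
  const int ofs = indirection[resource_id].ofs[step];
  return (ofs != -1) ? &object_steps[step][ofs] : nullptr;
}

On the GPU side, eevee_velocity_lib.glsl (added below) performs the same lookup via velocity_indirection_buf[resource_id].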
Diffstat (limited to 'source/blender/draw')
-rw-r--r--  source/blender/draw/CMakeLists.txt | 6
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_instance.cc | 10
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_instance.hh | 32
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_material.cc | 25
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_material.hh | 28
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_pipeline.cc | 27
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_pipeline.hh | 25
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_shader.cc | 6
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_shader.hh | 4
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_shader_shared.hh | 51
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_sync.cc | 25
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_sync.hh | 1
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_velocity.cc | 420
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_velocity.hh | 178
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_view.cc | 28
-rw-r--r--  source/blender/draw/engines/eevee_next/eevee_view.hh | 8
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_geom_curves_vert.glsl | 13
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl | 11
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl | 11
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_surf_depth_frag.glsl | 13
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_velocity_lib.glsl | 101
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_velocity_resolve_comp.glsl | 58
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/infos/eevee_material_info.hh | 4
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/infos/eevee_velocity_info.hh | 55
24 files changed, 1082 insertions, 58 deletions
diff --git a/source/blender/draw/CMakeLists.txt b/source/blender/draw/CMakeLists.txt
index e5aca26c43c..3e90c2cb707 100644
--- a/source/blender/draw/CMakeLists.txt
+++ b/source/blender/draw/CMakeLists.txt
@@ -141,6 +141,7 @@ set(SRC
engines/eevee_next/eevee_shader.cc
engines/eevee_next/eevee_sync.cc
engines/eevee_next/eevee_view.cc
+ engines/eevee_next/eevee_velocity.cc
engines/eevee_next/eevee_world.cc
engines/workbench/workbench_data.c
engines/workbench/workbench_effect_antialiasing.c
@@ -364,6 +365,11 @@ set(GLSL_SRC
engines/eevee_next/shaders/eevee_surf_forward_frag.glsl
engines/eevee_next/shaders/eevee_surf_lib.glsl
engines/eevee_next/shaders/eevee_surf_world_frag.glsl
+ engines/eevee_next/shaders/eevee_velocity_lib.glsl
+ engines/eevee_next/shaders/eevee_velocity_resolve_comp.glsl
+
+ engines/eevee_next/eevee_defines.hh
+ engines/eevee_next/eevee_shader_shared.hh
engines/workbench/shaders/workbench_cavity_lib.glsl
engines/workbench/shaders/workbench_common_lib.glsl
diff --git a/source/blender/draw/engines/eevee_next/eevee_instance.cc b/source/blender/draw/engines/eevee_next/eevee_instance.cc
index f79b692018f..263acd701a2 100644
--- a/source/blender/draw/engines/eevee_next/eevee_instance.cc
+++ b/source/blender/draw/engines/eevee_next/eevee_instance.cc
@@ -55,6 +55,13 @@ void Instance::init(const int2 &output_res,
main_view.init(output_res);
}
+void Instance::set_time(float time)
+{
+ BLI_assert(render);
+ DRW_render_set_time(render, depsgraph, floorf(time), fractf(time));
+ update_eval_members();
+}
+
void Instance::update_eval_members()
{
scene = DEG_get_evaluated_scene(depsgraph);
@@ -77,6 +84,7 @@ void Instance::update_eval_members()
void Instance::begin_sync()
{
materials.begin_sync();
+ velocity.begin_sync();
pipelines.sync();
main_view.sync();
@@ -136,6 +144,7 @@ void Instance::object_sync(Object *ob)
void Instance::end_sync()
{
+ velocity.end_sync();
}
void Instance::render_sync()
@@ -172,6 +181,7 @@ void Instance::draw_viewport(DefaultFramebufferList *dfbl)
{
UNUSED_VARS(dfbl);
render_sample();
+ velocity.step_swap();
}
/** \} */
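The hooks added to Instance above define the order the velocity module relies on during a viewport redraw. The following stand-alone sketch (stub class and hypothetical driver, not the real engine code) spells out that call order:

/* Stand-in class; the comments describe what VelocityModule does at each point. */
struct VelocitySketch {
  void begin_sync() { /* Sync the current camera step, reset the step usage counter. */ }
  bool step_object_sync(int /*ob*/) { return true; /* Record obmat and geometry buffer. */ }
  void end_sync() { /* Build the indirection buffer, push step buffers to the GPU. */ }
  void step_swap() { /* The step just drawn becomes STEP_PREVIOUS for the next redraw. */ }
};

int main()
{
  VelocitySketch velocity;
  velocity.begin_sync();            /* Called from Instance::begin_sync(). */
  for (int ob = 0; ob < 3; ob++) {  /* Instance::object_sync() for each object. */
    velocity.step_object_sync(ob);
  }
  velocity.end_sync();              /* Instance::end_sync(). */
  /* ... render_sample() draws the passes ... */
  velocity.step_swap();             /* Instance::draw_viewport(), after render_sample(). */
  return 0;
}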
diff --git a/source/blender/draw/engines/eevee_next/eevee_instance.hh b/source/blender/draw/engines/eevee_next/eevee_instance.hh
index 47fa005d3d0..dd72169c854 100644
--- a/source/blender/draw/engines/eevee_next/eevee_instance.hh
+++ b/source/blender/draw/engines/eevee_next/eevee_instance.hh
@@ -15,6 +15,7 @@
#include "DNA_lightprobe_types.h"
#include "DRW_render.h"
+#include "eevee_camera.hh"
#include "eevee_material.hh"
#include "eevee_pipeline.hh"
#include "eevee_shader.hh"
@@ -29,11 +30,15 @@ namespace blender::eevee {
* \brief A running instance of the engine.
*/
class Instance {
+ friend VelocityModule;
+
public:
ShaderModule &shaders;
SyncModule sync;
MaterialModule materials;
PipelineModule pipelines;
+ VelocityModule velocity;
+ Camera camera;
MainView main_view;
World world;
@@ -61,6 +66,8 @@ class Instance {
sync(*this),
materials(*this),
pipelines(*this),
+ velocity(*this),
+ camera(*this),
main_view(*this),
world(*this){};
~Instance(){};
@@ -85,12 +92,37 @@ class Instance {
void draw_viewport(DefaultFramebufferList *dfbl);
+ bool is_viewport(void)
+ {
+ return !DRW_state_is_scene_render();
+ }
+
+ bool use_scene_lights(void) const
+ {
+ return (!v3d) ||
+ ((v3d->shading.type == OB_MATERIAL) &&
+ (v3d->shading.flag & V3D_SHADING_SCENE_LIGHTS)) ||
+ ((v3d->shading.type == OB_RENDER) &&
+ (v3d->shading.flag & V3D_SHADING_SCENE_LIGHTS_RENDER));
+ }
+
+ /* Light the scene using the selected HDRI in the viewport shading pop-over. */
+ bool use_studio_light(void) const
+ {
+ return (v3d) && (((v3d->shading.type == OB_MATERIAL) &&
+ ((v3d->shading.flag & V3D_SHADING_SCENE_WORLD) == 0)) ||
+ ((v3d->shading.type == OB_RENDER) &&
+ ((v3d->shading.flag & V3D_SHADING_SCENE_WORLD_RENDER) == 0)));
+ }
+
private:
void render_sample();
void mesh_sync(Object *ob, ObjectHandle &ob_handle);
void update_eval_members();
+
+ void set_time(float time);
};
} // namespace blender::eevee
diff --git a/source/blender/draw/engines/eevee_next/eevee_material.cc b/source/blender/draw/engines/eevee_next/eevee_material.cc
index 5d4e2d7752d..d28e165478f 100644
--- a/source/blender/draw/engines/eevee_next/eevee_material.cc
+++ b/source/blender/draw/engines/eevee_next/eevee_material.cc
@@ -232,17 +232,21 @@ MaterialPass MaterialModule::material_pass_get(::Material *blender_mat,
return matpass;
}
-Material &MaterialModule::material_sync(::Material *blender_mat, eMaterialGeometry geometry_type)
+Material &MaterialModule::material_sync(::Material *blender_mat,
+ eMaterialGeometry geometry_type,
+ bool has_motion)
{
eMaterialPipeline surface_pipe = (blender_mat->blend_method == MA_BM_BLEND) ? MAT_PIPE_FORWARD :
MAT_PIPE_DEFERRED;
eMaterialPipeline prepass_pipe = (blender_mat->blend_method == MA_BM_BLEND) ?
- MAT_PIPE_FORWARD_PREPASS :
- MAT_PIPE_DEFERRED_PREPASS;
+ (has_motion ? MAT_PIPE_FORWARD_PREPASS_VELOCITY :
+ MAT_PIPE_FORWARD_PREPASS) :
+ (has_motion ? MAT_PIPE_DEFERRED_PREPASS_VELOCITY :
+ MAT_PIPE_DEFERRED_PREPASS);
- /* Test */
+  /* TEST until we have the deferred pipeline up and running. */
surface_pipe = MAT_PIPE_FORWARD;
- prepass_pipe = MAT_PIPE_FORWARD_PREPASS;
+ prepass_pipe = has_motion ? MAT_PIPE_FORWARD_PREPASS_VELOCITY : MAT_PIPE_FORWARD_PREPASS;
MaterialKey material_key(blender_mat, geometry_type, surface_pipe);
@@ -288,7 +292,7 @@ Material &MaterialModule::material_sync(::Material *blender_mat, eMaterialGeomet
/* Returned Material references are valid until the next call to this function or
* material_get(). */
-MaterialArray &MaterialModule::material_array_get(Object *ob)
+MaterialArray &MaterialModule::material_array_get(Object *ob, bool has_motion)
{
material_array_.materials.clear();
material_array_.gpu_materials.clear();
@@ -297,7 +301,7 @@ MaterialArray &MaterialModule::material_array_get(Object *ob)
for (auto i : IndexRange(materials_len)) {
::Material *blender_mat = material_from_slot(ob, i);
- Material &mat = material_sync(blender_mat, to_material_geometry(ob));
+ Material &mat = material_sync(blender_mat, to_material_geometry(ob), has_motion);
material_array_.materials.append(&mat);
material_array_.gpu_materials.append(mat.shading.gpumat);
}
@@ -306,10 +310,13 @@ MaterialArray &MaterialModule::material_array_get(Object *ob)
/* Returned Material references are valid until the next call to this function or
* material_array_get(). */
-Material &MaterialModule::material_get(Object *ob, int mat_nr, eMaterialGeometry geometry_type)
+Material &MaterialModule::material_get(Object *ob,
+ bool has_motion,
+ int mat_nr,
+ eMaterialGeometry geometry_type)
{
::Material *blender_mat = material_from_slot(ob, mat_nr);
- Material &mat = material_sync(blender_mat, geometry_type);
+ Material &mat = material_sync(blender_mat, geometry_type, has_motion);
return mat;
}
diff --git a/source/blender/draw/engines/eevee_next/eevee_material.hh b/source/blender/draw/engines/eevee_next/eevee_material.hh
index af9ff6bf6ba..6cc5b483fc3 100644
--- a/source/blender/draw/engines/eevee_next/eevee_material.hh
+++ b/source/blender/draw/engines/eevee_next/eevee_material.hh
@@ -27,19 +27,21 @@ class Instance;
enum eMaterialPipeline {
MAT_PIPE_DEFERRED = 0,
- MAT_PIPE_FORWARD = 1,
- MAT_PIPE_DEFERRED_PREPASS = 2,
- MAT_PIPE_FORWARD_PREPASS = 3,
- MAT_PIPE_VOLUME = 4,
- MAT_PIPE_SHADOW = 5,
+ MAT_PIPE_FORWARD,
+ MAT_PIPE_DEFERRED_PREPASS,
+ MAT_PIPE_DEFERRED_PREPASS_VELOCITY,
+ MAT_PIPE_FORWARD_PREPASS,
+ MAT_PIPE_FORWARD_PREPASS_VELOCITY,
+ MAT_PIPE_VOLUME,
+ MAT_PIPE_SHADOW,
};
enum eMaterialGeometry {
MAT_GEOM_MESH = 0,
- MAT_GEOM_CURVES = 1,
- MAT_GEOM_GPENCIL = 2,
- MAT_GEOM_VOLUME = 3,
- MAT_GEOM_WORLD = 4,
+ MAT_GEOM_CURVES,
+ MAT_GEOM_GPENCIL,
+ MAT_GEOM_VOLUME,
+ MAT_GEOM_WORLD,
};
static inline void material_type_from_shader_uuid(uint64_t shader_uuid,
@@ -240,11 +242,13 @@ class MaterialModule {
void begin_sync();
- MaterialArray &material_array_get(Object *ob);
- Material &material_get(Object *ob, int mat_nr, eMaterialGeometry geometry_type);
+ MaterialArray &material_array_get(Object *ob, bool has_motion);
+ Material &material_get(Object *ob, bool has_motion, int mat_nr, eMaterialGeometry geometry_type);
private:
- Material &material_sync(::Material *blender_mat, eMaterialGeometry geometry_type);
+ Material &material_sync(::Material *blender_mat,
+ eMaterialGeometry geometry_type,
+ bool has_motion);
::Material *material_from_slot(Object *ob, int slot);
MaterialPass material_pass_get(::Material *blender_mat,
diff --git a/source/blender/draw/engines/eevee_next/eevee_pipeline.cc b/source/blender/draw/engines/eevee_next/eevee_pipeline.cc
index e31372e770d..8ae19eeebb8 100644
--- a/source/blender/draw/engines/eevee_next/eevee_pipeline.cc
+++ b/source/blender/draw/engines/eevee_next/eevee_pipeline.cc
@@ -54,11 +54,17 @@ void ForwardPipeline::sync()
{
DRWState state = DRW_STATE_WRITE_DEPTH | DRW_STATE_DEPTH_LESS;
prepass_ps_ = DRW_pass_create("Forward.Opaque.Prepass", state);
+ prepass_velocity_ps_ = DRW_pass_create("Forward.Opaque.Prepass.Velocity",
+ state | DRW_STATE_WRITE_COLOR);
state |= DRW_STATE_CULL_BACK;
prepass_culled_ps_ = DRW_pass_create("Forward.Opaque.Prepass.Culled", state);
+ prepass_culled_velocity_ps_ = DRW_pass_create("Forward.Opaque.Prepass.Velocity",
+ state | DRW_STATE_WRITE_COLOR);
- DRW_pass_link(prepass_ps_, prepass_culled_ps_);
+ DRW_pass_link(prepass_ps_, prepass_velocity_ps_);
+ DRW_pass_link(prepass_velocity_ps_, prepass_culled_ps_);
+ DRW_pass_link(prepass_culled_ps_, prepass_culled_velocity_ps_);
}
{
DRWState state = DRW_STATE_WRITE_COLOR | DRW_STATE_DEPTH_EQUAL;
@@ -110,11 +116,17 @@ DRWShadingGroup *ForwardPipeline::material_opaque_add(::Material *blender_mat, G
return grp;
}
-DRWShadingGroup *ForwardPipeline::prepass_opaque_add(::Material *blender_mat, GPUMaterial *gpumat)
+DRWShadingGroup *ForwardPipeline::prepass_opaque_add(::Material *blender_mat,
+ GPUMaterial *gpumat,
+ bool has_motion)
{
- DRWPass *pass = (blender_mat->blend_flag & MA_BL_CULL_BACKFACE) ? prepass_culled_ps_ :
- prepass_ps_;
+ DRWPass *pass = (blender_mat->blend_flag & MA_BL_CULL_BACKFACE) ?
+ (has_motion ? prepass_culled_velocity_ps_ : prepass_culled_ps_) :
+ (has_motion ? prepass_velocity_ps_ : prepass_ps_);
DRWShadingGroup *grp = DRW_shgroup_material_create(gpumat, pass);
+ if (has_motion) {
+ inst_.velocity.bind_resources(grp);
+ }
return grp;
}
@@ -181,15 +193,19 @@ DRWShadingGroup *ForwardPipeline::prepass_transparent_add(::Material *blender_ma
}
void ForwardPipeline::render(const DRWView *view,
+ Framebuffer &prepass_fb,
+ Framebuffer &combined_fb,
GPUTexture *depth_tx,
GPUTexture *UNUSED(combined_tx))
{
- UNUSED_VARS(view, depth_tx);
+ UNUSED_VARS(view, depth_tx, prepass_fb, combined_fb);
// HiZBuffer &hiz = inst_.hiz_front;
DRW_stats_group_start("ForwardOpaque");
+ GPU_framebuffer_bind(prepass_fb);
DRW_draw_pass(prepass_ps_);
+
// hiz.set_dirty();
// if (inst_.raytracing.enabled()) {
@@ -199,6 +215,7 @@ void ForwardPipeline::render(const DRWView *view,
// inst_.shadows.set_view(view, depth_tx);
+ GPU_framebuffer_bind(combined_fb);
DRW_draw_pass(opaque_ps_);
DRW_stats_group_end();
diff --git a/source/blender/draw/engines/eevee_next/eevee_pipeline.hh b/source/blender/draw/engines/eevee_next/eevee_pipeline.hh
index a5a6847f62e..35f7712f3d3 100644
--- a/source/blender/draw/engines/eevee_next/eevee_pipeline.hh
+++ b/source/blender/draw/engines/eevee_next/eevee_pipeline.hh
@@ -53,7 +53,9 @@ class ForwardPipeline {
Instance &inst_;
DRWPass *prepass_ps_ = nullptr;
+ DRWPass *prepass_velocity_ps_ = nullptr;
DRWPass *prepass_culled_ps_ = nullptr;
+ DRWPass *prepass_culled_velocity_ps_ = nullptr;
DRWPass *opaque_ps_ = nullptr;
DRWPass *opaque_culled_ps_ = nullptr;
DRWPass *transparent_ps_ = nullptr;
@@ -72,19 +74,25 @@ class ForwardPipeline {
material_opaque_add(blender_mat, gpumat);
}
- DRWShadingGroup *prepass_add(::Material *blender_mat, GPUMaterial *gpumat)
+ DRWShadingGroup *prepass_add(::Material *blender_mat, GPUMaterial *gpumat, bool has_motion)
{
return (GPU_material_flag_get(gpumat, GPU_MATFLAG_TRANSPARENT)) ?
prepass_transparent_add(blender_mat, gpumat) :
- prepass_opaque_add(blender_mat, gpumat);
+ prepass_opaque_add(blender_mat, gpumat, has_motion);
}
DRWShadingGroup *material_opaque_add(::Material *blender_mat, GPUMaterial *gpumat);
- DRWShadingGroup *prepass_opaque_add(::Material *blender_mat, GPUMaterial *gpumat);
+ DRWShadingGroup *prepass_opaque_add(::Material *blender_mat,
+ GPUMaterial *gpumat,
+ bool has_motion);
DRWShadingGroup *material_transparent_add(::Material *blender_mat, GPUMaterial *gpumat);
DRWShadingGroup *prepass_transparent_add(::Material *blender_mat, GPUMaterial *gpumat);
- void render(const DRWView *view, GPUTexture *depth_tx, GPUTexture *combined_tx);
+ void render(const DRWView *view,
+ Framebuffer &prepass_fb,
+ Framebuffer &combined_fb,
+ GPUTexture *depth_tx,
+ GPUTexture *combined_tx);
};
/** \} */
@@ -191,10 +199,15 @@ class PipelineModule {
{
switch (pipeline_type) {
case MAT_PIPE_DEFERRED_PREPASS:
- // return deferred.prepass_add(blender_mat, gpumat);
+ // return deferred.prepass_add(blender_mat, gpumat, false);
+ break;
+ case MAT_PIPE_DEFERRED_PREPASS_VELOCITY:
+ // return deferred.prepass_add(blender_mat, gpumat, true);
break;
case MAT_PIPE_FORWARD_PREPASS:
- return forward.prepass_add(blender_mat, gpumat);
+ return forward.prepass_add(blender_mat, gpumat, false);
+ case MAT_PIPE_FORWARD_PREPASS_VELOCITY:
+ return forward.prepass_add(blender_mat, gpumat, true);
case MAT_PIPE_DEFERRED:
// return deferred.material_add(blender_mat, gpumat);
break;
diff --git a/source/blender/draw/engines/eevee_next/eevee_shader.cc b/source/blender/draw/engines/eevee_next/eevee_shader.cc
index 9efb7a70c7e..009eb54864c 100644
--- a/source/blender/draw/engines/eevee_next/eevee_shader.cc
+++ b/source/blender/draw/engines/eevee_next/eevee_shader.cc
@@ -78,6 +78,8 @@ ShaderModule::~ShaderModule()
const char *ShaderModule::static_shader_create_info_name_get(eShaderType shader_type)
{
switch (shader_type) {
+ case VELOCITY_RESOLVE:
+ return "eevee_velocity_resolve";
/* To avoid compiler warning about missing case. */
case MAX_SHADER_TYPE:
return "";
@@ -289,6 +291,10 @@ void ShaderModule::material_create_info_ammend(GPUMaterial *gpumat, GPUCodegenOu
break;
default:
switch (pipeline_type) {
+ case MAT_PIPE_FORWARD_PREPASS_VELOCITY:
+ case MAT_PIPE_DEFERRED_PREPASS_VELOCITY:
+ info.additional_info("eevee_surf_depth", "eevee_velocity_geom");
+ break;
case MAT_PIPE_FORWARD_PREPASS:
case MAT_PIPE_DEFERRED_PREPASS:
case MAT_PIPE_SHADOW:
diff --git a/source/blender/draw/engines/eevee_next/eevee_shader.hh b/source/blender/draw/engines/eevee_next/eevee_shader.hh
index 29fcbafb167..0f42e880a10 100644
--- a/source/blender/draw/engines/eevee_next/eevee_shader.hh
+++ b/source/blender/draw/engines/eevee_next/eevee_shader.hh
@@ -26,7 +26,9 @@ namespace blender::eevee {
/* Keep alphabetical order and clean prefix. */
enum eShaderType {
- MAX_SHADER_TYPE = 0,
+ VELOCITY_RESOLVE = 0,
+
+ MAX_SHADER_TYPE,
};
/**
diff --git a/source/blender/draw/engines/eevee_next/eevee_shader_shared.hh b/source/blender/draw/engines/eevee_next/eevee_shader_shared.hh
index 97fc9c5a547..d9fee1b6073 100644
--- a/source/blender/draw/engines/eevee_next/eevee_shader_shared.hh
+++ b/source/blender/draw/engines/eevee_next/eevee_shader_shared.hh
@@ -74,6 +74,54 @@ BLI_STATIC_ASSERT_ALIGN(CameraData, 16)
/** \} */
/* -------------------------------------------------------------------- */
+/** \name VelocityModule
+ * \{ */
+
+#define VELOCITY_INVALID 512.0
+
+enum eVelocityStep : uint32_t {
+ STEP_PREVIOUS = 0,
+ STEP_NEXT = 1,
+ STEP_CURRENT = 2,
+};
+
+struct VelocityObjectIndex {
+ /** Offset inside #VelocityObjectBuf for each timestep. Indexed using eVelocityStep. */
+ int3 ofs;
+ /** Temporary index to copy this to the #VelocityIndexBuf. */
+ uint resource_id;
+
+#ifdef __cplusplus
+ VelocityObjectIndex() : ofs(-1, -1, -1), resource_id(-1){};
+#endif
+};
+BLI_STATIC_ASSERT_ALIGN(VelocityObjectIndex, 16)
+
+struct VelocityGeometryIndex {
+ /** Offset inside #VelocityGeometryBuf for each timestep. Indexed using eVelocityStep. */
+ int3 ofs;
+ /** If true, compute deformation motion blur. */
+ bool1 do_deform;
+ /** Length of data inside #VelocityGeometryBuf for each timestep. Indexed using eVelocityStep. */
+ int3 len;
+
+ int _pad0;
+
+#ifdef __cplusplus
+ VelocityGeometryIndex() : ofs(-1, -1, -1), do_deform(false), len(-1, -1, -1), _pad0(1){};
+#endif
+};
+BLI_STATIC_ASSERT_ALIGN(VelocityGeometryIndex, 16)
+
+struct VelocityIndex {
+ VelocityObjectIndex obj;
+ VelocityGeometryIndex geo;
+};
+BLI_STATIC_ASSERT_ALIGN(VelocityGeometryIndex, 16)
+
+/** \} */
+
+/* -------------------------------------------------------------------- */
/** \name Ray-Tracing
* \{ */
@@ -131,6 +179,9 @@ float4 utility_tx_sample(sampler2DArray util_tx, float2 uv, float layer)
#ifdef __cplusplus
using CameraDataBuf = draw::UniformBuffer<CameraData>;
+using VelocityIndexBuf = draw::StorageArrayBuffer<VelocityIndex, 16>;
+using VelocityObjectBuf = draw::StorageArrayBuffer<float4x4, 16>;
+using VelocityGeometryBuf = draw::StorageArrayBuffer<float4, 16, true>;
} // namespace blender::eevee
#endif
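These index structs are consumed by GLSL as storage-buffer elements (see VelocityIndexBuf above), which is why their layout is padded to 16-byte multiples, as the BLI_STATIC_ASSERT_ALIGN checks enforce. A plain C++ mirror, with ordinary ints standing in for the int3/bool1 GPU types, shows the resulting sizes; it is a sketch for illustration, not code from the patch:

#include <cstdint>

/* Sketch only: plain ints stand in for the int3/bool1 GPU types used above. */
struct VelocityObjectIndexSketch {
  int32_t ofs[3];       /* Offset per time step (STEP_PREVIOUS/NEXT/CURRENT). */
  uint32_t resource_id; /* Fills the 4th slot, keeping the struct at 16 bytes. */
};
static_assert(sizeof(VelocityObjectIndexSketch) == 16, "must stay 16-byte aligned");

struct VelocityGeometryIndexSketch {
  int32_t ofs[3];
  int32_t do_deform;    /* bool1 in the shared header. */
  int32_t len[3];
  int32_t _pad0;        /* Explicit padding to reach the next 16-byte boundary. */
};
static_assert(sizeof(VelocityGeometryIndexSketch) == 32, "must stay 16-byte aligned");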
diff --git a/source/blender/draw/engines/eevee_next/eevee_sync.cc b/source/blender/draw/engines/eevee_next/eevee_sync.cc
index efa5fdc89ab..42af251d770 100644
--- a/source/blender/draw/engines/eevee_next/eevee_sync.cc
+++ b/source/blender/draw/engines/eevee_next/eevee_sync.cc
@@ -104,7 +104,9 @@ static inline void shgroup_geometry_call(DRWShadingGroup *grp,
void SyncModule::sync_mesh(Object *ob, ObjectHandle &ob_handle)
{
- MaterialArray &material_array = inst_.materials.material_array_get(ob);
+ bool has_motion = inst_.velocity.step_object_sync(ob, ob_handle.object_key, ob_handle.recalc);
+
+ MaterialArray &material_array = inst_.materials.material_array_get(ob, has_motion);
GPUBatch **mat_geom = DRW_cache_object_surface_material_get(
ob, material_array.gpu_materials.data(), material_array.gpu_materials.size());
@@ -129,9 +131,6 @@ void SyncModule::sync_mesh(Object *ob, ObjectHandle &ob_handle)
is_alpha_blend = is_alpha_blend || material->is_alpha_blend_transparent;
}
- UNUSED_VARS(ob_handle);
- // shading_passes.velocity.mesh_add(ob, ob_handle);
-
// shadows.sync_object(ob, ob_handle, is_shadow_caster, is_alpha_blend);
}
@@ -156,8 +155,11 @@ struct gpIterData {
int vcount = 0;
bool instancing = false;
- gpIterData(Instance &inst_, Object *ob_)
- : inst(inst_), ob(ob_), material_array(inst_.materials.material_array_get(ob_))
+ gpIterData(Instance &inst_, Object *ob_, ObjectHandle &ob_handle)
+ : inst(inst_),
+ ob(ob_),
+ material_array(inst_.materials.material_array_get(
+ ob_, inst_.velocity.step_object_sync(ob, ob_handle.object_key, ob_handle.recalc)))
{
cfra = DEG_get_ctime(inst.depsgraph);
};
@@ -253,16 +255,12 @@ void SyncModule::sync_gpencil(Object *ob, ObjectHandle &ob_handle)
/* TODO(fclem): Waiting for a user option to use the render engine instead of gpencil engine. */
return;
- gpIterData iter(inst_, ob);
+ gpIterData iter(inst_, ob, ob_handle);
BKE_gpencil_visible_stroke_iter((bGPdata *)ob->data, nullptr, gpencil_stroke_sync, &iter);
gpencil_drawcall_flush(iter);
- UNUSED_VARS(ob_handle);
- /* TODO(fclem) Gpencil velocity. */
- // shading_passes.velocity.gpencil_add(ob, ob_handle);
-
// bool is_caster = true; /* TODO material.shadow.shgrp. */
// bool is_alpha_blend = true; /* TODO material.is_alpha_blend. */
// shadows.sync_object(ob, ob_handle, is_caster, is_alpha_blend);
@@ -304,12 +302,13 @@ void SyncModule::sync_curves(Object *ob, ObjectHandle &ob_handle, ModifierData *
mat_nr = part_settings->omat;
}
- Material &material = inst_.materials.material_get(ob, mat_nr - 1, MAT_GEOM_CURVES);
+ bool has_motion = inst_.velocity.step_object_sync(ob, ob_handle.object_key, ob_handle.recalc);
+ Material &material = inst_.materials.material_get(ob, has_motion, mat_nr - 1, MAT_GEOM_CURVES);
shgroup_curves_call(material.shading, ob, part_sys, modifier_data);
shgroup_curves_call(material.prepass, ob, part_sys, modifier_data);
shgroup_curves_call(material.shadow, ob, part_sys, modifier_data);
- UNUSED_VARS(ob_handle);
+
/* TODO(fclem) Hair velocity. */
// shading_passes.velocity.gpencil_add(ob, ob_handle);
diff --git a/source/blender/draw/engines/eevee_next/eevee_sync.hh b/source/blender/draw/engines/eevee_next/eevee_sync.hh
index 34357193d3e..bd8147a2882 100644
--- a/source/blender/draw/engines/eevee_next/eevee_sync.hh
+++ b/source/blender/draw/engines/eevee_next/eevee_sync.hh
@@ -28,6 +28,7 @@ class Instance;
/** \name ObjectKey
*
* Unique key to identify each object in the hash-map.
+ * Note that we get a unique key for each object component.
* \{ */
struct ObjectKey {
diff --git a/source/blender/draw/engines/eevee_next/eevee_velocity.cc b/source/blender/draw/engines/eevee_next/eevee_velocity.cc
new file mode 100644
index 00000000000..c556ba21fb5
--- /dev/null
+++ b/source/blender/draw/engines/eevee_next/eevee_velocity.cc
@@ -0,0 +1,420 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later
+ * Copyright 2021 Blender Foundation.
+ */
+
+/** \file
+ * \ingroup eevee
+ *
+ * The velocity pass outputs motion vectors to use for either
+ * temporal re-projection or motion blur.
+ *
+ * It is the module that tracks objects between frame updates.
+ *
+ * #VelocityModule contains all motion steps data and logic.
+ * #VelocityPass contains the resolve pass for static geometry.
+ * #VelocityView is a per view instance that contain the velocity buffer.
+ */
+
+#include "BKE_duplilist.h"
+#include "BKE_object.h"
+#include "BLI_map.hh"
+#include "DEG_depsgraph_query.h"
+#include "DNA_rigidbody_types.h"
+
+#include "eevee_instance.hh"
+// #include "eevee_renderpasses.hh"
+#include "eevee_shader.hh"
+#include "eevee_shader_shared.hh"
+#include "eevee_velocity.hh"
+
+namespace blender::eevee {
+
+/* -------------------------------------------------------------------- */
+/** \name VelocityModule
+ *
+ * \{ */
+
+void VelocityModule::init()
+{
+#if 0 /* TODO renderpasses */
+ if (inst_.render && (inst_.render_passes.vector != nullptr)) {
+ /* No motion blur and the vector pass was requested. Do the step sync here. */
+ const Scene *scene = inst_.scene;
+ float initial_time = scene->r.cfra + scene->r.subframe;
+ step_sync(STEP_PREVIOUS, initial_time - 1.0f);
+ step_sync(STEP_NEXT, initial_time + 1.0f);
+ inst_.set_time(initial_time);
+ }
+#endif
+}
+
+static void step_object_sync_render(void *velocity,
+ Object *ob,
+ RenderEngine *UNUSED(engine),
+ Depsgraph *UNUSED(depsgraph))
+{
+ ObjectKey object_key(ob);
+ reinterpret_cast<VelocityModule *>(velocity)->step_object_sync(ob, object_key);
+}
+
+void VelocityModule::step_sync(eVelocityStep step, float time)
+{
+ inst_.set_time(time);
+ step_ = step;
+ object_steps_usage[step_] = 0;
+ step_camera_sync();
+ DRW_render_object_iter(this, inst_.render, inst_.depsgraph, step_object_sync_render);
+}
+
+void VelocityModule::step_camera_sync()
+{
+ inst_.camera.sync();
+ *camera_steps[step_] = inst_.camera.data_get();
+}
+
+bool VelocityModule::step_object_sync(Object *ob,
+ ObjectKey &object_key,
+ int /* IDRecalcFlag */ recalc)
+{
+ bool has_motion = object_has_velocity(ob) || (recalc & ID_RECALC_TRANSFORM);
+ /* NOTE: Fragile. This will only work with 1 frame of lag since we can't record every geometry
+ * just in case there might be an update the next frame. */
+ bool has_deform = object_is_deform(ob) || (recalc & ID_RECALC_GEOMETRY);
+
+ if (!has_motion && !has_deform) {
+ return false;
+ }
+
+ uint32_t resource_id = DRW_object_resource_id_get(ob);
+
+ /* Object motion. */
+ /* FIXME(fclem) As we are using original objects pointers, there is a chance the previous
+ * object key matches a totally different object if the scene was changed by user or python
+ * callback. In this case, we cannot correctly match objects between updates.
+ * What this means is that there will be incorrect motion vectors for these objects.
+ * We live with that until we have a correct way of identifying new objects. */
+ VelocityObjectData &vel = velocity_map.lookup_or_add_default(object_key);
+ vel.obj.ofs[step_] = object_steps_usage[step_]++;
+ vel.obj.resource_id = resource_id;
+ vel.id = (ID *)ob->data;
+ object_steps[step_]->get_or_resize(vel.obj.ofs[step_]) = ob->obmat;
+ if (step_ == STEP_CURRENT) {
+ /* Replace invalid steps. Can happen if object was hidden in one of those steps. */
+ if (vel.obj.ofs[STEP_PREVIOUS] == -1) {
+ vel.obj.ofs[STEP_PREVIOUS] = object_steps_usage[STEP_PREVIOUS]++;
+ object_steps[STEP_PREVIOUS]->get_or_resize(vel.obj.ofs[STEP_PREVIOUS]) = ob->obmat;
+ }
+ if (vel.obj.ofs[STEP_NEXT] == -1) {
+ vel.obj.ofs[STEP_NEXT] = object_steps_usage[STEP_NEXT]++;
+ object_steps[STEP_NEXT]->get_or_resize(vel.obj.ofs[STEP_NEXT]) = ob->obmat;
+ }
+ }
+
+ /* Geometry motion. */
+ if (has_deform) {
+ auto add_cb = [&]() {
+ VelocityGeometryData data;
+ switch (ob->type) {
+ case OB_CURVES:
+ data.pos_buf = DRW_curves_pos_buffer_get(ob);
+ break;
+ default:
+ data.pos_buf = DRW_cache_object_pos_vertbuf_get(ob);
+ break;
+ }
+ return data;
+ };
+
+ const VelocityGeometryData &data = geometry_map.lookup_or_add_cb(vel.id, add_cb);
+
+ if (data.pos_buf == nullptr) {
+ has_deform = false;
+ }
+ }
+
+  /* Avoid drawing objects that have no motion but were tagged as such. */
+ if (step_ == STEP_CURRENT && has_motion == true && has_deform == false) {
+ float4x4 &obmat_curr = (*object_steps[STEP_CURRENT])[vel.obj.ofs[STEP_CURRENT]];
+ float4x4 &obmat_prev = (*object_steps[STEP_PREVIOUS])[vel.obj.ofs[STEP_PREVIOUS]];
+ float4x4 &obmat_next = (*object_steps[STEP_NEXT])[vel.obj.ofs[STEP_NEXT]];
+ if (inst_.is_viewport()) {
+ has_motion = (obmat_curr != obmat_prev);
+ }
+ else {
+ has_motion = (obmat_curr != obmat_prev || obmat_curr != obmat_next);
+ }
+ }
+
+#if 0
+ if (!has_motion && !has_deform) {
+ std::cout << "Detected no motion on " << ob->id.name << std::endl;
+ }
+ if (has_deform) {
+ std::cout << "Geometry Motion on " << ob->id.name << std::endl;
+ }
+ if (has_motion) {
+ std::cout << "Object Motion on " << ob->id.name << std::endl;
+ }
+#endif
+
+ if (!has_motion && !has_deform) {
+ return false;
+ }
+
+ /* TODO(@fclem): Reset sampling here? Should ultimately be covered by depsgraph update tags. */
+ // inst_.sampling.reset();
+
+ return true;
+}
+
+/**
+ * Moves next frame data to previous frame data. Nullify next frame data.
+ * IMPORTANT: This runs AFTER drawing in the viewport (so after `begin_sync()`) but BEFORE drawing
+ * in render mode (so before `begin_sync()`). In viewport the data will be used the next frame.
+ */
+void VelocityModule::step_swap()
+{
+ {
+    /* Now that vertex buffers are guaranteed to be updated, proceed with
+ * offset computation and copy into the geometry step buffer. */
+ uint dst_ofs = 0;
+ for (VelocityGeometryData &geom : geometry_map.values()) {
+ uint src_len = GPU_vertbuf_get_vertex_len(geom.pos_buf);
+ geom.len = src_len;
+ geom.ofs = dst_ofs;
+ dst_ofs += src_len;
+ }
+ /* TODO(fclem): Fail gracefully (disable motion blur + warning print) if tot_len *
+ * sizeof(float4) is greater than max SSBO size. */
+ geometry_steps[step_]->resize(max_ii(16, dst_ofs));
+
+ for (VelocityGeometryData &geom : geometry_map.values()) {
+ GPU_storagebuf_copy_sub_from_vertbuf(*geometry_steps[step_],
+ geom.pos_buf,
+ geom.ofs * sizeof(float4),
+ 0,
+ geom.len * sizeof(float4));
+ }
+ /* Copy back the #VelocityGeometryIndex into #VelocityObjectData which are
+ * indexed using persistent keys (unlike geometries which are indexed by volatile ID). */
+ for (VelocityObjectData &vel : velocity_map.values()) {
+ const VelocityGeometryData &geom = geometry_map.lookup_default(vel.id,
+ VelocityGeometryData());
+ vel.geo.len[step_] = geom.len;
+ vel.geo.ofs[step_] = geom.ofs;
+ /* Avoid reuse. */
+ vel.id = nullptr;
+ }
+
+ geometry_map.clear();
+ }
+
+ auto swap_steps = [&](eVelocityStep step_a, eVelocityStep step_b) {
+ SWAP(VelocityObjectBuf *, object_steps[step_a], object_steps[step_b]);
+ SWAP(VelocityGeometryBuf *, geometry_steps[step_a], geometry_steps[step_b]);
+ SWAP(CameraDataBuf *, camera_steps[step_a], camera_steps[step_b]);
+
+ for (VelocityObjectData &vel : velocity_map.values()) {
+ vel.obj.ofs[step_a] = vel.obj.ofs[step_b];
+ vel.obj.ofs[step_b] = (uint)-1;
+ vel.geo.ofs[step_a] = vel.geo.ofs[step_b];
+ vel.geo.len[step_a] = vel.geo.len[step_b];
+ vel.geo.ofs[step_b] = (uint)-1;
+ vel.geo.len[step_b] = (uint)-1;
+ }
+ };
+
+ if (inst_.is_viewport()) {
+ /* For viewport we only use the last rendered redraw as previous frame.
+ * We swap current with previous step at the end of a redraw.
+ * We do not support motion blur as it is rendered to avoid conflicting motions
+ * for temporal reprojection. */
+ swap_steps(eVelocityStep::STEP_PREVIOUS, eVelocityStep::STEP_CURRENT);
+ }
+ else {
+ /* Render case: The STEP_CURRENT is left untouched. */
+ swap_steps(eVelocityStep::STEP_PREVIOUS, eVelocityStep::STEP_NEXT);
+ }
+}
+
+void VelocityModule::begin_sync()
+{
+ if (inst_.is_viewport()) {
+ /* Viewport always evaluate current step. */
+ step_ = STEP_CURRENT;
+ }
+ step_camera_sync();
+ object_steps_usage[step_] = 0;
+}
+
+/* This is the end of the current frame sync. Not the step_sync. */
+void VelocityModule::end_sync()
+{
+ Vector<ObjectKey, 0> deleted_obj;
+
+ uint32_t max_resource_id_ = 0u;
+
+ for (Map<ObjectKey, VelocityObjectData>::Item item : velocity_map.items()) {
+ if (item.value.obj.resource_id == (uint)-1) {
+ deleted_obj.append(item.key);
+ }
+ else {
+ max_resource_id_ = max_uu(max_resource_id_, item.value.obj.resource_id);
+ }
+ }
+
+ if (deleted_obj.size() > 0) {
+ // inst_.sampling.reset();
+ }
+
+ for (auto key : deleted_obj) {
+ velocity_map.remove(key);
+ }
+
+ indirection_buf.resize(power_of_2_max_u(max_resource_id_ + 1));
+
+ /* Avoid uploading more data to the GPU as well as an extra level of
+   * indirection on the GPU by copying the offsets back to the VelocityIndex. */
+ for (VelocityObjectData &vel : velocity_map.values()) {
+ /* Disable deform if vertex count mismatch. */
+ if (inst_.is_viewport()) {
+ /* Current geometry step will be copied at the end of the frame.
+ * Thus vel.geo.len[STEP_CURRENT] is not yet valid and the current length is manually
+ * retrieved. */
+ GPUVertBuf *pos_buf = geometry_map.lookup_default(vel.id, VelocityGeometryData()).pos_buf;
+ vel.geo.do_deform = pos_buf != nullptr &&
+ (vel.geo.len[STEP_PREVIOUS] == GPU_vertbuf_get_vertex_len(pos_buf));
+ }
+ else {
+ vel.geo.do_deform = (vel.geo.len[STEP_PREVIOUS] == vel.geo.len[STEP_CURRENT]) &&
+ (vel.geo.len[STEP_NEXT] == vel.geo.len[STEP_CURRENT]);
+ }
+ indirection_buf[vel.obj.resource_id] = vel;
+ /* Reset for next sync. */
+ vel.obj.resource_id = (uint)-1;
+ }
+
+ object_steps[STEP_PREVIOUS]->push_update();
+ object_steps[STEP_NEXT]->push_update();
+ camera_steps[STEP_PREVIOUS]->push_update();
+ camera_steps[STEP_CURRENT]->push_update();
+ camera_steps[STEP_NEXT]->push_update();
+ indirection_buf.push_update();
+
+ {
+ resolve_ps_ = DRW_pass_create("Velocity.Resolve", (DRWState)0);
+ GPUShader *sh = inst_.shaders.static_shader_get(VELOCITY_RESOLVE);
+ DRWShadingGroup *grp = DRW_shgroup_create(sh, resolve_ps_);
+ DRW_shgroup_uniform_texture_ref(grp, "depth_tx", &input_depth_tx_);
+ DRW_shgroup_uniform_image_ref(grp, "velocity_view_img", &velocity_view_tx_);
+ DRW_shgroup_uniform_image_ref(grp, "velocity_camera_img", &velocity_camera_tx_);
+ DRW_shgroup_uniform_block(grp, "camera_prev", *camera_steps[STEP_PREVIOUS]);
+ DRW_shgroup_uniform_block(grp, "camera_curr", *camera_steps[STEP_CURRENT]);
+ DRW_shgroup_uniform_block(grp, "camera_next", *camera_steps[STEP_NEXT]);
+ DRW_shgroup_call_compute_ref(grp, resolve_dispatch_size_);
+ }
+}
+
+bool VelocityModule::object_has_velocity(const Object *ob)
+{
+#if 0
+ RigidBodyOb *rbo = ob->rigidbody_object;
+ /* Active rigidbody objects only, as only those are affected by sim. */
+ const bool has_rigidbody = (rbo && (rbo->type == RBO_TYPE_ACTIVE));
+ /* For now we assume dupli objects are moving. */
+ const bool is_dupli = (ob->base_flag & BASE_FROM_DUPLI) != 0;
+ const bool object_moves = is_dupli || has_rigidbody || BKE_object_moves_in_time(ob, true);
+#else
+ UNUSED_VARS(ob);
+ /* BKE_object_moves_in_time does not work in some cases.
+ * Better detect non moving object after evaluation. */
+ const bool object_moves = true;
+#endif
+ return object_moves;
+}
+
+bool VelocityModule::object_is_deform(const Object *ob)
+{
+ RigidBodyOb *rbo = ob->rigidbody_object;
+ /* Active rigidbody objects only, as only those are affected by sim. */
+ const bool has_rigidbody = (rbo && (rbo->type == RBO_TYPE_ACTIVE));
+ const bool is_deform = BKE_object_is_deform_modified(inst_.scene, (Object *)ob) ||
+ (has_rigidbody && (rbo->flag & RBO_FLAG_USE_DEFORM) != 0);
+
+ return is_deform;
+}
+
+void VelocityModule::bind_resources(DRWShadingGroup *grp)
+{
+ /* For viewport, only previous motion is supported.
+ * Still bind previous step to avoid undefined behavior. */
+ eVelocityStep next = inst_.is_viewport() ? STEP_PREVIOUS : STEP_NEXT;
+ DRW_shgroup_storage_block_ref(grp, "velocity_obj_prev_buf", &(*object_steps[STEP_PREVIOUS]));
+ DRW_shgroup_storage_block_ref(grp, "velocity_obj_next_buf", &(*object_steps[next]));
+ DRW_shgroup_storage_block_ref(grp, "velocity_geo_prev_buf", &(*geometry_steps[STEP_PREVIOUS]));
+ DRW_shgroup_storage_block_ref(grp, "velocity_geo_next_buf", &(*geometry_steps[next]));
+ DRW_shgroup_uniform_block_ref(grp, "camera_prev", &(*camera_steps[STEP_PREVIOUS]));
+ DRW_shgroup_uniform_block_ref(grp, "camera_curr", &(*camera_steps[STEP_CURRENT]));
+ DRW_shgroup_uniform_block_ref(grp, "camera_next", &(*camera_steps[next]));
+ DRW_shgroup_storage_block_ref(grp, "velocity_indirection_buf", &indirection_buf);
+}
+
+/* Resolve pass for static geometry and to camera space projection. */
+void VelocityModule::resolve_camera_motion(GPUTexture *depth_tx,
+ GPUTexture *velocity_view_tx,
+ GPUTexture *velocity_camera_tx)
+{
+ input_depth_tx_ = depth_tx;
+ velocity_view_tx_ = velocity_view_tx;
+ velocity_camera_tx_ = velocity_camera_tx;
+
+ resolve_dispatch_size_.x = divide_ceil_u(GPU_texture_width(depth_tx), 8);
+ resolve_dispatch_size_.y = divide_ceil_u(GPU_texture_height(depth_tx), 8);
+
+ DRW_draw_pass(resolve_ps_);
+}
+
+/** \} */
+
+/* -------------------------------------------------------------------- */
+/** \name Velocity View
+ * \{ */
+
+void VelocityView::sync()
+{
+ /* TODO: Remove. */
+ velocity_view_tx_.sync();
+ velocity_camera_tx_.sync();
+}
+
+void VelocityView::acquire(int2 extent)
+{
+ /* WORKAROUND: View name should be unique and static.
+ * With this, we can reuse the same texture across views. */
+ DrawEngineType *owner = (DrawEngineType *)view_name_.c_str();
+
+  /* Only RG16F when doing only reprojection or motion blur. */
+ eGPUTextureFormat format = inst_.is_viewport() ? GPU_RG16F : GPU_RGBA16F;
+ velocity_view_tx_.acquire(extent, format, owner);
+ if (false /* TODO(fclem): Panoramic camera. */) {
+ velocity_camera_tx_.acquire(extent, format, owner);
+ }
+ else {
+ velocity_camera_tx_.acquire(int2(1), format, owner);
+ }
+}
+
+void VelocityView::resolve(GPUTexture *depth_tx)
+{
+ inst_.velocity.resolve_camera_motion(depth_tx, velocity_view_tx_, velocity_camera_tx_);
+}
+
+void VelocityView::release()
+{
+ velocity_view_tx_.release();
+ velocity_camera_tx_.release();
+}
+
+/** \} */
+
+} // namespace blender::eevee
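For clarity, here is a reduced sketch of the buffer rotation that step_swap() above performs, using plain pointers in place of the GPU step buffers; the real function additionally remaps the per-object offsets stored in velocity_map:

#include <utility>

struct StepBuffersSketch {
  void *prev, *curr, *next; /* Stand-ins for the object/geometry/camera step buffers. */
};

void step_swap_sketch(StepBuffersSketch &steps, bool is_viewport)
{
  if (is_viewport) {
    /* Viewport: the redraw that just finished becomes the previous step. */
    std::swap(steps.prev, steps.curr);
  }
  else {
    /* Render: the pre-synced next step becomes the previous step of the
     * following time step; the current step is left untouched. */
    std::swap(steps.prev, steps.next);
  }
}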
diff --git a/source/blender/draw/engines/eevee_next/eevee_velocity.hh b/source/blender/draw/engines/eevee_next/eevee_velocity.hh
new file mode 100644
index 00000000000..1bfd9f8c18f
--- /dev/null
+++ b/source/blender/draw/engines/eevee_next/eevee_velocity.hh
@@ -0,0 +1,178 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later
+ * Copyright 2021 Blender Foundation.
+ */
+
+/** \file
+ * \ingroup eevee
+ *
+ * The velocity pass outputs motion vectors to use for either
+ * temporal re-projection or motion blur.
+ *
+ * It is the module that tracks object data between frame updates.
+ */
+
+#pragma once
+
+#include "BLI_map.hh"
+
+#include "eevee_shader_shared.hh"
+#include "eevee_sync.hh"
+
+namespace blender::eevee {
+
+/* -------------------------------------------------------------------- */
+/** \name VelocityModule
+ *
+ * \{ */
+
+/** Container for scene velocity data. */
+class VelocityModule {
+ friend class VelocityView;
+
+ public:
+ struct VelocityObjectData : public VelocityIndex {
+ /** ID to retrieve the corresponding #VelocityGeometryData after copy. */
+ ID *id;
+ };
+ struct VelocityGeometryData {
+ /** VertBuf not yet ready to be copied to the #VelocityGeometryBuf. */
+ GPUVertBuf *pos_buf = nullptr;
+ /* Offset in the #VelocityGeometryBuf to the start of the data. In vertex. */
+ int ofs;
+ /* Length of the vertex buffer. In vertex. */
+ int len;
+ };
+ /**
+ * The map contains indirection indices to the obmat and geometry in each step buffer.
+ * Note that each object component gets its own resource id so one component corresponds to one
+ * geometry offset.
+ */
+ Map<ObjectKey, VelocityObjectData> velocity_map;
+ /** Geometry to be copied to VelocityGeometryBuf. Indexed by evaluated ID *. Empty after */
+ Map<ID *, VelocityGeometryData> geometry_map;
+ /** Contains all objects matrices for each time step. */
+ std::array<VelocityObjectBuf *, 3> object_steps;
+ /** Contains all Geometry steps from deforming objects for each time step. */
+ std::array<VelocityGeometryBuf *, 3> geometry_steps;
+  /** Number of occupied slots in each `object_steps`. */
+ int3 object_steps_usage = int3(0);
+ /** Buffer of all #VelocityIndex used in this frame. Indexed by draw manager resource id. */
+ VelocityIndexBuf indirection_buf;
+
+ /**
+ * Copies of camera data. One for previous and one for next time step.
+ */
+ std::array<CameraDataBuf *, 3> camera_steps;
+
+ private:
+ Instance &inst_;
+
+ eVelocityStep step_ = STEP_CURRENT;
+
+ DRWPass *resolve_ps_ = nullptr;
+
+ /** Reference only. Not owned. */
+ GPUTexture *input_depth_tx_;
+ GPUTexture *velocity_view_tx_;
+ GPUTexture *velocity_camera_tx_;
+
+ int3 resolve_dispatch_size_ = int3(1, 1, 1);
+
+ public:
+ VelocityModule(Instance &inst) : inst_(inst)
+ {
+ for (VelocityObjectBuf *&step_buf : object_steps) {
+ step_buf = new VelocityObjectBuf();
+ }
+ for (VelocityGeometryBuf *&step_buf : geometry_steps) {
+ step_buf = new VelocityGeometryBuf();
+ }
+ for (CameraDataBuf *&step_buf : camera_steps) {
+ step_buf = new CameraDataBuf();
+ }
+ };
+
+ ~VelocityModule()
+ {
+ for (VelocityObjectBuf *step_buf : object_steps) {
+ delete step_buf;
+ }
+ for (VelocityGeometryBuf *step_buf : geometry_steps) {
+ delete step_buf;
+ }
+ for (CameraDataBuf *step_buf : camera_steps) {
+ delete step_buf;
+ }
+ }
+
+ void init();
+
+ void step_camera_sync();
+ void step_sync(eVelocityStep step, float time);
+
+ /* Gather motion data. Returns true if the object **can** have motion. */
+ bool step_object_sync(Object *ob, ObjectKey &ob_key, int recalc = 0);
+
+ /* Moves next frame data to previous frame data. Nullify next frame data. */
+ void step_swap();
+
+ void begin_sync();
+ void end_sync();
+
+ void bind_resources(DRWShadingGroup *grp);
+
+ private:
+ bool object_has_velocity(const Object *ob);
+ bool object_is_deform(const Object *ob);
+
+ void resolve_camera_motion(GPUTexture *depth_tx,
+ GPUTexture *velocity_view_tx,
+ GPUTexture *velocity_camera_tx);
+};
+
+/** \} */
+
+/* -------------------------------------------------------------------- */
+/** \name Velocity
+ *
+ * \{ */
+
+/**
+ * Per view module.
+ */
+class VelocityView {
+ private:
+ Instance &inst_;
+
+ StringRefNull view_name_;
+
+ TextureFromPool velocity_camera_tx_ = {"velocity_camera_tx_"};
+ TextureFromPool velocity_view_tx_ = {"velocity_view_tx_"};
+
+ public:
+ VelocityView(Instance &inst, const char *name) : inst_(inst), view_name_(name){};
+ ~VelocityView(){};
+
+ void sync();
+
+ void acquire(int2 extent);
+ void release();
+
+ void resolve(GPUTexture *depth_tx);
+
+ /**
+ * Getters
+ **/
+ GPUTexture *view_vectors_get() const
+ {
+ return velocity_view_tx_;
+ }
+ GPUTexture *camera_vectors_get() const
+ {
+ return (velocity_camera_tx_.is_valid()) ? velocity_camera_tx_ : velocity_view_tx_;
+ }
+};
+
+/** \} */
+
+} // namespace blender::eevee
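ShadingView (see the eevee_view.cc changes further below) drives one VelocityView per view with a simple sync, acquire, resolve, release lifecycle. Below is a stub sketch of that order, with comments summarizing what the real methods do; the class and sizes here are illustrative only:

/* Stub stand-in for VelocityView; only the call order matters here. */
struct VelocityViewSketch {
  void sync() { /* Reset the pooled texture handles. */ }
  void acquire(int w, int h) { (void)w; (void)h; /* Grab RG16F (viewport) or RGBA16F textures. */ }
  void resolve() { /* Dispatch the velocity-resolve compute pass for static pixels. */ }
  void release() { /* Hand the textures back to the pool. */ }
};

int main()
{
  VelocityViewSketch velocity_view;
  velocity_view.sync();              /* ShadingView::sync() */
  velocity_view.acquire(1920, 1080); /* ShadingView::render(), before the prepass. */
  /* ... depth prepass writes motion vectors for animated geometry ... */
  velocity_view.resolve();           /* After the prepass, using the scene depth. */
  velocity_view.release();           /* End of ShadingView::render(). */
  return 0;
}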
diff --git a/source/blender/draw/engines/eevee_next/eevee_view.cc b/source/blender/draw/engines/eevee_next/eevee_view.cc
index de7341f814b..e21342c5ef6 100644
--- a/source/blender/draw/engines/eevee_next/eevee_view.cc
+++ b/source/blender/draw/engines/eevee_next/eevee_view.cc
@@ -86,7 +86,7 @@ void ShadingView::sync(int2 render_extent_)
// dof_.sync(winmat_p, extent_);
// mb_.sync(extent_);
- // velocity_.sync(extent_);
+ velocity_.sync();
// rt_buffer_opaque_.sync(extent_);
// rt_buffer_refract_.sync(extent_);
// inst_.hiz_back.view_sync(extent_);
@@ -108,22 +108,30 @@ void ShadingView::render()
* With this, we can reuse the same texture across views. */
DrawEngineType *owner = (DrawEngineType *)name_;
+ DefaultTextureList *dtxl = DRW_viewport_texture_list_get();
+
depth_tx_.ensure_2d(GPU_DEPTH24_STENCIL8, extent_);
combined_tx_.acquire(extent_, GPU_RGBA16F, owner);
- view_fb_.ensure(GPU_ATTACHMENT_TEXTURE(depth_tx_), GPU_ATTACHMENT_TEXTURE(combined_tx_));
+ velocity_.acquire(extent_);
+ // combined_fb_.ensure(GPU_ATTACHMENT_TEXTURE(depth_tx_), GPU_ATTACHMENT_TEXTURE(combined_tx_));
+ // prepass_fb_.ensure(GPU_ATTACHMENT_TEXTURE(depth_tx_),
+ // GPU_ATTACHMENT_TEXTURE(velocity_.view_vectors_get()));
+ combined_fb_.ensure(GPU_ATTACHMENT_TEXTURE(dtxl->depth), GPU_ATTACHMENT_TEXTURE(dtxl->color));
+ prepass_fb_.ensure(GPU_ATTACHMENT_TEXTURE(dtxl->depth),
+ GPU_ATTACHMENT_TEXTURE(velocity_.view_vectors_get()));
update_view();
DRW_stats_group_start(name_);
// DRW_view_set_active(render_view_);
+ float4 clear_velocity(VELOCITY_INVALID);
+ GPU_framebuffer_bind(prepass_fb_);
+ GPU_framebuffer_clear_color(prepass_fb_, clear_velocity);
/* Alpha stores transmittance. So start at 1. */
float4 clear_color = {0.0f, 0.0f, 0.0f, 1.0f};
- // GPU_framebuffer_bind(view_fb_);
- // GPU_framebuffer_clear_color_depth(view_fb_, clear_color, 1.0f);
- DefaultFramebufferList *dfbl = DRW_viewport_framebuffer_list_get();
- GPU_framebuffer_bind(dfbl->default_fb);
- GPU_framebuffer_clear_color_depth(dfbl->default_fb, clear_color, 1.0f);
+ GPU_framebuffer_bind(combined_fb_);
+ GPU_framebuffer_clear_color_depth(combined_fb_, clear_color, 1.0f);
inst_.pipelines.world.render();
@@ -134,12 +142,13 @@ void ShadingView::render()
// inst_.lookdev.render_overlay(view_fb_);
- inst_.pipelines.forward.render(render_view_, depth_tx_, combined_tx_);
+ inst_.pipelines.forward.render(render_view_, prepass_fb_, combined_fb_, depth_tx_, combined_tx_);
// inst_.lights.debug_draw(view_fb_);
// inst_.shadows.debug_draw(view_fb_);
- // velocity_.render(depth_tx_);
+ // velocity_.resolve(depth_tx_);
+ velocity_.resolve(dtxl->depth);
// if (inst_.render_passes.vector) {
// inst_.render_passes.vector->accumulate(velocity_.camera_vectors_get(), sub_view_);
@@ -159,6 +168,7 @@ void ShadingView::render()
combined_tx_.release();
postfx_tx_.release();
+ velocity_.release();
}
GPUTexture *ShadingView::render_post(GPUTexture *input_tx)
diff --git a/source/blender/draw/engines/eevee_next/eevee_view.hh b/source/blender/draw/engines/eevee_next/eevee_view.hh
index e78a3222d8b..95ec1760c63 100644
--- a/source/blender/draw/engines/eevee_next/eevee_view.hh
+++ b/source/blender/draw/engines/eevee_next/eevee_view.hh
@@ -21,6 +21,7 @@
#include "eevee_camera.hh"
#include "eevee_pipeline.hh"
#include "eevee_shader.hh"
+#include "eevee_velocity.hh"
namespace blender::eevee {
@@ -43,13 +44,14 @@ class ShadingView {
/** Post-fx modules. */
// DepthOfField dof_;
// MotionBlur mb_;
- // Velocity velocity_;
+ VelocityView velocity_;
/** Raytracing persistent buffers. Only opaque and refraction can have surface tracing. */
// RaytraceBuffer rt_buffer_opaque_;
// RaytraceBuffer rt_buffer_refract_;
- Framebuffer view_fb_;
+ Framebuffer prepass_fb_;
+ Framebuffer combined_fb_;
Texture depth_tx_;
TextureFromPool combined_tx_;
TextureFromPool postfx_tx_;
@@ -69,7 +71,7 @@ class ShadingView {
public:
ShadingView(Instance &inst, const char *name, const float (*face_matrix)[4])
- : inst_(inst), name_(name), face_matrix_(face_matrix){};
+ : inst_(inst), name_(name), face_matrix_(face_matrix), velocity_(inst, name){};
~ShadingView(){};
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_curves_vert.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_curves_vert.glsl
index 708bd153e84..a9ee710ec99 100644
--- a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_curves_vert.glsl
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_curves_vert.glsl
@@ -5,6 +5,7 @@
#pragma BLENDER_REQUIRE(eevee_attributes_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_nodetree_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_surf_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
void main()
{
@@ -27,6 +28,18 @@ void main()
interp.N = cross(T, interp.curves_binormal);
interp.curves_strand_id = hair_get_strand_id();
interp.barycentric_coords = hair_get_barycentric();
+#ifdef MAT_VELOCITY
+ /* Due to the screen space nature of the vertex positioning, we compute only the motion of curve
+ * strand, not its cylinder. Otherwise we would add the rotation velocity. */
+ vec3 prv, nxt;
+ velocity_local_pos_get(pos, hair_get_base_id(), prv, nxt);
+ /* FIXME(fclem): Evaluating before displacement avoid displacement being treated as motion but
+ * ignores motion from animated displacement. Supporting animated displacement motion vectors
+ * would require evaluating the nodetree multiple time with different nodetree UBOs evaluated at
+ * different times, but also with different attributes (maybe we could assume static attribute at
+ * least). */
+ velocity_vertex(P_prev, P_curr, P_next, motion.prev, motion.next);
+#endif
init_globals();
attrib_load();
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl
index 5b404ec5237..c60527162f7 100644
--- a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl
@@ -3,6 +3,7 @@
#pragma BLENDER_REQUIRE(common_view_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_attributes_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_surf_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
void main()
{
@@ -38,6 +39,16 @@ void main()
aspect,
thickness,
hardness);
+#ifdef MAT_VELOCITY
+ /* GPencil do not support deformation motion blur. */
+ vec3 lP_curr = transform_point(ModelMatrixInverse, interp.P);
+ /* FIXME(fclem): Evaluating before displacement avoid displacement being treated as motion but
+ * ignores motion from animated displacement. Supporting animated displacement motion vectors
+ * would require evaluating the nodetree multiple time with different nodetree UBOs evaluated at
+ * different times, but also with different attributes (maybe we could assume static attribute at
+ * least). */
+ velocity_vertex(lP_curr, lP_curr, lP_curr, motion.prev, motion.next);
+#endif
init_globals();
attrib_load();
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl
index 7b38057f41a..c07a8ae0eea 100644
--- a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl
@@ -3,6 +3,7 @@
#pragma BLENDER_REQUIRE(eevee_attributes_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_nodetree_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_surf_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
void main()
{
@@ -10,6 +11,16 @@ void main()
interp.P = point_object_to_world(pos);
interp.N = normal_object_to_world(nor);
+#ifdef MAT_VELOCITY
+ vec3 prv, nxt;
+ velocity_local_pos_get(pos, gl_VertexID, prv, nxt);
+ /* FIXME(fclem): Evaluating before displacement avoid displacement being treated as motion but
+ * ignores motion from animated displacement. Supporting animated displacement motion vectors
+ * would require evaluating the nodetree multiple time with different nodetree UBOs evaluated at
+ * different times, but also with different attributes (maybe we could assume static attribute at
+ * least). */
+ velocity_vertex(prv, pos, nxt, motion.prev, motion.next);
+#endif
init_globals();
attrib_load();
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_surf_depth_frag.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_surf_depth_frag.glsl
index 002eed91130..7ddf941df7c 100644
--- a/source/blender/draw/engines/eevee_next/shaders/eevee_surf_depth_frag.glsl
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_surf_depth_frag.glsl
@@ -8,6 +8,7 @@
#pragma BLENDER_REQUIRE(common_hair_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_nodetree_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_surf_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
/* From the paper "Hashed Alpha Testing" by Chris Wyman and Morgan McGuire. */
float hash(vec2 a)
@@ -69,4 +70,16 @@ void main()
discard;
}
#endif
+
+#ifdef MAT_VELOCITY
+ vec4 out_velocity_camera; /* TODO(fclem): Panoramic cameras. */
+ velocity_camera(interp.P + motion.prev,
+ interp.P,
+ interp.P - motion.next,
+ out_velocity_camera,
+ out_velocity_view);
+
+ /* For testing in viewport. */
+ out_velocity_view.zw = vec2(0.0);
+#endif
}
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_lib.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_lib.glsl
new file mode 100644
index 00000000000..435ae6658c9
--- /dev/null
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_lib.glsl
@@ -0,0 +1,101 @@
+
+#pragma BLENDER_REQUIRE(common_view_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_camera_lib.glsl)
+
+#ifdef VELOCITY_CAMERA
+
+/**
+ * Given a triple of position, compute the previous and next motion vectors.
+ * Returns uv space motion vectors in pairs (motion_prev.xy, motion_next.xy)
+ */
+vec4 velocity_view(vec3 P_prev, vec3 P, vec3 P_next)
+{
+ vec2 prev_uv, curr_uv, next_uv;
+
+ prev_uv = transform_point(ProjectionMatrix, transform_point(camera_prev.viewmat, P_prev)).xy;
+ curr_uv = transform_point(ViewProjectionMatrix, P).xy;
+ next_uv = transform_point(ProjectionMatrix, transform_point(camera_next.viewmat, P_next)).xy;
+
+ vec4 motion;
+ motion.xy = prev_uv - curr_uv;
+ motion.zw = curr_uv - next_uv;
+ /* Convert NDC velocity to UV velocity */
+ motion *= 0.5;
+
+ return motion;
+}
+
+/**
+ * Given a triple of position, compute the previous and next motion vectors.
+ * Returns uv space motion vectors in pairs (motion_prev.xy, motion_next.xy)
+ * \a velocity_camera is the motion in film UV space after camera projection.
+ * \a velocity_view is the motion in ShadingView UV space. It is different
+ * from velocity_camera for multi-view rendering.
+ */
+void velocity_camera(vec3 P_prev, vec3 P, vec3 P_next, out vec4 vel_camera, out vec4 vel_view)
+{
+ vec2 prev_uv, curr_uv, next_uv;
+ prev_uv = camera_uv_from_world(camera_prev, P_prev);
+ curr_uv = camera_uv_from_world(camera_curr, P);
+ next_uv = camera_uv_from_world(camera_next, P_next);
+
+ vel_camera.xy = prev_uv - curr_uv;
+ vel_camera.zw = curr_uv - next_uv;
+
+ if (is_panoramic(camera_curr.type)) {
+    /* This path is only used if using panoramic projections. Since the views always have
+ * the same 45° aperture angle, we can safely reuse the projection matrix. */
+ prev_uv = transform_point(ProjectionMatrix, transform_point(camera_prev.viewmat, P_prev)).xy;
+ curr_uv = transform_point(ViewProjectionMatrix, P).xy;
+ next_uv = transform_point(ProjectionMatrix, transform_point(camera_next.viewmat, P_next)).xy;
+
+ vel_view.xy = prev_uv - curr_uv;
+ vel_view.zw = curr_uv - next_uv;
+ /* Convert NDC velocity to UV velocity */
+ vel_view *= 0.5;
+ }
+ else {
+ vel_view = vel_camera;
+ }
+}
+
+#endif
+
+#ifdef MAT_VELOCITY
+
+/**
+ * Given a triple of position, compute the previous and next motion vectors.
+ * Returns a tuple of world space motion deltas.
+ */
+void velocity_local_pos_get(vec3 lP, int vert_id, out vec3 lP_prev, out vec3 lP_next)
+{
+ VelocityIndex vel = velocity_indirection_buf[resource_id];
+ lP_next = lP_prev = lP;
+ if (vel.geo.do_deform) {
+ if (vel.geo.ofs[STEP_PREVIOUS] != -1) {
+ lP_prev = velocity_geo_prev_buf[vel.geo.ofs[STEP_PREVIOUS] + vert_id].xyz;
+ }
+ if (vel.geo.ofs[STEP_NEXT] != -1) {
+ lP_next = velocity_geo_next_buf[vel.geo.ofs[STEP_NEXT] + vert_id].xyz;
+ }
+ }
+}
+
+/**
+ * Given a triple of position, compute the previous and next motion vectors.
+ * Returns a tuple of world space motion deltas.
+ */
+void velocity_vertex(
+ vec3 lP_prev, vec3 lP, vec3 lP_next, out vec3 motion_prev, out vec3 motion_next)
+{
+ VelocityIndex vel = velocity_indirection_buf[resource_id];
+ mat4 obmat_prev = velocity_obj_prev_buf[vel.obj.ofs[STEP_PREVIOUS]];
+ mat4 obmat_next = velocity_obj_next_buf[vel.obj.ofs[STEP_NEXT]];
+ vec3 P_prev = transform_point(obmat_prev, lP_prev);
+ vec3 P_next = transform_point(obmat_next, lP_next);
+ vec3 P = transform_point(ModelMatrix, lP);
+ motion_prev = P_prev - P;
+ motion_next = P_next - P;
+}
+
+#endif
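A note on the 0.5 factor in velocity_view() above: the projected positions are in normalized device coordinates (NDC, -1..1 range), while the motion vectors are stored in UV space (0..1 range). Since uv = 0.5 * ndc + 0.5, the constant offset cancels when two projected points are subtracted:

  uv_prev - uv_curr = 0.5 * (ndc_prev - ndc_curr)

so scaling the NDC deltas by 0.5 yields the UV-space deltas directly.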
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_resolve_comp.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_resolve_comp.glsl
new file mode 100644
index 00000000000..b68b2eaf117
--- /dev/null
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_resolve_comp.glsl
@@ -0,0 +1,58 @@
+
+/**
+ * Fullscreen pass that compute motion vector for static geometry.
+ * Animated geometry has already written correct motion vectors.
+ */
+
+#pragma BLENDER_REQUIRE(common_view_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
+
+#define is_valid_output(img_) (imageSize(img_).x > 1)
+
+void main()
+{
+ ivec2 texel = ivec2(gl_GlobalInvocationID.xy);
+ vec4 motion = imageLoad(velocity_view_img, texel);
+
+ bool pixel_has_valid_motion = (motion.x != VELOCITY_INVALID);
+ float depth = texelFetch(depth_tx, texel, 0).r;
+ bool is_background = (depth == 1.0f);
+
+ vec2 uv = vec2(texel) * drw_view.viewport_size_inverse;
+ vec3 P_next, P_prev, P_curr;
+
+ if (pixel_has_valid_motion) {
+ /* Animated geometry. View motion already computed during prepass. Convert only to camera. */
+ // P_prev = get_world_space_from_depth(uv + motion.xy, 0.5);
+ // P_curr = get_world_space_from_depth(uv, 0.5);
+ // P_next = get_world_space_from_depth(uv + motion.zw, 0.5);
+ return;
+ }
+ else if (is_background) {
+ /* NOTE: Use viewCameraVec to avoid imprecision if camera is far from origin. */
+ vec3 vV = viewCameraVec(get_view_space_from_depth(uv, 1.0));
+ vec3 V = transform_direction(ViewMatrixInverse, vV);
+ /* Background has no motion under camera translation. Translate view vector with the camera. */
+ /* WATCH(fclem): Might create precision issues. */
+ P_next = camera_next.viewinv[3].xyz + V;
+ P_curr = camera_curr.viewinv[3].xyz + V;
+ P_prev = camera_prev.viewinv[3].xyz + V;
+ }
+ else {
+ /* Static geometry. No translation in world space. */
+ P_curr = get_world_space_from_depth(uv, depth);
+ P_prev = P_curr;
+ P_next = P_curr;
+ }
+
+ vec4 vel_camera, vel_view;
+ velocity_camera(P_prev, P_curr, P_next, vel_camera, vel_view);
+
+ if (in_texture_range(texel, depth_tx)) {
+ imageStore(velocity_view_img, texel, vel_view);
+
+ if (is_valid_output(velocity_camera_img)) {
+ imageStore(velocity_camera_img, texel, vel_camera);
+ }
+ }
+}
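This resolve pass runs in 8 x 8 work groups (see local_group_size(8, 8) in eevee_velocity_info.hh below), and VelocityModule::resolve_camera_motion() dispatches divide_ceil_u(width, 8) x divide_ceil_u(height, 8) groups. For a 1920 x 1080 depth texture that is 240 x 135 groups; for sizes that are not multiples of 8 the count rounds up, and the in_texture_range() check above discards the invocations that fall outside the texture.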
diff --git a/source/blender/draw/engines/eevee_next/shaders/infos/eevee_material_info.hh b/source/blender/draw/engines/eevee_next/shaders/infos/eevee_material_info.hh
index 12b8e085455..49250b5741e 100644
--- a/source/blender/draw/engines/eevee_next/shaders/infos/eevee_material_info.hh
+++ b/source/blender/draw/engines/eevee_next/shaders/infos/eevee_material_info.hh
@@ -22,6 +22,7 @@ GPU_SHADER_CREATE_INFO(eevee_sampling_data)
* \{ */
GPU_SHADER_CREATE_INFO(eevee_geom_mesh)
+ .additional_info("eevee_shared")
.define("MAT_GEOM_MESH")
.vertex_in(0, Type::VEC3, "pos")
.vertex_in(1, Type::VEC3, "nor")
@@ -29,16 +30,19 @@ GPU_SHADER_CREATE_INFO(eevee_geom_mesh)
.additional_info("draw_mesh", "draw_resource_id_varying", "draw_resource_handle");
GPU_SHADER_CREATE_INFO(eevee_geom_gpencil)
+ .additional_info("eevee_shared")
.define("MAT_GEOM_GPENCIL")
.vertex_source("eevee_geom_gpencil_vert.glsl")
.additional_info("draw_gpencil", "draw_resource_id_varying", "draw_resource_handle");
GPU_SHADER_CREATE_INFO(eevee_geom_curves)
+ .additional_info("eevee_shared")
.define("MAT_GEOM_CURVES")
.vertex_source("eevee_geom_curves_vert.glsl")
.additional_info("draw_hair", "draw_resource_id_varying", "draw_resource_handle");
GPU_SHADER_CREATE_INFO(eevee_geom_world)
+ .additional_info("eevee_shared")
.define("MAT_GEOM_WORLD")
.builtins(BuiltinBits::VERTEX_ID)
.vertex_source("eevee_geom_world_vert.glsl")
diff --git a/source/blender/draw/engines/eevee_next/shaders/infos/eevee_velocity_info.hh b/source/blender/draw/engines/eevee_next/shaders/infos/eevee_velocity_info.hh
new file mode 100644
index 00000000000..a5f16363466
--- /dev/null
+++ b/source/blender/draw/engines/eevee_next/shaders/infos/eevee_velocity_info.hh
@@ -0,0 +1,55 @@
+
+#include "gpu_shader_create_info.hh"
+
+/* -------------------------------------------------------------------- */
+/** \name Surface Velocity
+ *
+ * Combined with the depth prepass shader.
+ * Outputs the view motion vectors for animated objects.
+ * \{ */
+
+/* Pass world space deltas to the fragment shader.
+ * This is to make sure that the resulting motion vectors are valid even with displacement. */
+GPU_SHADER_INTERFACE_INFO(eevee_velocity_surface_iface, "motion")
+ .smooth(Type::VEC3, "prev")
+ .smooth(Type::VEC3, "next");
+
+GPU_SHADER_CREATE_INFO(eevee_velocity_camera)
+ .define("VELOCITY_CAMERA")
+ .uniform_buf(1, "CameraData", "camera_prev")
+ .uniform_buf(2, "CameraData", "camera_curr")
+ .uniform_buf(3, "CameraData", "camera_next");
+
+GPU_SHADER_CREATE_INFO(eevee_velocity_geom)
+ .define("MAT_VELOCITY")
+ .auto_resource_location(true)
+ .storage_buf(4, Qualifier::READ, "mat4", "velocity_obj_prev_buf[]", Frequency::PASS)
+ .storage_buf(5, Qualifier::READ, "mat4", "velocity_obj_next_buf[]", Frequency::PASS)
+ .storage_buf(6, Qualifier::READ, "vec4", "velocity_geo_prev_buf[]", Frequency::PASS)
+ .storage_buf(7, Qualifier::READ, "vec4", "velocity_geo_next_buf[]", Frequency::PASS)
+ .storage_buf(
+ 7, Qualifier::READ, "VelocityIndex", "velocity_indirection_buf[]", Frequency::PASS)
+ .vertex_out(eevee_velocity_surface_iface)
+ .fragment_out(0, Type::VEC4, "out_velocity_view")
+ .additional_info("eevee_velocity_camera");
+
+/** \} */
+
+/* -------------------------------------------------------------------- */
+/** \name Velocity Resolve
+ *
+ * Computes velocity for static objects.
+ * Also converts motion to camera space (as opposed to view space) if needed.
+ * \{ */
+
+GPU_SHADER_CREATE_INFO(eevee_velocity_resolve)
+ .do_static_compilation(true)
+ .local_group_size(8, 8)
+ .sampler(0, ImageType::DEPTH_2D, "depth_tx")
+ .image(0, GPU_RG16F, Qualifier::READ_WRITE, ImageType::FLOAT_2D, "velocity_view_img")
+ .image(1, GPU_RG16F, Qualifier::WRITE, ImageType::FLOAT_2D, "velocity_camera_img")
+ .additional_info("eevee_shared")
+ .compute_source("eevee_velocity_resolve_comp.glsl")
+ .additional_info("draw_view", "eevee_velocity_camera");
+
+/** \} */