git.blender.org/blender.git
author     Clément Foucault <foucault.clem@gmail.com>   2022-05-18 23:12:07 +0300
committer  Clément Foucault <foucault.clem@gmail.com>   2022-05-19 00:01:08 +0300
commit     0fcfc4cc5be839da4cbd5aa84443b6954e6ebbf4 (patch)
tree       17cf8e13d2550b37ab586286cf2cdbc6b05a744a /source/blender/draw/engines/eevee_next/shaders
parent     33c5adba627b60d8a7504704bc5e9c80d7bd5ff0 (diff)
EEVEE-Next: Add Velocity module
This module allows tracking of object and geometry data across time. This commit adds no user-visible changes.

It works in both viewport (*) and render mode, gives correct motion for any camera projection type, and is compatible with displacement (**). It is a huge improvement upon the old EEVEE velocity, which was only used for motion blur and only available in render. It also improves speed, as animated objects no longer need to be rendered a third time. The code is also much cleaner: no GPUVertBuf duplication, no GPUBatch amendment, no special cases for different geometry types, no DRWShadingGroup per object, no double buffering of velocity.

The module is still a work in progress, as the final output may still be flawed.

(*): Viewport support is already working, but there might be some cases where mapping will fail, for instance if topology changes but not vertex count.

(**): Displacement does not contribute to motion vectors. Surfaces using displacement will have the same motion vectors as if they were not displaced.
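In rough terms, each vertex carries its world-space position at the previous, current and next time step; projecting each with the matching camera matrix and differencing the results gives the screen-space motion vectors. A minimal sketch of that idea (illustration only, not the patch code; the matrix parameters stand in for the camera_prev/camera_curr/camera_next data used in the shaders below):

/* Illustration only: pack (prev - curr, curr - next) UV-space motion,
 * mirroring what velocity_vertex() + velocity_view() in this patch compute. */
vec4 motion_vectors_sketch(vec3 P_prev, vec3 P_curr, vec3 P_next,
                           mat4 persmat_prev, mat4 persmat_curr, mat4 persmat_next)
{
  vec4 ndc_prev = persmat_prev * vec4(P_prev, 1.0);
  vec4 ndc_curr = persmat_curr * vec4(P_curr, 1.0);
  vec4 ndc_next = persmat_next * vec4(P_next, 1.0);
  /* Perspective divide, then remap NDC [-1, 1] to UV [0, 1]. */
  vec2 uv_prev = (ndc_prev.xy / ndc_prev.w) * 0.5 + 0.5;
  vec2 uv_curr = (ndc_curr.xy / ndc_curr.w) * 0.5 + 0.5;
  vec2 uv_next = (ndc_next.xy / ndc_next.w) * 0.5 + 0.5;
  return vec4(uv_prev - uv_curr, uv_curr - uv_next);
}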
Diffstat (limited to 'source/blender/draw/engines/eevee_next/shaders')
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_geom_curves_vert.glsl      |  13
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl     |  11
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl        |  11
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_surf_depth_frag.glsl       |  13
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_velocity_lib.glsl          | 101
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/eevee_velocity_resolve_comp.glsl |  58
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/infos/eevee_material_info.hh     |   4
-rw-r--r--  source/blender/draw/engines/eevee_next/shaders/infos/eevee_velocity_info.hh     |  55
8 files changed, 266 insertions, 0 deletions
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_curves_vert.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_curves_vert.glsl
index 708bd153e84..a9ee710ec99 100644
--- a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_curves_vert.glsl
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_curves_vert.glsl
@@ -5,6 +5,7 @@
#pragma BLENDER_REQUIRE(eevee_attributes_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_nodetree_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_surf_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
void main()
{
@@ -27,6 +28,18 @@ void main()
interp.N = cross(T, interp.curves_binormal);
interp.curves_strand_id = hair_get_strand_id();
interp.barycentric_coords = hair_get_barycentric();
+#ifdef MAT_VELOCITY
+ /* Due to the screen space nature of the vertex positioning, we compute only the motion of the
+ * curve strand, not its cylinder. Otherwise we would add the rotation velocity. */
+ vec3 prv, nxt;
+ velocity_local_pos_get(pos, hair_get_base_id(), prv, nxt);
+ /* FIXME(fclem): Evaluating before displacement avoids displacement being treated as motion, but
+ * ignores motion from animated displacement. Supporting animated displacement motion vectors
+ * would require evaluating the nodetree multiple times with different nodetree UBOs evaluated at
+ * different times, but also with different attributes (maybe we could at least assume static
+ * attributes). */
+ velocity_vertex(prv, pos, nxt, motion.prev, motion.next);
+#endif
init_globals();
attrib_load();
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl
index 5b404ec5237..c60527162f7 100644
--- a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl
@@ -3,6 +3,7 @@
#pragma BLENDER_REQUIRE(common_view_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_attributes_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_surf_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
void main()
{
@@ -38,6 +39,16 @@ void main()
aspect,
thickness,
hardness);
+#ifdef MAT_VELOCITY
+ /* GPencil does not support deformation motion blur. */
+ vec3 lP_curr = transform_point(ModelMatrixInverse, interp.P);
+ /* FIXME(fclem): Evaluating before displacement avoids displacement being treated as motion, but
+ * ignores motion from animated displacement. Supporting animated displacement motion vectors
+ * would require evaluating the nodetree multiple times with different nodetree UBOs evaluated at
+ * different times, but also with different attributes (maybe we could at least assume static
+ * attributes). */
+ velocity_vertex(lP_curr, lP_curr, lP_curr, motion.prev, motion.next);
+#endif
init_globals();
attrib_load();
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl
index 7b38057f41a..c07a8ae0eea 100644
--- a/source/blender/draw/engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl
@@ -3,6 +3,7 @@
#pragma BLENDER_REQUIRE(eevee_attributes_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_nodetree_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_surf_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
void main()
{
@@ -10,6 +11,16 @@ void main()
interp.P = point_object_to_world(pos);
interp.N = normal_object_to_world(nor);
+#ifdef MAT_VELOCITY
+ vec3 prv, nxt;
+ velocity_local_pos_get(pos, gl_VertexID, prv, nxt);
+ /* FIXME(fclem): Evaluating before displacement avoids displacement being treated as motion, but
+ * ignores motion from animated displacement. Supporting animated displacement motion vectors
+ * would require evaluating the nodetree multiple times with different nodetree UBOs evaluated at
+ * different times, but also with different attributes (maybe we could at least assume static
+ * attributes). */
+ velocity_vertex(prv, pos, nxt, motion.prev, motion.next);
+#endif
init_globals();
attrib_load();
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_surf_depth_frag.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_surf_depth_frag.glsl
index 002eed91130..7ddf941df7c 100644
--- a/source/blender/draw/engines/eevee_next/shaders/eevee_surf_depth_frag.glsl
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_surf_depth_frag.glsl
@@ -8,6 +8,7 @@
#pragma BLENDER_REQUIRE(common_hair_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_nodetree_lib.glsl)
#pragma BLENDER_REQUIRE(eevee_surf_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
/* From the paper "Hashed Alpha Testing" by Chris Wyman and Morgan McGuire. */
float hash(vec2 a)
@@ -69,4 +70,16 @@ void main()
discard;
}
#endif
+
+#ifdef MAT_VELOCITY
+ vec4 out_velocity_camera; /* TODO(fclem): Panoramic cameras. */
+ velocity_camera(interp.P + motion.prev,
+ interp.P,
+ interp.P - motion.next,
+ out_velocity_camera,
+ out_velocity_view);
+
+ /* For testing in viewport. */
+ out_velocity_view.zw = vec2(0.0);
+#endif
}
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_lib.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_lib.glsl
new file mode 100644
index 00000000000..435ae6658c9
--- /dev/null
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_lib.glsl
@@ -0,0 +1,101 @@
+
+#pragma BLENDER_REQUIRE(common_view_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_camera_lib.glsl)
+
+#ifdef VELOCITY_CAMERA
+
+/**
+ * Given a triple of positions, compute the previous and next motion vectors.
+ * Returns UV-space motion vectors packed as (motion_prev.xy, motion_next.xy).
+ */
+vec4 velocity_view(vec3 P_prev, vec3 P, vec3 P_next)
+{
+ vec2 prev_uv, curr_uv, next_uv;
+
+ prev_uv = transform_point(ProjectionMatrix, transform_point(camera_prev.viewmat, P_prev)).xy;
+ curr_uv = transform_point(ViewProjectionMatrix, P).xy;
+ next_uv = transform_point(ProjectionMatrix, transform_point(camera_next.viewmat, P_next)).xy;
+
+ vec4 motion;
+ motion.xy = prev_uv - curr_uv;
+ motion.zw = curr_uv - next_uv;
+ /* Convert NDC velocity to UV velocity */
+ motion *= 0.5;
+
+ return motion;
+}
+
+/**
+ * Given a triple of positions, compute the previous and next motion vectors.
+ * Returns UV-space motion vectors packed as (motion_prev.xy, motion_next.xy).
+ * \a vel_camera is the motion in film UV space after camera projection.
+ * \a vel_view is the motion in ShadingView UV space. It differs from
+ * vel_camera for multi-view rendering.
+ */
+void velocity_camera(vec3 P_prev, vec3 P, vec3 P_next, out vec4 vel_camera, out vec4 vel_view)
+{
+ vec2 prev_uv, curr_uv, next_uv;
+ prev_uv = camera_uv_from_world(camera_prev, P_prev);
+ curr_uv = camera_uv_from_world(camera_curr, P);
+ next_uv = camera_uv_from_world(camera_next, P_next);
+
+ vel_camera.xy = prev_uv - curr_uv;
+ vel_camera.zw = curr_uv - next_uv;
+
+ if (is_panoramic(camera_curr.type)) {
+ /* This path is only used with panoramic projections. Since the views always have
+ * the same 45° aperture angle, we can safely reuse the projection matrix. */
+ prev_uv = transform_point(ProjectionMatrix, transform_point(camera_prev.viewmat, P_prev)).xy;
+ curr_uv = transform_point(ViewProjectionMatrix, P).xy;
+ next_uv = transform_point(ProjectionMatrix, transform_point(camera_next.viewmat, P_next)).xy;
+
+ vel_view.xy = prev_uv - curr_uv;
+ vel_view.zw = curr_uv - next_uv;
+ /* Convert NDC velocity to UV velocity */
+ vel_view *= 0.5;
+ }
+ else {
+ vel_view = vel_camera;
+ }
+}
+
+#endif
+
+#ifdef MAT_VELOCITY
+
+/**
+ * Fetch the previous and next deformed local positions of a vertex from the velocity
+ * geometry buffers, falling back to the current position if the geometry is not deformed.
+ */
+void velocity_local_pos_get(vec3 lP, int vert_id, out vec3 lP_prev, out vec3 lP_next)
+{
+ VelocityIndex vel = velocity_indirection_buf[resource_id];
+ lP_next = lP_prev = lP;
+ if (vel.geo.do_deform) {
+ if (vel.geo.ofs[STEP_PREVIOUS] != -1) {
+ lP_prev = velocity_geo_prev_buf[vel.geo.ofs[STEP_PREVIOUS] + vert_id].xyz;
+ }
+ if (vel.geo.ofs[STEP_NEXT] != -1) {
+ lP_next = velocity_geo_next_buf[vel.geo.ofs[STEP_NEXT] + vert_id].xyz;
+ }
+ }
+}
+
+/**
+ * Given a triple of local positions, compute the previous and next motion vectors.
+ * Outputs a pair of world-space motion deltas.
+ */
+void velocity_vertex(
+ vec3 lP_prev, vec3 lP, vec3 lP_next, out vec3 motion_prev, out vec3 motion_next)
+{
+ VelocityIndex vel = velocity_indirection_buf[resource_id];
+ mat4 obmat_prev = velocity_obj_prev_buf[vel.obj.ofs[STEP_PREVIOUS]];
+ mat4 obmat_next = velocity_obj_next_buf[vel.obj.ofs[STEP_NEXT]];
+ vec3 P_prev = transform_point(obmat_prev, lP_prev);
+ vec3 P_next = transform_point(obmat_next, lP_next);
+ vec3 P = transform_point(ModelMatrix, lP);
+ motion_prev = P_prev - P;
+ motion_next = P_next - P;
+}
+
+#endif
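
For reference, the `* 0.5` factors in velocity_view() above come from the NDC-to-UV remapping: since uv = ndc * 0.5 + 0.5, the constant offset cancels when differencing two projected points and only the 0.5 scale remains. A minimal sketch of that step (illustration only, not part of the patch):

/* uv = ndc * 0.5 + 0.5, so a motion delta between two projected
 * points only picks up the 0.5 scale. */
vec2 ndc_delta_to_uv_delta(vec2 ndc_a, vec2 ndc_b)
{
  return (ndc_a - ndc_b) * 0.5;
}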
diff --git a/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_resolve_comp.glsl b/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_resolve_comp.glsl
new file mode 100644
index 00000000000..b68b2eaf117
--- /dev/null
+++ b/source/blender/draw/engines/eevee_next/shaders/eevee_velocity_resolve_comp.glsl
@@ -0,0 +1,58 @@
+
+/**
+ * Fullscreen pass that computes motion vectors for static geometry.
+ * Animated geometry has already written correct motion vectors.
+ */
+
+#pragma BLENDER_REQUIRE(common_view_lib.glsl)
+#pragma BLENDER_REQUIRE(eevee_velocity_lib.glsl)
+
+#define is_valid_output(img_) (imageSize(img_).x > 1)
+
+void main()
+{
+ ivec2 texel = ivec2(gl_GlobalInvocationID.xy);
+ vec4 motion = imageLoad(velocity_view_img, texel);
+
+ bool pixel_has_valid_motion = (motion.x != VELOCITY_INVALID);
+ float depth = texelFetch(depth_tx, texel, 0).r;
+ bool is_background = (depth == 1.0f);
+
+ vec2 uv = vec2(texel) * drw_view.viewport_size_inverse;
+ vec3 P_next, P_prev, P_curr;
+
+ if (pixel_has_valid_motion) {
+ /* Animated geometry. View motion already computed during prepass. Convert only to camera. */
+ // P_prev = get_world_space_from_depth(uv + motion.xy, 0.5);
+ // P_curr = get_world_space_from_depth(uv, 0.5);
+ // P_next = get_world_space_from_depth(uv + motion.zw, 0.5);
+ return;
+ }
+ else if (is_background) {
+ /* NOTE: Use viewCameraVec to avoid imprecision if camera is far from origin. */
+ vec3 vV = viewCameraVec(get_view_space_from_depth(uv, 1.0));
+ vec3 V = transform_direction(ViewMatrixInverse, vV);
+ /* Background has no motion under camera translation. Translate view vector with the camera. */
+ /* WATCH(fclem): Might create precision issues. */
+ P_next = camera_next.viewinv[3].xyz + V;
+ P_curr = camera_curr.viewinv[3].xyz + V;
+ P_prev = camera_prev.viewinv[3].xyz + V;
+ }
+ else {
+ /* Static geometry. No translation in world space. */
+ P_curr = get_world_space_from_depth(uv, depth);
+ P_prev = P_curr;
+ P_next = P_curr;
+ }
+
+ vec4 vel_camera, vel_view;
+ velocity_camera(P_prev, P_curr, P_next, vel_camera, vel_view);
+
+ if (in_texture_range(texel, depth_tx)) {
+ imageStore(velocity_view_img, texel, vel_view);
+
+ if (is_valid_output(velocity_camera_img)) {
+ imageStore(velocity_camera_img, texel, vel_camera);
+ }
+ }
+}
diff --git a/source/blender/draw/engines/eevee_next/shaders/infos/eevee_material_info.hh b/source/blender/draw/engines/eevee_next/shaders/infos/eevee_material_info.hh
index 12b8e085455..49250b5741e 100644
--- a/source/blender/draw/engines/eevee_next/shaders/infos/eevee_material_info.hh
+++ b/source/blender/draw/engines/eevee_next/shaders/infos/eevee_material_info.hh
@@ -22,6 +22,7 @@ GPU_SHADER_CREATE_INFO(eevee_sampling_data)
* \{ */
GPU_SHADER_CREATE_INFO(eevee_geom_mesh)
+ .additional_info("eevee_shared")
.define("MAT_GEOM_MESH")
.vertex_in(0, Type::VEC3, "pos")
.vertex_in(1, Type::VEC3, "nor")
@@ -29,16 +30,19 @@ GPU_SHADER_CREATE_INFO(eevee_geom_mesh)
.additional_info("draw_mesh", "draw_resource_id_varying", "draw_resource_handle");
GPU_SHADER_CREATE_INFO(eevee_geom_gpencil)
+ .additional_info("eevee_shared")
.define("MAT_GEOM_GPENCIL")
.vertex_source("eevee_geom_gpencil_vert.glsl")
.additional_info("draw_gpencil", "draw_resource_id_varying", "draw_resource_handle");
GPU_SHADER_CREATE_INFO(eevee_geom_curves)
+ .additional_info("eevee_shared")
.define("MAT_GEOM_CURVES")
.vertex_source("eevee_geom_curves_vert.glsl")
.additional_info("draw_hair", "draw_resource_id_varying", "draw_resource_handle");
GPU_SHADER_CREATE_INFO(eevee_geom_world)
+ .additional_info("eevee_shared")
.define("MAT_GEOM_WORLD")
.builtins(BuiltinBits::VERTEX_ID)
.vertex_source("eevee_geom_world_vert.glsl")
diff --git a/source/blender/draw/engines/eevee_next/shaders/infos/eevee_velocity_info.hh b/source/blender/draw/engines/eevee_next/shaders/infos/eevee_velocity_info.hh
new file mode 100644
index 00000000000..a5f16363466
--- /dev/null
+++ b/source/blender/draw/engines/eevee_next/shaders/infos/eevee_velocity_info.hh
@@ -0,0 +1,55 @@
+
+#include "gpu_shader_create_info.hh"
+
+/* -------------------------------------------------------------------- */
+/** \name Surface Velocity
+ *
+ * Combined with the depth prepass shader.
+ * Outputs the view motion vectors for animated objects.
+ * \{ */
+
+/* Pass world space deltas to the fragment shader.
+ * This is to make sure that the resulting motion vectors are valid even with displacement. */
+GPU_SHADER_INTERFACE_INFO(eevee_velocity_surface_iface, "motion")
+ .smooth(Type::VEC3, "prev")
+ .smooth(Type::VEC3, "next");
+
+GPU_SHADER_CREATE_INFO(eevee_velocity_camera)
+ .define("VELOCITY_CAMERA")
+ .uniform_buf(1, "CameraData", "camera_prev")
+ .uniform_buf(2, "CameraData", "camera_curr")
+ .uniform_buf(3, "CameraData", "camera_next");
+
+GPU_SHADER_CREATE_INFO(eevee_velocity_geom)
+ .define("MAT_VELOCITY")
+ .auto_resource_location(true)
+ .storage_buf(4, Qualifier::READ, "mat4", "velocity_obj_prev_buf[]", Frequency::PASS)
+ .storage_buf(5, Qualifier::READ, "mat4", "velocity_obj_next_buf[]", Frequency::PASS)
+ .storage_buf(6, Qualifier::READ, "vec4", "velocity_geo_prev_buf[]", Frequency::PASS)
+ .storage_buf(7, Qualifier::READ, "vec4", "velocity_geo_next_buf[]", Frequency::PASS)
+ .storage_buf(
+ 7, Qualifier::READ, "VelocityIndex", "velocity_indirection_buf[]", Frequency::PASS)
+ .vertex_out(eevee_velocity_surface_iface)
+ .fragment_out(0, Type::VEC4, "out_velocity_view")
+ .additional_info("eevee_velocity_camera");
+
+/** \} */
+
+/* -------------------------------------------------------------------- */
+/** \name Velocity Resolve
+ *
+ * Computes velocity for static objects.
+ * Also converts motion to camera space (as opposed to view space) if needed.
+ * \{ */
+
+GPU_SHADER_CREATE_INFO(eevee_velocity_resolve)
+ .do_static_compilation(true)
+ .local_group_size(8, 8)
+ .sampler(0, ImageType::DEPTH_2D, "depth_tx")
+ .image(0, GPU_RG16F, Qualifier::READ_WRITE, ImageType::FLOAT_2D, "velocity_view_img")
+ .image(1, GPU_RG16F, Qualifier::WRITE, ImageType::FLOAT_2D, "velocity_camera_img")
+ .additional_info("eevee_shared")
+ .compute_source("eevee_velocity_resolve_comp.glsl")
+ .additional_info("draw_view", "eevee_velocity_camera");
+
+/** \} */