git.blender.org/blender.git
author    Clément Foucault <foucault.clem@gmail.com>  2020-06-23 14:59:55 +0300
committer Clément Foucault <foucault.clem@gmail.com>  2020-06-23 15:04:41 +0300
commit    439b40e601f8cdae9a12fc3f503e9e6acdd596d5 (patch)
tree      9a485ec18d1c9dd030ffdfe9309193adc96dc515 /source/blender/draw/engines/eevee/eevee_motion_blur.c
parent    cc3e808ab47887c002faaa8a28318a2b4f47e02a (diff)
EEVEE: Motion Blur: Add accumulation motion blur for better precision
This revisits the render pipeline to support time slicing for better motion blur.

We support accumulation with or without the post-process motion blur. When using the post-process, we reuse the last step's next motion data to avoid another scene re-evaluation.

This also adds support for hair motion blur, which is handled in a similar way as mesh motion blur.

The total number of samples is distributed evenly across all time steps to avoid sample weighting issues. For this reason, the sample count is (internally) rounded up to the next multiple of the step count.

Only FX Motion Blur: {F8632258}
FX Motion Blur + 4 time steps: {F8632260}
FX Motion Blur + 32 time steps: {F8632261}

Reviewed By: jbakker

Differential Revision: https://developer.blender.org/D8079
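As an illustration of the sample distribution described above, here is a minimal standalone C sketch; the helper name and sample values are hypothetical and not part of this patch.

#include <stdio.h>

/* Round the requested sample count up to the next multiple of the
 * time-step count, so every step receives the same number of samples. */
static int sample_count_per_step(int total_samples, int step_count)
{
  int rounded = ((total_samples + step_count - 1) / step_count) * step_count;
  return rounded / step_count;
}

int main(void)
{
  /* Example: 30 samples over 4 time steps is rounded up to 32, i.e. 8 per step. */
  printf("%d samples per step\n", sample_count_per_step(30, 4));
  return 0;
}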
Diffstat (limited to 'source/blender/draw/engines/eevee/eevee_motion_blur.c')
-rw-r--r--  source/blender/draw/engines/eevee/eevee_motion_blur.c | 302
1 file changed, 242 insertions(+), 60 deletions(-)
diff --git a/source/blender/draw/engines/eevee/eevee_motion_blur.c b/source/blender/draw/engines/eevee/eevee_motion_blur.c
index b90d575b80c..586ee780f1d 100644
--- a/source/blender/draw/engines/eevee/eevee_motion_blur.c
+++ b/source/blender/draw/engines/eevee/eevee_motion_blur.c
@@ -25,6 +25,7 @@
#include "DRW_render.h"
#include "BLI_rand.h"
+#include "BLI_string_utils.h"
#include "BKE_animsys.h"
#include "BKE_camera.h"
@@ -34,6 +35,8 @@
#include "DNA_anim_types.h"
#include "DNA_camera_types.h"
#include "DNA_mesh_types.h"
+#include "DNA_modifier_types.h"
+#include "DNA_particle_types.h"
#include "DNA_screen_types.h"
#include "ED_screen.h"
@@ -49,6 +52,7 @@ static struct {
/* Motion Blur */
struct GPUShader *motion_blur_sh;
struct GPUShader *motion_blur_object_sh;
+ struct GPUShader *motion_blur_hair_sh;
struct GPUShader *velocity_tiles_sh;
struct GPUShader *velocity_tiles_expand_sh;
} e_data = {NULL}; /* Engine data */
@@ -57,6 +61,7 @@ extern char datatoc_effect_velocity_tile_frag_glsl[];
extern char datatoc_effect_motion_blur_frag_glsl[];
extern char datatoc_object_motion_frag_glsl[];
extern char datatoc_object_motion_vert_glsl[];
+extern char datatoc_common_hair_lib_glsl[];
extern char datatoc_common_view_lib_glsl[];
#define EEVEE_VELOCITY_TILE_SIZE 32
@@ -79,9 +84,14 @@ static void eevee_create_shader_motion_blur(void)
datatoc_effect_velocity_tile_frag_glsl,
"#define TILE_EXPANSION\n"
"#define EEVEE_VELOCITY_TILE_SIZE " STRINGIFY(EEVEE_VELOCITY_TILE_SIZE) "\n");
+
+ char *vert = BLI_string_joinN(datatoc_common_hair_lib_glsl, datatoc_object_motion_vert_glsl);
+ e_data.motion_blur_hair_sh = DRW_shader_create_with_lib(
+ vert, NULL, datatoc_object_motion_frag_glsl, datatoc_common_view_lib_glsl, "#define HAIR\n");
+ MEM_freeN(vert);
}
-int EEVEE_motion_blur_init(EEVEE_ViewLayerData *UNUSED(sldata), EEVEE_Data *vedata, Object *camera)
+int EEVEE_motion_blur_init(EEVEE_ViewLayerData *UNUSED(sldata), EEVEE_Data *vedata)
{
EEVEE_StorageList *stl = vedata->stl;
EEVEE_FramebufferList *fbl = vedata->fbl;
@@ -95,7 +105,9 @@ int EEVEE_motion_blur_init(EEVEE_ViewLayerData *UNUSED(sldata), EEVEE_Data *veda
return 0;
}
- if (scene->eevee.flag & SCE_EEVEE_MOTION_BLUR_ENABLED) {
+ effects->motion_blur_max = max_ii(0, scene->eevee.motion_blur_max);
+
+ if ((effects->motion_blur_max > 0) && (scene->eevee.flag & SCE_EEVEE_MOTION_BLUR_ENABLED)) {
if (!e_data.motion_blur_sh) {
eevee_create_shader_motion_blur();
}
@@ -107,17 +119,6 @@ int EEVEE_motion_blur_init(EEVEE_ViewLayerData *UNUSED(sldata), EEVEE_Data *veda
DRW_view_persmat_get(NULL, effects->motion_blur.camera[mb_step].persinv, true);
}
- if (camera != NULL) {
- Camera *cam = camera->data;
- effects->motion_blur_near_far[0] = cam->clip_start;
- effects->motion_blur_near_far[1] = cam->clip_end;
- }
- else {
- /* Not supported yet. */
- BLI_assert(0);
- }
-
- effects->motion_blur_max = max_ii(0, scene->eevee.motion_blur_max);
const float *fs_size = DRW_viewport_size_get();
int tx_size[2] = {1 + ((int)fs_size[0] / EEVEE_VELOCITY_TILE_SIZE),
1 + ((int)fs_size[1] / EEVEE_VELOCITY_TILE_SIZE)};
@@ -146,13 +147,23 @@ int EEVEE_motion_blur_init(EEVEE_ViewLayerData *UNUSED(sldata), EEVEE_Data *veda
void EEVEE_motion_blur_step_set(EEVEE_Data *vedata, int step)
{
BLI_assert(step < 3);
- /* Meh, code duplication. Could be avoided if render init would not contain cache init. */
- if (vedata->stl->effects == NULL) {
- vedata->stl->effects = MEM_callocN(sizeof(*vedata->stl->effects), __func__);
- }
vedata->stl->effects->motion_blur_step = step;
}
+static void eevee_motion_blur_sync_camera(EEVEE_Data *vedata)
+{
+ EEVEE_EffectsInfo *effects = vedata->stl->effects;
+ if (DRW_state_is_scene_render()) {
+ int mb_step = effects->motion_blur_step;
+ DRW_view_viewmat_get(NULL, effects->motion_blur.camera[mb_step].viewmat, false);
+ DRW_view_persmat_get(NULL, effects->motion_blur.camera[mb_step].persmat, false);
+ DRW_view_persmat_get(NULL, effects->motion_blur.camera[mb_step].persinv, true);
+ }
+
+ effects->motion_blur_near_far[0] = fabsf(DRW_view_near_distance_get(NULL));
+ effects->motion_blur_near_far[1] = fabsf(DRW_view_far_distance_get(NULL));
+}
+
void EEVEE_motion_blur_cache_init(EEVEE_ViewLayerData *UNUSED(sldata), EEVEE_Data *vedata)
{
EEVEE_PassList *psl = vedata->psl;
@@ -167,6 +178,9 @@ void EEVEE_motion_blur_cache_init(EEVEE_ViewLayerData *UNUSED(sldata), EEVEE_Dat
const float *fs_size = DRW_viewport_size_get();
int tx_size[2] = {GPU_texture_width(effects->velocity_tiles_tx),
GPU_texture_height(effects->velocity_tiles_tx)};
+
+ eevee_motion_blur_sync_camera(vedata);
+
DRWShadingGroup *grp;
{
DRW_PASS_CREATE(psl->velocity_tiles_x, DRW_STATE_WRITE_COLOR);
@@ -230,6 +244,15 @@ void EEVEE_motion_blur_cache_init(EEVEE_ViewLayerData *UNUSED(sldata), EEVEE_Dat
DRW_shgroup_uniform_mat4(grp, "prevViewProjMatrix", mb_data->camera[MB_PREV].persmat);
DRW_shgroup_uniform_mat4(grp, "currViewProjMatrix", mb_data->camera[MB_CURR].persmat);
DRW_shgroup_uniform_mat4(grp, "nextViewProjMatrix", mb_data->camera[MB_NEXT].persmat);
+
+ DRW_PASS_CREATE(psl->velocity_hair, DRW_STATE_WRITE_COLOR | DRW_STATE_DEPTH_EQUAL);
+
+ mb_data->hair_grp = grp = DRW_shgroup_create(e_data.motion_blur_hair_sh, psl->velocity_hair);
+ DRW_shgroup_uniform_mat4(grp, "prevViewProjMatrix", mb_data->camera[MB_PREV].persmat);
+ DRW_shgroup_uniform_mat4(grp, "currViewProjMatrix", mb_data->camera[MB_CURR].persmat);
+ DRW_shgroup_uniform_mat4(grp, "nextViewProjMatrix", mb_data->camera[MB_NEXT].persmat);
+
+ DRW_pass_link(psl->velocity_object, psl->velocity_hair);
}
EEVEE_motion_blur_data_init(mb_data);
@@ -237,6 +260,59 @@ void EEVEE_motion_blur_cache_init(EEVEE_ViewLayerData *UNUSED(sldata), EEVEE_Dat
else {
psl->motion_blur = NULL;
psl->velocity_object = NULL;
+ psl->velocity_hair = NULL;
+ }
+}
+
+void EEVEE_motion_blur_hair_cache_populate(EEVEE_ViewLayerData *UNUSED(sldata),
+ EEVEE_Data *vedata,
+ Object *ob,
+ ParticleSystem *psys,
+ ModifierData *md)
+{
+ EEVEE_PassList *psl = vedata->psl;
+ EEVEE_StorageList *stl = vedata->stl;
+ EEVEE_EffectsInfo *effects = stl->effects;
+ DRWShadingGroup *grp = NULL;
+
+ if (!DRW_state_is_scene_render() || psl->velocity_hair == NULL) {
+ return;
+ }
+
+ /* For now we assume hair objects are always moving. */
+ EEVEE_ObjectMotionData *mb_data = EEVEE_motion_blur_object_data_get(
+ &effects->motion_blur, ob, true);
+
+ if (mb_data) {
+ int mb_step = effects->motion_blur_step;
+ /* Store transform */
+ DRW_hair_duplimat_get(ob, psys, md, mb_data->obmat[mb_step]);
+
+ EEVEE_GeometryMotionData *mb_geom = EEVEE_motion_blur_geometry_data_get(
+ &effects->motion_blur, ob, true);
+
+ if (mb_step == MB_CURR) {
+ /* Fill missing matrices if the object was hidden in previous or next frame. */
+ if (is_zero_m4(mb_data->obmat[MB_PREV])) {
+ copy_m4_m4(mb_data->obmat[MB_PREV], mb_data->obmat[MB_CURR]);
+ }
+ if (is_zero_m4(mb_data->obmat[MB_NEXT])) {
+ copy_m4_m4(mb_data->obmat[MB_NEXT], mb_data->obmat[MB_CURR]);
+ }
+
+ grp = DRW_shgroup_hair_create_sub(ob, psys, md, effects->motion_blur.hair_grp);
+ DRW_shgroup_uniform_mat4(grp, "prevModelMatrix", mb_data->obmat[MB_PREV]);
+ DRW_shgroup_uniform_mat4(grp, "currModelMatrix", mb_data->obmat[MB_CURR]);
+ DRW_shgroup_uniform_mat4(grp, "nextModelMatrix", mb_data->obmat[MB_NEXT]);
+ DRW_shgroup_uniform_texture(grp, "prvBuffer", mb_geom->hair_pos_tx[MB_PREV]);
+ DRW_shgroup_uniform_texture(grp, "nxtBuffer", mb_geom->hair_pos_tx[MB_NEXT]);
+ DRW_shgroup_uniform_bool(grp, "useDeform", &mb_geom->use_deform, 1);
+ }
+ else {
+ /* Store vertex position buffer. */
+ mb_geom->hair_pos[mb_step] = DRW_hair_pos_buffer_get(ob, psys, md);
+ mb_geom->use_deform = true;
+ }
}
}
@@ -262,15 +338,16 @@ void EEVEE_motion_blur_cache_populate(EEVEE_ViewLayerData *UNUSED(sldata),
return;
}
- EEVEE_ObjectMotionData *mb_data = EEVEE_motion_blur_object_data_get(&effects->motion_blur, ob);
+ EEVEE_ObjectMotionData *mb_data = EEVEE_motion_blur_object_data_get(
+ &effects->motion_blur, ob, false);
if (mb_data) {
int mb_step = effects->motion_blur_step;
/* Store transform */
copy_m4_m4(mb_data->obmat[mb_step], ob->obmat);
- EEVEE_GeometryMotionData *mb_geom = EEVEE_motion_blur_geometry_data_get(&effects->motion_blur,
- ob);
+ EEVEE_GeometryMotionData *mb_geom = EEVEE_motion_blur_geometry_data_get(
+ &effects->motion_blur, ob, false);
if (mb_step == MB_CURR) {
GPUBatch *batch = DRW_cache_object_surface_get(ob);
@@ -295,6 +372,17 @@ void EEVEE_motion_blur_cache_populate(EEVEE_ViewLayerData *UNUSED(sldata),
DRW_shgroup_call(grp, batch, ob);
if (mb_geom->use_deform) {
+ EEVEE_ObjectEngineData *oedata = EEVEE_object_data_ensure(ob);
+ if (!oedata->geom_update) {
+ /* FIXME(fclem) There can be false positives where the actual mesh is not updated.
+ * This avoids a crash but removes the motion blur from some objects.
+ * Maybe an issue with depsgraph tagging. */
+ mb_geom->use_deform = false;
+ oedata->geom_update = false;
+
+ GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_PREV]);
+ GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_NEXT]);
+ }
/* Keep to modify later (after init). */
mb_geom->batch = batch;
}
@@ -321,52 +409,151 @@ void EEVEE_motion_blur_cache_finish(EEVEE_Data *vedata)
return;
}
+ int mb_step = effects->motion_blur_step;
+
+ if (mb_step != MB_CURR) {
+ /* Push instance attributes to the GPU. */
+ DRW_render_instance_buffer_finish();
+
+ /* Need to be called after DRW_render_instance_buffer_finish() */
+ /* Also we need to have a correct fbo bound for DRW_hair_update */
+ GPU_framebuffer_bind(vedata->fbl->main_fb);
+ DRW_hair_update();
+
+ DRW_cache_restart();
+ }
+
for (BLI_ghashIterator_init(&ghi, effects->motion_blur.geom);
BLI_ghashIterator_done(&ghi) == false;
BLI_ghashIterator_step(&ghi)) {
EEVEE_GeometryMotionData *mb_geom = BLI_ghashIterator_getValue(&ghi);
- int mb_step = effects->motion_blur_step;
-
if (!mb_geom->use_deform) {
continue;
}
- if (mb_step == MB_CURR) {
- /* Modify batch to have data from adjacent frames. */
- GPUBatch *batch = mb_geom->batch;
- for (int i = 0; i < MB_CURR; i++) {
- GPUVertBuf *vbo = mb_geom->vbo[i];
- if (vbo && batch) {
- if (vbo->vertex_len != batch->verts[0]->vertex_len) {
- /* Vertex count mismatch, disable deform motion blur. */
- mb_geom->use_deform = false;
- GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_PREV]);
- GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_NEXT]);
- break;
+ switch (mb_geom->type) {
+ case EEVEE_HAIR_GEOM_MOTION_DATA:
+ if (mb_step == MB_CURR) {
+ /* TODO(fclem) Check for vertex count mismatch. */
+ mb_geom->use_deform = true;
+ }
+ else {
+ mb_geom->hair_pos[mb_step] = GPU_vertbuf_duplicate(mb_geom->hair_pos[mb_step]);
+
+ /* Create vbo immediately to bind to texture buffer. */
+ GPU_vertbuf_use(mb_geom->hair_pos[mb_step]);
+
+ mb_geom->hair_pos_tx[mb_step] = GPU_texture_create_from_vertbuf(
+ mb_geom->hair_pos[mb_step]);
+ }
+ break;
+
+ case EEVEE_MESH_GEOM_MOTION_DATA:
+ if (mb_step == MB_CURR) {
+ /* Modify batch to have data from adjacent frames. */
+ GPUBatch *batch = mb_geom->batch;
+ for (int i = 0; i < MB_CURR; i++) {
+ GPUVertBuf *vbo = mb_geom->vbo[i];
+ if (vbo && batch) {
+ if (vbo->vertex_len != batch->verts[0]->vertex_len) {
+ /* Vertex count mismatch, disable deform motion blur. */
+ mb_geom->use_deform = false;
+ }
+
+ if (mb_geom->use_deform == false) {
+ GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_PREV]);
+ GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_NEXT]);
+ break;
+ }
+ else {
+ /* Modify the batch to include the previous & next position. */
+ if (i == MB_PREV) {
+ GPU_batch_vertbuf_add_ex(batch, vbo, true);
+ mb_geom->vbo[i] = NULL;
+ }
+ else {
+ /* This VBO can be reused by the next time step. Don't pass ownership. */
+ GPU_batch_vertbuf_add_ex(batch, vbo, false);
+ }
+ }
+ }
}
- /* Modify the batch to include the previous position. */
- GPU_batch_vertbuf_add_ex(batch, vbo, true);
- /* TODO(fclem) keep the vbo around for next (sub)frames. */
- /* Only do once. */
- mb_geom->vbo[i] = NULL;
}
- }
+ else {
+ GPUVertBuf *vbo = mb_geom->vbo[mb_step];
+ /* If this assert fails, it means that different EEVEE_GeometryMotionDatas
+ * have been used for each motion blur step. */
+ BLI_assert(vbo);
+ if (vbo) {
+ /* Use the vbo to perform the copy on the GPU. */
+ GPU_vertbuf_use(vbo);
+ /* Perform a copy to avoid losing it after RE_engine_frame_set(). */
+ mb_geom->vbo[mb_step] = vbo = GPU_vertbuf_duplicate(vbo);
+ /* Find and replace "pos" attrib name. */
+ int attrib_id = GPU_vertformat_attr_id_get(&vbo->format, "pos");
+ GPU_vertformat_attr_rename(
+ &vbo->format, attrib_id, (mb_step == MB_PREV) ? "prv" : "nxt");
+ }
+ }
+ break;
+
+ default:
+ BLI_assert(0);
+ break;
}
- else {
- GPUVertBuf *vbo = mb_geom->vbo[mb_step];
- /* If this assert fails, it means that different EEVEE_GeometryMotionDatas
- * has been used for each motion blur step. */
- BLI_assert(vbo);
- if (vbo) {
- /* Use the vbo to perform the copy on the GPU. */
- GPU_vertbuf_use(vbo);
- /* Perform a copy to avoid loosing it after RE_engine_frame_set(). */
- mb_geom->vbo[mb_step] = vbo = GPU_vertbuf_duplicate(vbo);
- /* Find and replace "pos" attrib name. */
- int attrib_id = GPU_vertformat_attr_id_get(&vbo->format, "pos");
- GPU_vertformat_attr_rename(&vbo->format, attrib_id, (mb_step == MB_PREV) ? "prv" : "nxt");
- }
+ }
+}
+
+void EEVEE_motion_blur_swap_data(EEVEE_Data *vedata)
+{
+ EEVEE_StorageList *stl = vedata->stl;
+ EEVEE_EffectsInfo *effects = stl->effects;
+
+ GHashIterator ghi;
+
+ BLI_assert((effects->enabled_effects & EFFECT_MOTION_BLUR) != 0);
+
+ /* Camera Data. */
+ effects->motion_blur.camera[MB_PREV] = effects->motion_blur.camera[MB_CURR];
+
+ /* Object Data. */
+ for (BLI_ghashIterator_init(&ghi, effects->motion_blur.object);
+ BLI_ghashIterator_done(&ghi) == false;
+ BLI_ghashIterator_step(&ghi)) {
+ EEVEE_ObjectMotionData *mb_data = BLI_ghashIterator_getValue(&ghi);
+
+ copy_m4_m4(mb_data->obmat[MB_PREV], mb_data->obmat[MB_NEXT]);
+ }
+
+ /* Deformation Data. */
+ for (BLI_ghashIterator_init(&ghi, effects->motion_blur.geom);
+ BLI_ghashIterator_done(&ghi) == false;
+ BLI_ghashIterator_step(&ghi)) {
+ EEVEE_GeometryMotionData *mb_geom = BLI_ghashIterator_getValue(&ghi);
+
+ switch (mb_geom->type) {
+ case EEVEE_HAIR_GEOM_MOTION_DATA:
+ GPU_VERTBUF_DISCARD_SAFE(mb_geom->hair_pos[MB_PREV]);
+ DRW_TEXTURE_FREE_SAFE(mb_geom->hair_pos_tx[MB_PREV]);
+ mb_geom->hair_pos[MB_PREV] = mb_geom->hair_pos[MB_NEXT];
+ mb_geom->hair_pos_tx[MB_PREV] = mb_geom->hair_pos_tx[MB_NEXT];
+ break;
+
+ case EEVEE_MESH_GEOM_MOTION_DATA:
+ GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_PREV]);
+ mb_geom->vbo[MB_PREV] = mb_geom->vbo[MB_NEXT];
+
+ if (mb_geom->vbo[MB_NEXT]) {
+ GPUVertBuf *vbo = mb_geom->vbo[MB_NEXT];
+ int attrib_id = GPU_vertformat_attr_id_get(&vbo->format, "nxt");
+ GPU_vertformat_attr_rename(&vbo->format, attrib_id, "prv");
+ }
+ break;
+
+ default:
+ BLI_assert(0);
+ break;
}
}
}
@@ -381,12 +568,6 @@ void EEVEE_motion_blur_draw(EEVEE_Data *vedata)
/* Motion Blur */
if ((effects->enabled_effects & EFFECT_MOTION_BLUR) != 0) {
- int sample = DRW_state_is_image_render() ? effects->taa_render_sample :
- effects->taa_current_sample;
- double r;
- BLI_halton_1d(2, 0.0, sample - 1, &r);
- effects->motion_blur_sample_offset = r;
-
/* Create velocity max tiles in 2 passes. One for each dimension. */
GPU_framebuffer_bind(fbl->velocity_tiles_fb[0]);
DRW_draw_pass(psl->velocity_tiles_x);
@@ -421,6 +602,7 @@ void EEVEE_motion_blur_free(void)
{
DRW_SHADER_FREE_SAFE(e_data.motion_blur_sh);
DRW_SHADER_FREE_SAFE(e_data.motion_blur_object_sh);
+ DRW_SHADER_FREE_SAFE(e_data.motion_blur_hair_sh);
DRW_SHADER_FREE_SAFE(e_data.velocity_tiles_sh);
DRW_SHADER_FREE_SAFE(e_data.velocity_tiles_expand_sh);
}