From e1113a83ccb6bb30e3ad865956fe9eed0d650394 Mon Sep 17 00:00:00 2001
From: Marton Balint
Date: Sun, 10 Dec 2017 00:29:12 +0100
Subject: avfilter/vf_framerate: fix scene change detection score

- normalize score to [0..100] instead of [0..85]
- change the default score to 8.2 to roughly keep existing behaviour
- take into account bit depth
- do not truncate to integer

Signed-off-by: Marton Balint
---
 libavfilter/vf_framerate.c | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/libavfilter/vf_framerate.c b/libavfilter/vf_framerate.c
index 1cad2305ad..dd106f8e5b 100644
--- a/libavfilter/vf_framerate.c
+++ b/libavfilter/vf_framerate.c
@@ -88,7 +88,7 @@ static const AVOption framerate_options[] = {
     {"interp_start", "point to start linear interpolation", OFFSET(interp_start), AV_OPT_TYPE_INT,    {.i64=15},  0, 255,     V|F },
     {"interp_end",   "point to end linear interpolation",   OFFSET(interp_end),   AV_OPT_TYPE_INT,    {.i64=240}, 0, 255,     V|F },
-    {"scene",        "scene change level",                  OFFSET(scene_score),  AV_OPT_TYPE_DOUBLE, {.dbl=7.0}, 0, INT_MAX, V|F },
+    {"scene",        "scene change level",                  OFFSET(scene_score),  AV_OPT_TYPE_DOUBLE, {.dbl=8.2}, 0, INT_MAX, V|F },

     {"flags",               "set flags",                     OFFSET(flags), AV_OPT_TYPE_FLAGS, {.i64=1},                  0,       INT_MAX, V|F, "flags" },
     {"scene_change_detect", "enable scene change detection", 0,             AV_OPT_TYPE_CONST, {.i64=FRAMERATE_FLAG_SCD}, INT_MIN, INT_MAX, V|F, "flags" },
@@ -183,7 +183,7 @@ static double get_scene_score(AVFilterContext *ctx, AVFrame *crnt, AVFrame *next
     else
         sad = scene_sad16(s, (const uint16_t*)crnt->data[0], crnt->linesize[0] >> 1, (const uint16_t*)next->data[0], next->linesize[0] >> 1, crnt->height);

-    mafd = sad / (crnt->height * crnt->width * 3);
+    mafd = (double)sad * 100.0 / (crnt->height * crnt->width) / (1 << s->bitdepth);
     diff = fabs(mafd - s->prev_mafd);
     ret  = av_clipf(FFMIN(mafd, diff), 0, 100.0);
     s->prev_mafd = mafd;
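
Where the new default comes from: with 8-bit input the old score, sad / (width * height * 3), peaks near 255 / 3 = 85, hence the [0..85] range named in the log. The new score, sad * 100 / (width * height) / (1 << bitdepth), rescales that by 300 / 256 for 8-bit material, and 7.0 * 300 / 256 ≈ 8.2 is the old default mapped onto the new [0..100] scale. The new expression also evaluates in floating point (the old one divided integers before converting to double, truncating the result) and divides by 1 << bitdepth so that 10- and 12-bit input lands on the same scale.

Below is a minimal standalone sketch of the new computation. scene_score(), clipd() and the parameter list are invented for illustration; the real filter uses av_clipf() and FFMIN and keeps prev_mafd and bitdepth in its context struct.

    #include <math.h>
    #include <stdint.h>
    #include <stdio.h>

    /* stand-in for av_clipf() so the sketch compiles outside the FFmpeg tree */
    static double clipd(double x, double lo, double hi)
    {
        return x < lo ? lo : (x > hi ? hi : x);
    }

    /*
     * sad:           sum of absolute luma differences between the two frames
     * width, height: luma plane dimensions
     * bitdepth:      bits per sample (8 for yuv420p, 10 for yuv420p10le, ...)
     * prev_mafd:     MAFD of the previous frame pair, updated in place
     *
     * Returns a score in [0..100]; the filter treats scores above the
     * "scene" option (8.2 by default after this patch) as a scene change.
     */
    static double scene_score(uint64_t sad, int width, int height,
                              int bitdepth, double *prev_mafd)
    {
        /* mean absolute frame difference, scaled to [0..100] regardless of bit depth */
        double mafd = (double)sad * 100.0 / ((double)width * height) / (1 << bitdepth);
        /* how much the difference level moved versus the previous frame pair */
        double diff = fabs(mafd - *prev_mafd);
        double ret  = clipd(fmin(mafd, diff), 0.0, 100.0);

        *prev_mafd = mafd;
        return ret;
    }

    int main(void)
    {
        double prev_mafd = 0.0;
        /* a 1920x1080 8-bit frame pair differing by ~21 per pixel on average
         * scores about 8.2, i.e. right at the new default threshold */
        uint64_t sad = (uint64_t)21 * 1920 * 1080;
        printf("score = %.2f\n", scene_score(sad, 1920, 1080, 8, &prev_mafd));
        return 0;
    }

Note the effect of FFMIN(mafd, diff) in the real code (fmin above): sustained high-motion footage keeps mafd high but diff low, so only a jump in the difference level, as at a hard cut, produces a high score.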