git.blender.org/blender.git
author     Sergey Sharybin <sergey.vfx@gmail.com>    2012-06-18 14:29:11 +0400
committer  Sergey Sharybin <sergey.vfx@gmail.com>    2012-06-18 14:29:11 +0400
commit     0d64e050ea10ef9887323613c2fc6b429ebd53c9 (patch)
tree       8954ec1670200f2f15e8379c9498e9f8a529802e
parent     5e6e9bd616840cb1c9d4f41d333540f5294548e9 (diff)
Reduce amount of deprecated symbols used from FFmpeg
This switches FFmpeg-related areas of Blender from deprecated symbols to the ones that are currently supported. The changes are straightforward and follow FFmpeg's API documentation on which symbols should now be used. This should make Blender compatible with the recent FFmpeg 0.11. There should be no functional changes.
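
For reference, a minimal sketch (not part of the patch) of the demuxer open/close migration applied throughout the tree: av_open_input_file(), av_close_input_file() and av_close_input_stream() are replaced by avformat_open_input() and avformat_close_input(). The probe_file() helper below is hypothetical and only illustrates the calling convention.

#include <libavformat/avformat.h>

/* Hypothetical helper showing the new open/close pair. */
static int probe_file(const char *filename)
{
	AVFormatContext *ctx = NULL;  /* must be NULL (or pre-allocated) before avformat_open_input() */

	av_register_all();  /* Blender does this once, in do_init_ffmpeg() */

	if (avformat_open_input(&ctx, filename, NULL, NULL) != 0)
		return -1;

	/* ... inspect ctx->streams here ... */

	avformat_close_input(&ctx);  /* frees the context and resets ctx to NULL */
	return 0;
}
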
-rw-r--r--  intern/audaspace/ffmpeg/AUD_FFMPEGReader.cpp     16
-rw-r--r--  intern/audaspace/ffmpeg/AUD_FFMPEGWriter.cpp     10
-rw-r--r--  intern/ffmpeg/ffmpeg_compat.h                    15
-rw-r--r--  source/blender/blenkernel/intern/writeffmpeg.c   34
-rw-r--r--  source/blender/imbuf/intern/anim_movie.c          4
-rw-r--r--  source/blender/imbuf/intern/indexer.c            16
-rw-r--r--  source/blender/imbuf/intern/util.c                4
-rw-r--r--  source/gameengine/VideoTexture/VideoFFmpeg.cpp   46
-rw-r--r--  source/gameengine/VideoTexture/VideoFFmpeg.h      6
9 files changed, 87 insertions(+), 64 deletions(-)
diff --git a/intern/audaspace/ffmpeg/AUD_FFMPEGReader.cpp b/intern/audaspace/ffmpeg/AUD_FFMPEGReader.cpp
index 6553073c54e..28a14a9cfc7 100644
--- a/intern/audaspace/ffmpeg/AUD_FFMPEGReader.cpp
+++ b/intern/audaspace/ffmpeg/AUD_FFMPEGReader.cpp
@@ -143,23 +143,23 @@ void AUD_FFMPEGReader::init()
switch(m_codecCtx->sample_fmt)
{
- case SAMPLE_FMT_U8:
+ case AV_SAMPLE_FMT_U8:
m_convert = AUD_convert_u8_float;
m_specs.format = AUD_FORMAT_U8;
break;
- case SAMPLE_FMT_S16:
+ case AV_SAMPLE_FMT_S16:
m_convert = AUD_convert_s16_float;
m_specs.format = AUD_FORMAT_S16;
break;
- case SAMPLE_FMT_S32:
+ case AV_SAMPLE_FMT_S32:
m_convert = AUD_convert_s32_float;
m_specs.format = AUD_FORMAT_S32;
break;
- case SAMPLE_FMT_FLT:
+ case AV_SAMPLE_FMT_FLT:
m_convert = AUD_convert_copy<float>;
m_specs.format = AUD_FORMAT_FLOAT32;
break;
- case SAMPLE_FMT_DBL:
+ case AV_SAMPLE_FMT_DBL:
m_convert = AUD_convert_double_float;
m_specs.format = AUD_FORMAT_FLOAT64;
break;
@@ -189,7 +189,7 @@ AUD_FFMPEGReader::AUD_FFMPEGReader(std::string filename) :
}
catch(AUD_Exception&)
{
- av_close_input_file(m_formatCtx);
+ avformat_close_input(&m_formatCtx);
throw;
}
}
@@ -227,7 +227,7 @@ AUD_FFMPEGReader::AUD_FFMPEGReader(AUD_Reference<AUD_Buffer> buffer) :
}
catch(AUD_Exception&)
{
- av_close_input_stream(m_formatCtx);
+ avformat_close_input(&m_formatCtx);
av_free(m_aviocontext);
throw;
}
@@ -239,7 +239,7 @@ AUD_FFMPEGReader::~AUD_FFMPEGReader()
if(m_aviocontext)
{
- av_close_input_stream(m_formatCtx);
+ avformat_close_input(&m_formatCtx);
av_free(m_aviocontext);
}
else
diff --git a/intern/audaspace/ffmpeg/AUD_FFMPEGWriter.cpp b/intern/audaspace/ffmpeg/AUD_FFMPEGWriter.cpp
index 702c366c4df..2b34348da81 100644
--- a/intern/audaspace/ffmpeg/AUD_FFMPEGWriter.cpp
+++ b/intern/audaspace/ffmpeg/AUD_FFMPEGWriter.cpp
@@ -133,23 +133,23 @@ AUD_FFMPEGWriter::AUD_FFMPEGWriter(std::string filename, AUD_DeviceSpecs specs,
{
case AUD_FORMAT_U8:
m_convert = AUD_convert_float_u8;
- m_codecCtx->sample_fmt = SAMPLE_FMT_U8;
+ m_codecCtx->sample_fmt = AV_SAMPLE_FMT_U8;
break;
case AUD_FORMAT_S16:
m_convert = AUD_convert_float_s16;
- m_codecCtx->sample_fmt = SAMPLE_FMT_S16;
+ m_codecCtx->sample_fmt = AV_SAMPLE_FMT_S16;
break;
case AUD_FORMAT_S32:
m_convert = AUD_convert_float_s32;
- m_codecCtx->sample_fmt = SAMPLE_FMT_S32;
+ m_codecCtx->sample_fmt = AV_SAMPLE_FMT_S32;
break;
case AUD_FORMAT_FLOAT32:
m_convert = AUD_convert_copy<float>;
- m_codecCtx->sample_fmt = SAMPLE_FMT_FLT;
+ m_codecCtx->sample_fmt = AV_SAMPLE_FMT_FLT;
break;
case AUD_FORMAT_FLOAT64:
m_convert = AUD_convert_float_double;
- m_codecCtx->sample_fmt = SAMPLE_FMT_DBL;
+ m_codecCtx->sample_fmt = AV_SAMPLE_FMT_DBL;
break;
default:
AUD_THROW(AUD_ERROR_FFMPEG, format_error);
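
Both audaspace hunks above are pure renames: the old SAMPLE_FMT_* values from libavcodec became AV_SAMPLE_FMT_* in enum AVSampleFormat (libavutil/samplefmt.h). A sketch (not part of the patch) listing the renamed values handled here:

#include <libavutil/samplefmt.h>

/* The sample formats audaspace converts, under their new names. */
static const enum AVSampleFormat aud_handled_formats[] = {
	AV_SAMPLE_FMT_U8,   /* was SAMPLE_FMT_U8  */
	AV_SAMPLE_FMT_S16,  /* was SAMPLE_FMT_S16 */
	AV_SAMPLE_FMT_S32,  /* was SAMPLE_FMT_S32 */
	AV_SAMPLE_FMT_FLT,  /* was SAMPLE_FMT_FLT */
	AV_SAMPLE_FMT_DBL,  /* was SAMPLE_FMT_DBL */
};
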
diff --git a/intern/ffmpeg/ffmpeg_compat.h b/intern/ffmpeg/ffmpeg_compat.h
index 9dbbb46ce40..703c528bdea 100644
--- a/intern/ffmpeg/ffmpeg_compat.h
+++ b/intern/ffmpeg/ffmpeg_compat.h
@@ -40,7 +40,6 @@
#endif
#include <libswscale/swscale.h>
-#include <libavcodec/opt.h>
#if (LIBAVFORMAT_VERSION_MAJOR > 52) || ((LIBAVFORMAT_VERSION_MAJOR >= 52) && (LIBAVFORMAT_VERSION_MINOR >= 105))
#define FFMPEG_HAVE_AVIO 1
@@ -76,6 +75,20 @@
#define FFMPEG_FFV1_ALPHA_SUPPORTED
#endif
+#if ((LIBAVFORMAT_VERSION_MAJOR < 53) || ((LIBAVFORMAT_VERSION_MAJOR == 53) && (LIBAVFORMAT_VERSION_MINOR < 24)) || ((LIBAVFORMAT_VERSION_MAJOR == 53) && (LIBAVFORMAT_VERSION_MINOR < 24) && (LIBAVFORMAT_VERSION_MICRO < 2)))
+#define avformat_close_input(x) av_close_input_file(*(x))
+#endif
+
+#if ((LIBAVFORMAT_VERSION_MAJOR > 53) || ((LIBAVFORMAT_VERSION_MAJOR == 53) && (LIBAVFORMAT_VERSION_MINOR > 32)) || ((LIBAVFORMAT_VERSION_MAJOR == 53) && (LIBAVFORMAT_VERSION_MINOR == 24) && (LIBAVFORMAT_VERSION_MICRO >= 100)))
+void ff_update_cur_dts(AVFormatContext *s, AVStream *ref_st, int64_t timestamp);
+
+static inline
+void av_update_cur_dts(AVFormatContext *s, AVStream *ref_st, int64_t timestamp)
+{
+ ff_update_cur_dts(s, ref_st, timestamp);
+}
+#endif
+
#ifndef FFMPEG_HAVE_AVIO
#define AVIO_FLAG_WRITE URL_WRONLY
#define avio_open url_fopen
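
The new compatibility block above lets the rest of the tree call avformat_close_input() unconditionally by mapping it back to av_close_input_file() on older libavformat. A sketch (not in the patch) of the same guard written with AV_VERSION_INT(), using the same 53.24.2 cutoff as the version checks above:

#include <libavutil/avutil.h>      /* AV_VERSION_INT() */
#include <libavformat/avformat.h>  /* LIBAVFORMAT_VERSION_INT */

#if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(53, 24, 2)
/* Old libavformat: emulate the new call. The macro dereferences its
 * pointer-to-pointer argument, so callers can keep passing &ctx. */
#  define avformat_close_input(ctx) av_close_input_file(*(ctx))
#endif
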
diff --git a/source/blender/blenkernel/intern/writeffmpeg.c b/source/blender/blenkernel/intern/writeffmpeg.c
index 40471514b48..532bd257ae1 100644
--- a/source/blender/blenkernel/intern/writeffmpeg.c
+++ b/source/blender/blenkernel/intern/writeffmpeg.c
@@ -42,8 +42,8 @@
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/rational.h>
+#include <libavutil/samplefmt.h>
#include <libswscale/swscale.h>
-#include <libavcodec/opt.h>
#include "MEM_guardedalloc.h"
@@ -615,7 +615,7 @@ static AVStream *alloc_audio_stream(RenderData *rd, int codec_id, AVFormatContex
c->sample_rate = rd->ffcodecdata.audio_mixrate;
c->bit_rate = ffmpeg_audio_bitrate * 1000;
- c->sample_fmt = SAMPLE_FMT_S16;
+ c->sample_fmt = AV_SAMPLE_FMT_S16;
c->channels = rd->ffcodecdata.audio_channels;
codec = avcodec_find_encoder(c->codec_id);
if (!codec) {
@@ -657,11 +657,21 @@ static AVStream *alloc_audio_stream(RenderData *rd, int codec_id, AVFormatContex
}
/* essential functions -- start, append, end */
+static void ffmpeg_dict_set_int(AVDictionary **dict, const char *key, int value)
+{
+ char buffer[32];
+
+ BLI_snprintf(buffer, sizeof(buffer), "%d", value);
+
+ av_dict_set(dict, key, buffer, 0);
+}
+
static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, ReportList *reports)
{
/* Handle to the output file */
AVFormatContext *of;
AVOutputFormat *fmt;
+ AVDictionary *opts = NULL;
char name[256];
const char **exts;
@@ -707,13 +717,14 @@ static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, Report
of->oformat = fmt;
of->packet_size = rd->ffcodecdata.mux_packet_size;
if (ffmpeg_audio_codec != CODEC_ID_NONE) {
- of->mux_rate = rd->ffcodecdata.mux_rate;
+ ffmpeg_dict_set_int(&opts, "muxrate", rd->ffcodecdata.mux_rate);
}
else {
- of->mux_rate = 0;
+ av_dict_set(&opts, "muxrate", "0", 0);
}
- of->preload = (int)(0.5 * AV_TIME_BASE);
+ ffmpeg_dict_set_int(&opts, "preload", (int)(0.5 * AV_TIME_BASE));
+
of->max_delay = (int)(0.7 * AV_TIME_BASE);
fmt->audio_codec = ffmpeg_audio_codec;
@@ -776,6 +787,7 @@ static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, Report
fmt->audio_codec = CODEC_ID_PCM_S16LE;
if (ffmpeg_audio_codec != CODEC_ID_NONE && rd->ffcodecdata.audio_mixrate != 48000 && rd->ffcodecdata.audio_channels != 2) {
BKE_report(reports, RPT_ERROR, "FFMPEG only supports 48khz / stereo audio for DV!");
+ av_dict_free(&opts);
return 0;
}
}
@@ -785,6 +797,7 @@ static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, Report
printf("alloc video stream %p\n", video_stream);
if (!video_stream) {
BKE_report(reports, RPT_ERROR, "Error initializing video stream.");
+ av_dict_free(&opts);
return 0;
}
}
@@ -793,27 +806,26 @@ static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, Report
audio_stream = alloc_audio_stream(rd, fmt->audio_codec, of);
if (!audio_stream) {
BKE_report(reports, RPT_ERROR, "Error initializing audio stream.");
+ av_dict_free(&opts);
return 0;
}
}
- if (av_set_parameters(of, NULL) < 0) {
- BKE_report(reports, RPT_ERROR, "Error setting output parameters.");
- return 0;
- }
if (!(fmt->flags & AVFMT_NOFILE)) {
if (avio_open(&of->pb, name, AVIO_FLAG_WRITE) < 0) {
BKE_report(reports, RPT_ERROR, "Could not open file for writing.");
+ av_dict_free(&opts);
return 0;
}
}
-
- if (av_write_header(of) < 0) {
+ if (avformat_write_header(of, NULL) < 0) {
BKE_report(reports, RPT_ERROR, "Could not initialize streams. Probably unsupported codec combination.");
+ av_dict_free(&opts);
return 0;
}
outfile = of;
av_dump_format(of, 0, name, 1);
+ av_dict_free(&opts);
return 1;
}
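
With av_set_parameters() and av_write_header() gone, muxer settings that used to be plain AVFormatContext fields (mux_rate, preload) become string options consumed by avformat_write_header(). A sketch (not part of the patch) of that mechanism, assuming an already configured output context and an integer mux_rate value:

#include <stdio.h>
#include <libavformat/avformat.h>
#include <libavutil/dict.h>

/* Hypothetical helper showing the option-based replacement for
 * av_set_parameters() + av_write_header(). */
static int write_header_with_options(AVFormatContext *of, int mux_rate)
{
	AVDictionary *opts = NULL;
	char mux_rate_str[32];
	int ret;

	snprintf(mux_rate_str, sizeof(mux_rate_str), "%d", mux_rate);
	av_dict_set(&opts, "muxrate", mux_rate_str, 0);
	av_dict_set(&opts, "preload", "500000", 0);  /* 0.5 * AV_TIME_BASE, in microseconds */

	/* Consumes the options it recognizes; unrecognized entries stay in opts. */
	ret = avformat_write_header(of, &opts);
	av_dict_free(&opts);

	return ret;  /* < 0 usually means an unsupported codec/container combination */
}
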
diff --git a/source/blender/imbuf/intern/anim_movie.c b/source/blender/imbuf/intern/anim_movie.c
index 2370dbeebc5..f777d40dca9 100644
--- a/source/blender/imbuf/intern/anim_movie.c
+++ b/source/blender/imbuf/intern/anim_movie.c
@@ -445,7 +445,7 @@ static int startffmpeg(struct anim *anim)
int i, videoStream;
AVCodec *pCodec;
- AVFormatContext *pFormatCtx;
+ AVFormatContext *pFormatCtx = NULL;
AVCodecContext *pCodecCtx;
int frs_num;
double frs_den;
@@ -464,7 +464,7 @@ static int startffmpeg(struct anim *anim)
do_init_ffmpeg();
- if (av_open_input_file(&pFormatCtx, anim->name, NULL, 0, NULL) != 0) {
+ if (avformat_open_input(&pFormatCtx, anim->name, NULL, NULL) != 0) {
return -1;
}
diff --git a/source/blender/imbuf/intern/indexer.c b/source/blender/imbuf/intern/indexer.c
index 11da2f4af91..0ccd2680461 100644
--- a/source/blender/imbuf/intern/indexer.c
+++ b/source/blender/imbuf/intern/indexer.c
@@ -531,13 +531,6 @@ static struct proxy_output_ctx *alloc_proxy_output_ffmpeg(
rv->c->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
- if (av_set_parameters(rv->of, NULL) < 0) {
- fprintf(stderr, "Couldn't set output parameters? "
- "Proxy not built!\n");
- av_free(rv->of);
- return 0;
- }
-
if (avio_open(&rv->of->pb, fname, AVIO_FLAG_WRITE) < 0) {
fprintf(stderr, "Couldn't open outputfile! "
"Proxy not built!\n");
@@ -574,7 +567,12 @@ static struct proxy_output_ctx *alloc_proxy_output_ffmpeg(
NULL, NULL, NULL);
}
- av_write_header(rv->of);
+ if (avformat_write_header(rv->of, NULL) < 0) {
+ fprintf(stderr, "Couldn't set output parameters? "
+ "Proxy not built!\n");
+ av_free(rv->of);
+ return 0;
+ }
return rv;
}
@@ -737,7 +735,7 @@ static IndexBuildContext *index_ffmpeg_create_context(struct anim *anim, IMB_Tim
memset(context->proxy_ctx, 0, sizeof(context->proxy_ctx));
memset(context->indexer, 0, sizeof(context->indexer));
- if (av_open_input_file(&context->iFormatCtx, anim->name, NULL, 0, NULL) != 0) {
+ if (avformat_open_input(&context->iFormatCtx, anim->name, NULL, NULL) != 0) {
MEM_freeN(context);
return NULL;
}
diff --git a/source/blender/imbuf/intern/util.c b/source/blender/imbuf/intern/util.c
index a86e2bed0e5..92c10a094d3 100644
--- a/source/blender/imbuf/intern/util.c
+++ b/source/blender/imbuf/intern/util.c
@@ -247,7 +247,7 @@ void do_init_ffmpeg(void)
static int isffmpeg(const char *filename)
{
- AVFormatContext *pFormatCtx;
+ AVFormatContext *pFormatCtx = NULL;
unsigned int i;
int videoStream;
AVCodec *pCodec;
@@ -268,7 +268,7 @@ static int isffmpeg(const char *filename)
return 0;
}
- if (av_open_input_file(&pFormatCtx, filename, NULL, 0, NULL) != 0) {
+ if (avformat_open_input(&pFormatCtx, filename, NULL, NULL) != 0) {
if (UTIL_DEBUG) fprintf(stderr, "isffmpeg: av_open_input_file failed\n");
return 0;
}
diff --git a/source/gameengine/VideoTexture/VideoFFmpeg.cpp b/source/gameengine/VideoTexture/VideoFFmpeg.cpp
index f4d3fb75223..4586a50e6a9 100644
--- a/source/gameengine/VideoTexture/VideoFFmpeg.cpp
+++ b/source/gameengine/VideoTexture/VideoFFmpeg.cpp
@@ -162,14 +162,14 @@ void VideoFFmpeg::initParams (short width, short height, float rate, bool image)
}
-int VideoFFmpeg::openStream(const char *filename, AVInputFormat *inputFormat, AVFormatParameters *formatParams)
+int VideoFFmpeg::openStream(const char *filename, AVInputFormat *inputFormat, AVDictionary **formatParams)
{
- AVFormatContext *formatCtx;
+ AVFormatContext *formatCtx = NULL;
int i, videoStream;
AVCodec *codec;
AVCodecContext *codecCtx;
- if (av_open_input_file(&formatCtx, filename, inputFormat, 0, formatParams)!=0)
+ if (avformat_open_input(&formatCtx, filename, inputFormat, formatParams)!=0)
return -1;
if (av_find_stream_info(formatCtx)<0)
@@ -545,11 +545,7 @@ void VideoFFmpeg::openFile (char * filename)
// but it is really not desirable to seek on http file, so force streaming.
// It would be good to find this information from the context but there are no simple indication
!strncmp(filename, "http://", 7) ||
-#ifdef FFMPEG_PB_IS_POINTER
- (m_formatCtx->pb && m_formatCtx->pb->is_streamed)
-#else
- m_formatCtx->pb.is_streamed
-#endif
+ (m_formatCtx->pb && !m_formatCtx->pb->seekable)
)
{
// the file is in fact a streaming source, treat as cam to prevent seeking
@@ -586,14 +582,12 @@ void VideoFFmpeg::openCam (char * file, short camIdx)
{
// open camera source
AVInputFormat *inputFormat;
- AVFormatParameters formatParams;
- AVRational frameRate;
+ AVDictionary *formatParams = NULL;
char filename[28], rateStr[20];
char *p;
do_init_ffmpeg();
- memset(&formatParams, 0, sizeof(formatParams));
#ifdef WIN32
// video capture on windows only through Video For Windows driver
inputFormat = av_find_input_format("vfwcap");
@@ -623,7 +617,13 @@ void VideoFFmpeg::openCam (char * file, short camIdx)
sprintf(filename, "/dev/dv1394/%d", camIdx);
} else
{
- inputFormat = av_find_input_format("video4linux");
+ const char *formats[] = {"video4linux2,v4l2", "video4linux2", "video4linux"};
+ int i, formatsCount = sizeof(formats) / sizeof(char*);
+ for (i = 0; i < formatsCount; i++) {
+ inputFormat = av_find_input_format(formats[i]);
+ if (inputFormat)
+ break;
+ }
sprintf(filename, "/dev/video%d", camIdx);
}
if (!inputFormat)
@@ -637,20 +637,22 @@ void VideoFFmpeg::openCam (char * file, short camIdx)
if ((p = strchr(filename, ':')) != 0)
*p = 0;
}
- if (file && (p = strchr(file, ':')) != NULL)
- formatParams.standard = p+1;
+ if (file && (p = strchr(file, ':')) != NULL) {
+ av_dict_set(&formatParams, "standard", p+1, 0);
+ }
#endif
//frame rate
if (m_captRate <= 0.f)
m_captRate = defFrameRate;
sprintf(rateStr, "%f", m_captRate);
- av_parse_video_rate(&frameRate, rateStr);
- // populate format parameters
- // need to specify the time base = inverse of rate
- formatParams.time_base.num = frameRate.den;
- formatParams.time_base.den = frameRate.num;
- formatParams.width = m_captWidth;
- formatParams.height = m_captHeight;
+
+ av_dict_set(&formatParams, "framerate", rateStr, 0);
+
+ if (m_captWidth > 0 && m_captHeight > 0) {
+ char video_size[64];
+ BLI_snprintf(video_size, sizeof(video_size), "%dx%d", m_captWidth, m_captHeight);
+ av_dict_set(&formatParams, "video_size", video_size, 0);
+ }
if (openStream(filename, inputFormat, &formatParams) != 0)
return;
@@ -665,6 +667,8 @@ void VideoFFmpeg::openCam (char * file, short camIdx)
// no need to thread if the system has a single core
m_isThreaded = true;
}
+
+ av_dict_free(&formatParams);
}
// play video
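
The camera path above replaces the removed AVFormatParameters struct with per-demuxer string options. A sketch (not part of the patch) of the same idea for a Video4Linux2 device; the device path and option values are examples only:

#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavutil/dict.h>

/* Hypothetical capture-open helper mirroring openCam()'s option handling. */
static int open_v4l2_camera(AVFormatContext **ctx)
{
	AVInputFormat *input_format;
	AVDictionary *opts = NULL;
	int ret;

	avdevice_register_all();  /* registers capture "formats" such as video4linux2 */

	input_format = av_find_input_format("video4linux2");
	if (!input_format)
		return -1;

	av_dict_set(&opts, "framerate", "25", 0);        /* was AVFormatParameters.time_base */
	av_dict_set(&opts, "video_size", "640x480", 0);  /* was .width / .height */
	av_dict_set(&opts, "standard", "pal", 0);        /* was .standard */

	*ctx = NULL;
	ret = avformat_open_input(ctx, "/dev/video0", input_format, &opts);
	av_dict_free(&opts);  /* options the demuxer did not consume remain here */

	return ret;
}
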
diff --git a/source/gameengine/VideoTexture/VideoFFmpeg.h b/source/gameengine/VideoTexture/VideoFFmpeg.h
index d3458211949..e63032e0c66 100644
--- a/source/gameengine/VideoTexture/VideoFFmpeg.h
+++ b/source/gameengine/VideoTexture/VideoFFmpeg.h
@@ -46,10 +46,6 @@ extern "C" {
# define FFMPEG_CODEC_IS_POINTER 1
#endif
-#if LIBAVFORMAT_VERSION_INT >= (52 << 16)
-# define FFMPEG_PB_IS_POINTER 1
-#endif
-
#ifdef FFMPEG_CODEC_IS_POINTER
static inline AVCodecContext* get_codec_from_stream(AVStream* stream)
{
@@ -172,7 +168,7 @@ protected:
double actFrameRate (void) { return m_frameRate * m_baseFrameRate; }
/// common function to video file and capture
- int openStream(const char *filename, AVInputFormat *inputFormat, AVFormatParameters *formatParams);
+ int openStream(const char *filename, AVInputFormat *inputFormat, AVDictionary **formatParams);
/// check if a frame is available and load it in pFrame, return true if a frame could be retrieved
AVFrame* grabFrame(long frame);