git.blender.org/blender.git

author      Benoit Bolsee <benoit.bolsee@online.be>   2008-11-10 00:42:30 +0300
committer   Benoit Bolsee <benoit.bolsee@online.be>   2008-11-10 00:42:30 +0300
commit      2de476c88f4268f2dcfccfe6c9ce55a8697ac83e (patch)
tree        86e8f63f7a20ff4d4fde6a2f898130f2cf1b927f /source/gameengine
parent      0a8e8c8c9ea6e5acf65c2a4ee0c392328f5f96b8 (diff)
VideoTexture: Preserve alpha channel if present in video, images and sequences. Better detection of end of video.
Diffstat (limited to 'source/gameengine')
-rw-r--r--    source/gameengine/VideoTexture/VideoBase.cpp     34
-rw-r--r--    source/gameengine/VideoTexture/VideoBase.h        2
-rw-r--r--    source/gameengine/VideoTexture/VideoFFmpeg.cpp    86
-rw-r--r--    source/gameengine/VideoTexture/VideoFFmpeg.h       3
4 files changed, 87 insertions(+), 38 deletions(-)
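
Before the per-file diffs, a note on the core of the alpha change: openStream() now inspects the decoder's pixel format and, when it is one of the 32-bit packed formats that carry an alpha byte (FFmpeg's PIX_FMT_RGB32, PIX_FMT_BGR32, PIX_FMT_RGB32_1, PIX_FMT_BGR32_1), converts to RGBA and tags the source as RGBA32 so the filter stage keeps the alpha channel; otherwise it falls back to the old RGB24 path. A minimal sketch of that decision follows; the SrcPixFmt enum is a hypothetical stand-in so the snippet compiles without the FFmpeg headers.

    #include <cstdio>

    // Stand-in for the FFmpeg pixel formats tested in the patch (hypothetical
    // names; the real code checks m_codecCtx->pix_fmt against PIX_FMT_RGB32,
    // PIX_FMT_BGR32, PIX_FMT_RGB32_1 and PIX_FMT_BGR32_1).
    enum SrcPixFmt { SRC_RGB32, SRC_BGR32, SRC_RGB32_1, SRC_BGR32_1, SRC_YUV420P, SRC_RGB24 };

    // Output formats as extended in VideoBase.h below.
    enum VideoFormat { None, RGB24, YV12, RGBA32 };

    static VideoFormat chooseOutputFormat(SrcPixFmt pixFmt)
    {
        switch (pixFmt)
        {
        case SRC_RGB32:
        case SRC_BGR32:
        case SRC_RGB32_1:
        case SRC_BGR32_1:
            return RGBA32;   // source carries alpha: convert to PIX_FMT_RGBA
        default:
            return RGB24;    // no alpha: keep converting to PIX_FMT_RGB24
        }
    }

    int main()
    {
        std::printf("%d %d\n", chooseOutputFormat(SRC_BGR32), chooseOutputFormat(SRC_YUV420P));
        return 0;
    }
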
diff --git a/source/gameengine/VideoTexture/VideoBase.cpp b/source/gameengine/VideoTexture/VideoBase.cpp
index 038a04a55a8..10117c3af9e 100644
--- a/source/gameengine/VideoTexture/VideoBase.cpp
+++ b/source/gameengine/VideoTexture/VideoBase.cpp
@@ -54,22 +54,34 @@ void VideoBase::process (BYTE * sample)
if (m_image != NULL && !m_avail)
{
// filters used
- FilterRGB24 filtRGB;
- FilterYV12 filtYUV;
// convert video format to image
switch (m_format)
{
+ case RGBA32:
+ {
+ FilterRGBA32 filtRGBA;
+ // use filter object for format to convert image
+ filterImage(filtRGBA, sample, m_orgSize);
+ // finish
+ break;
+ }
case RGB24:
- // use filter object for format to convert image
- filterImage(filtRGB, sample, m_orgSize);
- // finish
- break;
+ {
+ FilterRGB24 filtRGB;
+ // use filter object for format to convert image
+ filterImage(filtRGB, sample, m_orgSize);
+ // finish
+ break;
+ }
case YV12:
- // use filter object for format to convert image
- filtYUV.setBuffs(sample, m_orgSize);
- filterImage(filtYUV, sample, m_orgSize);
- // finish
- break;
+ {
+ // use filter object for format to convert image
+ FilterYV12 filtYUV;
+ filtYUV.setBuffs(sample, m_orgSize);
+ filterImage(filtYUV, sample, m_orgSize);
+ // finish
+ break;
+ }
}
}
}
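
The VideoBase::process() hunk above also restructures the switch: instead of declaring every filter up front, each case constructs only the filter it needs inside its own braces. In C++ a case label may not jump over the initialization of a local, so the braces become necessary once per-case locals exist, as with the new RGBA32 case and its FilterRGBA32. A minimal illustration of the pattern, with hypothetical filter types rather than the VideoTexture ones:

    #include <cstdio>

    struct FilterA { FilterA() { std::puts("FilterA constructed"); } };
    struct FilterB { FilterB() { std::puts("FilterB constructed"); } };

    enum Format { FormatA, FormatB };

    static void process(Format fmt)
    {
        switch (fmt)
        {
        case FormatA:
            {
                FilterA filt;    // constructed only when this case runs
                // ... convert using filt ...
                break;
            }                    // filt destroyed at the end of the block
        case FormatB:
            {
                FilterB filt;    // a same-named local is fine in its own scope
                // ... convert using filt ...
                break;
            }
        }
    }

    int main()
    {
        process(FormatB);        // prints "FilterB constructed" only
        return 0;
    }
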
diff --git a/source/gameengine/VideoTexture/VideoBase.h b/source/gameengine/VideoTexture/VideoBase.h
index 5bb635e1f19..15ecb7a78f4 100644
--- a/source/gameengine/VideoTexture/VideoBase.h
+++ b/source/gameengine/VideoTexture/VideoBase.h
@@ -39,7 +39,7 @@ const int SourceStopped = 3;
// video source formats
-enum VideoFormat { None, RGB24, YV12 };
+enum VideoFormat { None, RGB24, YV12, RGBA32 };
/// base class for video source
diff --git a/source/gameengine/VideoTexture/VideoFFmpeg.cpp b/source/gameengine/VideoTexture/VideoFFmpeg.cpp
index af6dfec8e87..02798c7e596 100644
--- a/source/gameengine/VideoTexture/VideoFFmpeg.cpp
+++ b/source/gameengine/VideoTexture/VideoFFmpeg.cpp
@@ -54,7 +54,7 @@ VideoFFmpeg::VideoFFmpeg (HRESULT * hRslt) : VideoBase(),
m_codec(NULL), m_formatCtx(NULL), m_codecCtx(NULL),
m_frame(NULL), m_frameDeinterlaced(NULL), m_frameRGB(NULL), m_imgConvertCtx(NULL),
m_deinterlace(false), m_preseek(0), m_videoStream(-1), m_baseFrameRate(25.0),
-m_lastFrame(-1), m_curPosition(-1), m_startTime(0),
+m_lastFrame(-1), m_eof(false), m_curPosition(-1), m_startTime(0),
m_captWidth(0), m_captHeight(0), m_captRate(0.f), m_isImage(false)
{
// set video format
@@ -191,7 +191,6 @@ int VideoFFmpeg::openStream(const char *filename, AVInputFormat *inputFormat, AV
m_frameDeinterlaced = avcodec_alloc_frame();
m_frameRGB = avcodec_alloc_frame();
-
// allocate buffer if deinterlacing is required
avpicture_fill((AVPicture*)m_frameDeinterlaced,
(uint8_t*)MEM_callocN(avpicture_get_size(
@@ -200,24 +199,51 @@ int VideoFFmpeg::openStream(const char *filename, AVInputFormat *inputFormat, AV
"ffmpeg deinterlace"),
m_codecCtx->pix_fmt, m_codecCtx->width, m_codecCtx->height);
- // allocate buffer to store final decoded frame
- avpicture_fill((AVPicture*)m_frameRGB,
- (uint8_t*)MEM_callocN(avpicture_get_size(
- PIX_FMT_RGB24,
- m_codecCtx->width, m_codecCtx->height),
- "ffmpeg rgb"),
- PIX_FMT_RGB24, m_codecCtx->width, m_codecCtx->height);
- // allocate sws context
- m_imgConvertCtx = sws_getContext(
- m_codecCtx->width,
- m_codecCtx->height,
- m_codecCtx->pix_fmt,
- m_codecCtx->width,
- m_codecCtx->height,
- PIX_FMT_RGB24,
- SWS_FAST_BILINEAR,
- NULL, NULL, NULL);
-
+ // check if the pixel format supports Alpha
+ if (m_codecCtx->pix_fmt == PIX_FMT_RGB32 ||
+ m_codecCtx->pix_fmt == PIX_FMT_BGR32 ||
+ m_codecCtx->pix_fmt == PIX_FMT_RGB32_1 ||
+ m_codecCtx->pix_fmt == PIX_FMT_BGR32_1)
+ {
+ // allocate buffer to store final decoded frame
+ m_format = RGBA32;
+ avpicture_fill((AVPicture*)m_frameRGB,
+ (uint8_t*)MEM_callocN(avpicture_get_size(
+ PIX_FMT_RGBA,
+ m_codecCtx->width, m_codecCtx->height),
+ "ffmpeg rgba"),
+ PIX_FMT_RGBA, m_codecCtx->width, m_codecCtx->height);
+ // allocate sws context
+ m_imgConvertCtx = sws_getContext(
+ m_codecCtx->width,
+ m_codecCtx->height,
+ m_codecCtx->pix_fmt,
+ m_codecCtx->width,
+ m_codecCtx->height,
+ PIX_FMT_RGBA,
+ SWS_FAST_BILINEAR,
+ NULL, NULL, NULL);
+ } else
+ {
+ // allocate buffer to store final decoded frame
+ m_format = RGB24;
+ avpicture_fill((AVPicture*)m_frameRGB,
+ (uint8_t*)MEM_callocN(avpicture_get_size(
+ PIX_FMT_RGB24,
+ m_codecCtx->width, m_codecCtx->height),
+ "ffmpeg rgb"),
+ PIX_FMT_RGB24, m_codecCtx->width, m_codecCtx->height);
+ // allocate sws context
+ m_imgConvertCtx = sws_getContext(
+ m_codecCtx->width,
+ m_codecCtx->height,
+ m_codecCtx->pix_fmt,
+ m_codecCtx->width,
+ m_codecCtx->height,
+ PIX_FMT_RGB24,
+ SWS_FAST_BILINEAR,
+ NULL, NULL, NULL);
+ }
if (!m_imgConvertCtx) {
avcodec_close(m_codecCtx);
av_close_input_file(m_formatCtx);
@@ -481,7 +507,7 @@ void VideoFFmpeg::setPositions (void)
// set video start time
m_startTime = PIL_check_seconds_timer();
// if file is played and actual position is before end position
- if (m_isFile && m_lastFrame >= 0 && m_lastFrame < m_range[1] * actFrameRate())
+ if (m_isFile && !m_eof && m_lastFrame >= 0 && m_lastFrame < m_range[1] * actFrameRate())
// continue from actual position
m_startTime -= double(m_lastFrame) / actFrameRate();
else
@@ -520,7 +546,8 @@ bool VideoFFmpeg::grabFrame(long position)
}
}
// if the position is not in preseek, do a direct jump
- if (position != m_curPosition + 1) {
+ if (position != m_curPosition + 1)
+ {
double timeBase = av_q2d(m_formatCtx->streams[m_videoStream]->time_base);
long long pos = (long long)
((long long) (position - m_preseek) * AV_TIME_BASE / m_baseFrameRate);
@@ -532,10 +559,16 @@ bool VideoFFmpeg::grabFrame(long position)
if (startTs != AV_NOPTS_VALUE)
pos += (long long)(startTs * AV_TIME_BASE * timeBase);
- av_seek_frame(m_formatCtx, -1, pos, AVSEEK_FLAG_BACKWARD);
- // current position is now lost, guess a value.
- // It's not important because it will be set at the end of this function
- m_curPosition = position - m_preseek - 1;
+ if (position <= m_curPosition || !m_eof)
+ {
+ // no need to seek past the end of the file
+ if (av_seek_frame(m_formatCtx, -1, pos, AVSEEK_FLAG_BACKWARD) >= 0)
+ {
+ // current position is now lost, guess a value.
+ // It's not important because it will be set at the end of this function
+ m_curPosition = position - m_preseek - 1;
+ }
+ }
// this is the timestamp of the frame we're looking for
targetTs = (long long)(((double) position) / m_baseFrameRate / timeBase);
if (startTs != AV_NOPTS_VALUE)
@@ -599,6 +632,7 @@ bool VideoFFmpeg::grabFrame(long position)
}
av_free_packet(&packet);
}
+ m_eof = !frameLoaded;
if (frameLoaded)
m_curPosition = position;
return frameLoaded;
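
The second half of the commit is the end-of-video detection: grabFrame() now latches m_eof whenever no frame could be decoded, setPositions() no longer treats a finished stream as resumable, and a jump request will not seek forward once the end has been reached (the seek result is also checked before the current position is discarded). A condensed, self-contained model of that latch, with the FFmpeg decoding stubbed out:

    #include <cstdio>

    // Minimal model of the m_eof latch added in this commit; member names mirror
    // the patch, but the decode loop is replaced by a boolean parameter.
    class EofLatch
    {
    public:
        EofLatch() : m_eof(false), m_curPosition(-1) {}

        // grabFrame(): after the decode loop, remember whether the stream ran out.
        bool grabFrame(long position, bool frameLoaded)
        {
            m_eof = !frameLoaded;          // latch end-of-video
            if (frameLoaded)
                m_curPosition = position;
            return frameLoaded;
        }

        // Should a seek be attempted for this target position?
        // Backward jumps are always allowed; forward jumps are pointless once
        // the end of the stream has been reached.
        bool shouldSeek(long position) const
        {
            return position <= m_curPosition || !m_eof;
        }

    private:
        bool m_eof;
        long m_curPosition;
    };

    int main()
    {
        EofLatch v;
        v.grabFrame(10, true);                                // frame decoded
        std::printf("seek to 20? %d\n", v.shouldSeek(20));    // 1: not at EOF yet
        v.grabFrame(20, false);                               // decode failed: EOF
        std::printf("seek to 30? %d\n", v.shouldSeek(30));    // 0: past the end
        std::printf("seek to 5?  %d\n", v.shouldSeek(5));     // 1: backward seek ok
        return 0;
    }
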
diff --git a/source/gameengine/VideoTexture/VideoFFmpeg.h b/source/gameengine/VideoTexture/VideoFFmpeg.h
index db2cb293d8b..e60f1727aab 100644
--- a/source/gameengine/VideoTexture/VideoFFmpeg.h
+++ b/source/gameengine/VideoTexture/VideoFFmpeg.h
@@ -117,6 +117,9 @@ protected:
/// last displayed frame
long m_lastFrame;
+ /// end of file reached
+ bool m_eof;
+
/// current file pointer position in file expressed in frame number
long m_curPosition;