git.blender.org/blender.git
Diffstat (limited to 'source/blender/imbuf/intern')
 source/blender/imbuf/intern/IMB_anim.h                |   9
 source/blender/imbuf/intern/anim_movie.c              | 273
 source/blender/imbuf/intern/bmp.c                     |   3
 source/blender/imbuf/intern/colormanagement.c         |  33
 source/blender/imbuf/intern/colormanagement_inline.c  |   5
 source/blender/imbuf/intern/divers.c                  |  28
 source/blender/imbuf/intern/imageprocess.c            |   1
 source/blender/imbuf/intern/indexer.c                 |   4
 source/blender/imbuf/intern/jpeg.c                    |   4
 source/blender/imbuf/intern/openexr/openexr_api.cpp   |   2
 source/blender/imbuf/intern/readimage.c               |   4
 source/blender/imbuf/intern/tiff.c                    |  11
 source/blender/imbuf/intern/transform.cc              |   1
 source/blender/imbuf/intern/util_gpu.c                | 110
 14 files changed, 321 insertions(+), 167 deletions(-)
diff --git a/source/blender/imbuf/intern/IMB_anim.h b/source/blender/imbuf/intern/IMB_anim.h
index e99572adbb0..0ac1d7bfb74 100644
--- a/source/blender/imbuf/intern/IMB_anim.h
+++ b/source/blender/imbuf/intern/IMB_anim.h
@@ -109,17 +109,22 @@ struct anim {
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
const AVCodec *pCodec;
- AVFrame *pFrame;
- int pFrameComplete;
AVFrame *pFrameRGB;
AVFrame *pFrameDeinterlaced;
struct SwsContext *img_convert_ctx;
int videoStream;
+ AVFrame *pFrame;
+ bool pFrame_complete;
+ AVFrame *pFrame_backup;
+ bool pFrame_backup_complete;
+
struct ImBuf *cur_frame_final;
int64_t cur_pts;
int64_t cur_key_frame_pts;
AVPacket *cur_packet;
+
+ bool seek_before_decode;
#endif
char index_dir[768];
diff --git a/source/blender/imbuf/intern/anim_movie.c b/source/blender/imbuf/intern/anim_movie.c
index 0052ce19aa1..52ed68a1ff3 100644
--- a/source/blender/imbuf/intern/anim_movie.c
+++ b/source/blender/imbuf/intern/anim_movie.c
@@ -675,7 +675,7 @@ static int startffmpeg(struct anim *anim)
anim->orientation = 0;
anim->framesize = anim->x * anim->y * 4;
- anim->cur_position = -1;
+ anim->cur_position = 0;
anim->cur_frame_final = 0;
anim->cur_pts = -1;
anim->cur_key_frame_pts = -1;
@@ -683,7 +683,9 @@ static int startffmpeg(struct anim *anim)
anim->cur_packet->stream_index = -1;
anim->pFrame = av_frame_alloc();
- anim->pFrameComplete = false;
+ anim->pFrame_backup = av_frame_alloc();
+ anim->pFrame_backup_complete = false;
+ anim->pFrame_complete = false;
anim->pFrameDeinterlaced = av_frame_alloc();
anim->pFrameRGB = av_frame_alloc();
anim->pFrameRGB->format = AV_PIX_FMT_RGBA;
@@ -698,6 +700,7 @@ static int startffmpeg(struct anim *anim)
av_frame_free(&anim->pFrameRGB);
av_frame_free(&anim->pFrameDeinterlaced);
av_frame_free(&anim->pFrame);
+ av_frame_free(&anim->pFrame_backup);
anim->pCodecCtx = NULL;
return -1;
}
@@ -710,6 +713,7 @@ static int startffmpeg(struct anim *anim)
av_frame_free(&anim->pFrameRGB);
av_frame_free(&anim->pFrameDeinterlaced);
av_frame_free(&anim->pFrame);
+ av_frame_free(&anim->pFrame_backup);
anim->pCodecCtx = NULL;
return -1;
}
@@ -747,6 +751,7 @@ static int startffmpeg(struct anim *anim)
av_frame_free(&anim->pFrameRGB);
av_frame_free(&anim->pFrameDeinterlaced);
av_frame_free(&anim->pFrame);
+ av_frame_free(&anim->pFrame_backup);
anim->pCodecCtx = NULL;
return -1;
}
@@ -781,22 +786,71 @@ static int startffmpeg(struct anim *anim)
return 0;
}
+static double ffmpeg_steps_per_frame_get(struct anim *anim)
+{
+ AVStream *v_st = anim->pFormatCtx->streams[anim->videoStream];
+ AVRational time_base = v_st->time_base;
+ AVRational frame_rate = av_guess_frame_rate(anim->pFormatCtx, v_st, NULL);
+ return av_q2d(av_inv_q(av_mul_q(frame_rate, time_base)));
+}
+
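As a cross-check on the arithmetic in ffmpeg_steps_per_frame_get(): one frame spans 1 / (frame_rate * time_base) PTS ticks. A minimal standalone sketch, assuming a hypothetical 30 fps stream with a 1/90000 time base (values chosen for illustration, not taken from this patch):

#include <libavutil/rational.h>
#include <stdio.h>

int main(void)
{
  AVRational time_base = {1, 90000}; /* assumed container time base */
  AVRational frame_rate = {30, 1};   /* assumed stream frame rate */
  /* Same expression as ffmpeg_steps_per_frame_get() above. */
  double steps = av_q2d(av_inv_q(av_mul_q(frame_rate, time_base)));
  printf("PTS steps per frame: %.1f\n", steps); /* 90000 / 30 = 3000.0 */
  return 0;
}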
+/* Store a backup frame.
+ * With VFR movies, if the PTS is not matched exactly, scanning keeps looking for the next PTS
+ * and is likely to overshoot before it stops. Keeping the previous frame backed up makes it
+ * possible to fall back to it when an overshoot happens.
+ */
+static void ffmpeg_double_buffer_backup_frame_store(struct anim *anim, int64_t pts_to_search)
+{
+ /* `anim->pFrame` is beyond `pts_to_search`. Don't store it. */
+ if (anim->pFrame_backup_complete && anim->cur_pts >= pts_to_search) {
+ return;
+ }
+ if (!anim->pFrame_complete) {
+ return;
+ }
+
+ if (anim->pFrame_backup_complete) {
+ av_frame_unref(anim->pFrame_backup);
+ }
+
+ av_frame_move_ref(anim->pFrame_backup, anim->pFrame);
+ anim->pFrame_backup_complete = true;
+}
+
+/* Free stored backup frame. */
+static void ffmpeg_double_buffer_backup_frame_clear(struct anim *anim)
+{
+ if (anim->pFrame_backup_complete) {
+ av_frame_unref(anim->pFrame_backup);
+ }
+ anim->pFrame_backup_complete = false;
+}
+
+/* Return the most recently decoded frame. If there is none, return the frame from the backup
+ * buffer. */
+static AVFrame *ffmpeg_double_buffer_frame_fallback_get(struct anim *anim)
+{
+ av_log(anim->pFormatCtx, AV_LOG_ERROR, "DECODE UNHAPPY: PTS not matched!\n");
+
+ if (anim->pFrame_complete) {
+ return anim->pFrame;
+ }
+ if (anim->pFrame_backup_complete) {
+ return anim->pFrame_backup;
+ }
+ return NULL;
+}
+
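To make the overshoot case described above concrete, a short worked example with hypothetical PTS values (assumed purely for illustration):

/* Hypothetical VFR timeline, pts_to_search = 2000:
 *   frame A: PTS 1000, pkt_duration 1600  -> covers [1000, 2600)
 *   frame B: PTS 2600, pkt_duration 1400  -> covers [2600, 4000)
 * The scan decodes A (1000 < 2000), stores it with ffmpeg_double_buffer_backup_frame_store(),
 * then decodes B and stops (2600 >= 2000). B starts past the requested PTS, but the backed-up
 * frame A still covers it, so the correct frame can be returned instead of an arbitrary
 * neighbour or the "DECODE UNHAPPY" fallback. */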
/* postprocess the image in anim->pFrame and do color conversion
* and deinterlacing stuff.
*
* Output is anim->cur_frame_final
*/
-static void ffmpeg_postprocess(struct anim *anim)
+static void ffmpeg_postprocess(struct anim *anim, AVFrame *input)
{
- AVFrame *input = anim->pFrame;
ImBuf *ibuf = anim->cur_frame_final;
int filter_y = 0;
- if (!anim->pFrameComplete) {
- return;
- }
-
/* This means the data wasn't read properly,
* this check stops crashing */
if (input->data[0] == 0 && input->data[1] == 0 && input->data[2] == 0 && input->data[3] == 0) {
@@ -808,7 +862,7 @@ static void ffmpeg_postprocess(struct anim *anim)
av_log(anim->pFormatCtx,
AV_LOG_DEBUG,
- " POSTPROC: anim->pFrame planes: %p %p %p %p\n",
+ " POSTPROC: AVFrame planes: %p %p %p %p\n",
input->data[0],
input->data[1],
input->data[2],
@@ -852,6 +906,52 @@ static void ffmpeg_postprocess(struct anim *anim)
}
}
+static void final_frame_log(struct anim *anim,
+ int64_t frame_pts_start,
+ int64_t frame_pts_end,
+ const char *str)
+{
+ av_log(anim->pFormatCtx,
+ AV_LOG_INFO,
+ "DECODE HAPPY: %s frame PTS range %" PRId64 " - %" PRId64 ".\n",
+ str,
+ frame_pts_start,
+ frame_pts_end);
+}
+
+static bool ffmpeg_pts_isect(int64_t pts_start, int64_t pts_end, int64_t pts_to_search)
+{
+ return pts_start <= pts_to_search && pts_to_search < pts_end;
+}
+
+/* Return frame that matches `pts_to_search`, NULL if matching frame does not exist. */
+static AVFrame *ffmpeg_frame_by_pts_get(struct anim *anim, int64_t pts_to_search)
+{
+ /* NOTE: `frame->pts + frame->pkt_duration` does not always match the PTS of the next frame.
+ * See footage from T86361. It is OK to use here, because the PTS only has to match the current
+ * or the backup frame. If there is no current frame, return NULL.
+ */
+ if (!anim->pFrame_complete) {
+ return NULL;
+ }
+
+ const bool backup_frame_ready = anim->pFrame_backup_complete;
+ const int64_t recent_start = av_get_pts_from_frame(anim->pFrame);
+ const int64_t recent_end = recent_start + anim->pFrame->pkt_duration;
+ const int64_t backup_start = backup_frame_ready ? av_get_pts_from_frame(anim->pFrame_backup) : 0;
+
+ AVFrame *best_frame = NULL;
+ if (ffmpeg_pts_isect(recent_start, recent_end, pts_to_search)) {
+ final_frame_log(anim, recent_start, recent_end, "Recent");
+ best_frame = anim->pFrame;
+ }
+ else if (backup_frame_ready && ffmpeg_pts_isect(backup_start, recent_start, pts_to_search)) {
+ final_frame_log(anim, backup_start, recent_start, "Backup");
+ best_frame = anim->pFrame_backup;
+ }
+ return best_frame;
+}
+
static void ffmpeg_decode_store_frame_pts(struct anim *anim)
{
anim->cur_pts = av_get_pts_from_frame(anim->pFrame);
@@ -863,7 +963,7 @@ static void ffmpeg_decode_store_frame_pts(struct anim *anim)
av_log(anim->pFormatCtx,
AV_LOG_DEBUG,
" FRAME DONE: cur_pts=%" PRId64 ", guessed_pts=%" PRId64 "\n",
- (anim->pFrame->pts == AV_NOPTS_VALUE) ? -1 : (int64_t)anim->pFrame->pts,
+ av_get_pts_from_frame(anim->pFrame),
(int64_t)anim->cur_pts);
}
@@ -888,8 +988,8 @@ static int ffmpeg_decode_video_frame(struct anim *anim)
/* Sometimes, decoder returns more than one frame per sent packet. Check if frames are available.
* This frames must be read, otherwise decoding will fail. See T91405. */
- anim->pFrameComplete = avcodec_receive_frame(anim->pCodecCtx, anim->pFrame) == 0;
- if (anim->pFrameComplete) {
+ anim->pFrame_complete = avcodec_receive_frame(anim->pCodecCtx, anim->pFrame) == 0;
+ if (anim->pFrame_complete) {
av_log(anim->pFormatCtx, AV_LOG_DEBUG, " DECODE FROM CODEC BUFFER\n");
ffmpeg_decode_store_frame_pts(anim);
return 1;
@@ -902,20 +1002,22 @@ static int ffmpeg_decode_video_frame(struct anim *anim)
}
while ((rval = ffmpeg_read_video_frame(anim, anim->cur_packet)) >= 0) {
+ if (anim->cur_packet->stream_index != anim->videoStream) {
+ continue;
+ }
+
av_log(anim->pFormatCtx,
AV_LOG_DEBUG,
- "%sREAD: strID=%d (VID: %d) dts=%" PRId64 " pts=%" PRId64 " %s\n",
- (anim->cur_packet->stream_index == anim->videoStream) ? "->" : " ",
+ "READ: strID=%d dts=%" PRId64 " pts=%" PRId64 " %s\n",
anim->cur_packet->stream_index,
- anim->videoStream,
(anim->cur_packet->dts == AV_NOPTS_VALUE) ? -1 : (int64_t)anim->cur_packet->dts,
(anim->cur_packet->pts == AV_NOPTS_VALUE) ? -1 : (int64_t)anim->cur_packet->pts,
(anim->cur_packet->flags & AV_PKT_FLAG_KEY) ? " KEY" : "");
avcodec_send_packet(anim->pCodecCtx, anim->cur_packet);
- anim->pFrameComplete = avcodec_receive_frame(anim->pCodecCtx, anim->pFrame) == 0;
+ anim->pFrame_complete = avcodec_receive_frame(anim->pCodecCtx, anim->pFrame) == 0;
- if (anim->pFrameComplete) {
+ if (anim->pFrame_complete) {
ffmpeg_decode_store_frame_pts(anim);
break;
}
@@ -926,9 +1028,9 @@ static int ffmpeg_decode_video_frame(struct anim *anim)
if (rval == AVERROR_EOF) {
/* Flush any remaining frames out of the decoder. */
avcodec_send_packet(anim->pCodecCtx, NULL);
- anim->pFrameComplete = avcodec_receive_frame(anim->pCodecCtx, anim->pFrame) == 0;
+ anim->pFrame_complete = avcodec_receive_frame(anim->pCodecCtx, anim->pFrame) == 0;
- if (anim->pFrameComplete) {
+ if (anim->pFrame_complete) {
ffmpeg_decode_store_frame_pts(anim);
rval = 0;
}
@@ -990,15 +1092,6 @@ static int ffmpeg_seek_by_byte(AVFormatContext *pFormatCtx)
return false;
}
-static double ffmpeg_steps_per_frame_get(struct anim *anim)
-{
- AVStream *v_st = anim->pFormatCtx->streams[anim->videoStream];
- AVRational time_base = v_st->time_base;
- AVRational frame_rate = av_guess_frame_rate(anim->pFormatCtx, v_st, NULL);
- return av_q2d(av_inv_q(av_mul_q(frame_rate, time_base)));
- ;
-}
-
static int64_t ffmpeg_get_seek_pts(struct anim *anim, int64_t pts_to_search)
{
/* Step back half a frame position to make sure that we get the requested
@@ -1035,75 +1128,41 @@ static int64_t ffmpeg_get_pts_to_search(struct anim *anim,
return pts_to_search;
}
-/* Check if the pts will get us the same frame that we already have in memory from last decode. */
-static bool ffmpeg_pts_matches_last_frame(struct anim *anim, int64_t pts_to_search)
+static bool ffmpeg_is_first_frame_decode(struct anim *anim)
{
- if (anim->pFrame && anim->cur_frame_final) {
- int64_t diff = pts_to_search - anim->cur_pts;
- return diff >= 0 && diff < anim->pFrame->pkt_duration;
- }
-
- return false;
+ return anim->pFrame_complete == false;
}
-static bool ffmpeg_is_first_frame_decode(struct anim *anim, int position)
+static void ffmpeg_scan_log(struct anim *anim, int64_t pts_to_search)
{
- return position == 0 && anim->cur_position == -1;
+ int64_t frame_pts_start = av_get_pts_from_frame(anim->pFrame);
+ int64_t frame_pts_end = frame_pts_start + anim->pFrame->pkt_duration;
+ av_log(anim->pFormatCtx,
+ AV_LOG_DEBUG,
+ " SCAN WHILE: PTS range %" PRId64 " - %" PRId64 " in search of %" PRId64 "\n",
+ frame_pts_start,
+ frame_pts_end,
+ pts_to_search);
}
/* Decode frames one by one until its PTS matches pts_to_search. */
static void ffmpeg_decode_video_frame_scan(struct anim *anim, int64_t pts_to_search)
{
- av_log(anim->pFormatCtx, AV_LOG_DEBUG, "FETCH: within current GOP\n");
-
- av_log(anim->pFormatCtx,
- AV_LOG_DEBUG,
- "SCAN start: considering pts=%" PRId64 " in search of %" PRId64 "\n",
- (int64_t)anim->cur_pts,
- (int64_t)pts_to_search);
-
- int64_t start_gop_frame = anim->cur_key_frame_pts;
- bool scan_fuzzy = false;
-
- while (anim->cur_pts < pts_to_search) {
- av_log(anim->pFormatCtx,
- AV_LOG_DEBUG,
- " WHILE: pts=%" PRId64 " in search of %" PRId64 "\n",
- (int64_t)anim->cur_pts,
- (int64_t)pts_to_search);
- if (!ffmpeg_decode_video_frame(anim)) {
- break;
+ const int64_t start_gop_frame = anim->cur_key_frame_pts;
+ bool decode_error = false;
+
+ while (!decode_error && anim->cur_pts < pts_to_search) {
+ ffmpeg_scan_log(anim, pts_to_search);
+ ffmpeg_double_buffer_backup_frame_store(anim, pts_to_search);
+ decode_error = ffmpeg_decode_video_frame(anim) < 1;
+
+ /* We should not get a new GOP keyframe while scanning if seeking is working as intended.
+ * If this condition triggers, there may be an error in our seeking code.
+ * NOTE: This seems to happen if the DTS value is used for seeking in ffmpeg internally. There
+ * seems to be no good way to handle such a case. */
+ if (anim->seek_before_decode && start_gop_frame != anim->cur_key_frame_pts) {
+ av_log(anim->pFormatCtx, AV_LOG_ERROR, "SCAN: Frame belongs to an unexpected GOP!\n");
}
-
- if (start_gop_frame != anim->cur_key_frame_pts) {
- break;
- }
-
- if (anim->cur_pts < pts_to_search &&
- anim->cur_pts + anim->pFrame->pkt_duration > pts_to_search) {
- /* Our estimate of the pts was a bit off, but we have the frame we want. */
- av_log(anim->pFormatCtx, AV_LOG_DEBUG, "SCAN fuzzy frame match\n");
- scan_fuzzy = true;
- break;
- }
- }
-
- if (start_gop_frame != anim->cur_key_frame_pts) {
- /* We went into an other GOP frame. This should never happen as we should have positioned us
- * correctly by seeking into the GOP frame that contains the frame we want. */
- av_log(anim->pFormatCtx,
- AV_LOG_ERROR,
- "SCAN failed: completely lost in stream, "
- "bailing out at PTS=%" PRId64 ", searching for PTS=%" PRId64 "\n",
- (int64_t)anim->cur_pts,
- (int64_t)pts_to_search);
- }
-
- if (scan_fuzzy || anim->cur_pts == pts_to_search) {
- av_log(anim->pFormatCtx, AV_LOG_DEBUG, "SCAN HAPPY: we found our PTS!\n");
- }
- else {
- av_log(anim->pFormatCtx, AV_LOG_ERROR, "SCAN UNHAPPY: PTS not matched!\n");
}
}
@@ -1299,6 +1358,7 @@ static int ffmpeg_seek_to_key_frame(struct anim *anim,
/* Flush the internal buffers of ffmpeg. This needs to be done after seeking to avoid decoding
* errors. */
avcodec_flush_buffers(anim->pCodecCtx);
+ ffmpeg_double_buffer_backup_frame_clear(anim);
anim->cur_pts = -1;
@@ -1310,6 +1370,13 @@ static int ffmpeg_seek_to_key_frame(struct anim *anim,
return ret;
}
+static bool ffmpeg_must_seek(struct anim *anim, int position)
+{
+ bool must_seek = position != anim->cur_position + 1 || ffmpeg_is_first_frame_decode(anim);
+ anim->seek_before_decode = must_seek;
+ return must_seek;
+}
+
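A brief illustration of the seek decision with hypothetical positions (not taken from the patch):

/* If the previous fetch decoded position 41 (anim->cur_position == 41), requesting 42 continues
 * decoding without a seek; requesting 10 or 50 sets seek_before_decode and goes through
 * ffmpeg_seek_to_key_frame(). The very first fetch always seeks, since no frame has been
 * decoded yet (ffmpeg_is_first_frame_decode() returns true). */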
static ImBuf *ffmpeg_fetchibuf(struct anim *anim, int position, IMB_Timecode_Type tc)
{
if (anim == NULL) {
@@ -1334,23 +1401,11 @@ static ImBuf *ffmpeg_fetchibuf(struct anim *anim, int position, IMB_Timecode_Typ
frame_rate,
start_pts);
- if (ffmpeg_pts_matches_last_frame(anim, pts_to_search)) {
- av_log(anim->pFormatCtx,
- AV_LOG_DEBUG,
- "FETCH: frame repeat: pts: %" PRId64 "\n",
- (int64_t)anim->cur_pts);
- IMB_refImBuf(anim->cur_frame_final);
- anim->cur_position = position;
- return anim->cur_frame_final;
+ if (ffmpeg_must_seek(anim, position)) {
+ ffmpeg_seek_to_key_frame(anim, position, tc_index, pts_to_search);
}
- if (position == anim->cur_position + 1 || ffmpeg_is_first_frame_decode(anim, position)) {
- av_log(anim->pFormatCtx, AV_LOG_DEBUG, "FETCH: no seek necessary, just continue...\n");
- ffmpeg_decode_video_frame(anim);
- }
- else if (ffmpeg_seek_to_key_frame(anim, position, tc_index, pts_to_search) >= 0) {
- ffmpeg_decode_video_frame_scan(anim, pts_to_search);
- }
+ ffmpeg_decode_video_frame_scan(anim, pts_to_search);
IMB_freeImBuf(anim->cur_frame_final);
@@ -1387,7 +1442,18 @@ static ImBuf *ffmpeg_fetchibuf(struct anim *anim, int position, IMB_Timecode_Typ
anim->cur_frame_final->rect_colorspace = colormanage_colorspace_get_named(anim->colorspace);
- ffmpeg_postprocess(anim);
+ AVFrame *final_frame = ffmpeg_frame_by_pts_get(anim, pts_to_search);
+ if (final_frame == NULL) {
+ /* No valid frame was decoded for requested PTS, fall back on most recent decoded frame, even
+ * if it is incorrect. */
+ final_frame = ffmpeg_double_buffer_frame_fallback_get(anim);
+ }
+
+ /* Even with the fallback from above it is possible that the currently decoded frame is NULL.
+ * In this case skip post-processing and return the current image buffer. */
+ if (final_frame != NULL) {
+ ffmpeg_postprocess(anim, final_frame);
+ }
anim->cur_position = position;
@@ -1408,6 +1474,7 @@ static void free_anim_ffmpeg(struct anim *anim)
av_packet_free(&anim->cur_packet);
av_frame_free(&anim->pFrame);
+ av_frame_free(&anim->pFrame_backup);
av_frame_free(&anim->pFrameRGB);
av_frame_free(&anim->pFrameDeinterlaced);
diff --git a/source/blender/imbuf/intern/bmp.c b/source/blender/imbuf/intern/bmp.c
index 967cbd04813..af9b62f1a74 100644
--- a/source/blender/imbuf/intern/bmp.c
+++ b/source/blender/imbuf/intern/bmp.c
@@ -178,7 +178,6 @@ ImBuf *imb_bmp_decode(const uchar *mem, size_t size, int flags, char colorspace[
const char(*palette)[4] = (const char(*)[4])(mem + palette_offset);
const int startmask = ((1 << depth) - 1) << 8;
for (size_t i = y; i > 0; i--) {
- int index;
int bitoffs = 8;
int bitmask = startmask;
int nbytes = 0;
@@ -189,7 +188,7 @@ ImBuf *imb_bmp_decode(const uchar *mem, size_t size, int flags, char colorspace[
for (size_t j = x; j > 0; j--) {
bitoffs -= depth;
bitmask >>= depth;
- index = (bmp[0] & bitmask) >> bitoffs;
+ const int index = (bmp[0] & bitmask) >> bitoffs;
pcol = palette[index];
/* intentionally BGR -> RGB */
rect[0] = pcol[2];
diff --git a/source/blender/imbuf/intern/colormanagement.c b/source/blender/imbuf/intern/colormanagement.c
index 33873b5daa7..b62bdd5521d 100644
--- a/source/blender/imbuf/intern/colormanagement.c
+++ b/source/blender/imbuf/intern/colormanagement.c
@@ -2213,10 +2213,11 @@ void IMB_colormanagement_imbuf_to_byte_texture(unsigned char *out_buffer,
const struct ImBuf *ibuf,
const bool store_premultiplied)
{
- /* Byte buffer storage, only for sRGB and data texture since other
+ /* Byte buffer storage, only for sRGB, scene linear and data texture since other
* color space conversions can't be done on the GPU. */
BLI_assert(ibuf->rect && ibuf->rect_float == NULL);
BLI_assert(IMB_colormanagement_space_is_srgb(ibuf->rect_colorspace) ||
+ IMB_colormanagement_space_is_scene_linear(ibuf->rect_colorspace) ||
IMB_colormanagement_space_is_data(ibuf->rect_colorspace));
const unsigned char *in_buffer = (unsigned char *)ibuf->rect;
@@ -2481,22 +2482,21 @@ static ImBuf *imbuf_ensure_editable(ImBuf *ibuf, ImBuf *colormanaged_ibuf, bool
IMB_metadata_copy(colormanaged_ibuf, ibuf);
return colormanaged_ibuf;
}
- else {
- /* Render pipeline is constructing image buffer itself,
- * but it's re-using byte and float buffers from render result make copy of this buffers
- * here sine this buffers would be transformed to other color space here. */
- if (ibuf->rect && (ibuf->mall & IB_rect) == 0) {
- ibuf->rect = MEM_dupallocN(ibuf->rect);
- ibuf->mall |= IB_rect;
- }
- if (ibuf->rect_float && (ibuf->mall & IB_rectfloat) == 0) {
- ibuf->rect_float = MEM_dupallocN(ibuf->rect_float);
- ibuf->mall |= IB_rectfloat;
- }
+ /* The render pipeline is constructing the image buffer itself, but it re-uses byte and float
+ * buffers from the render result. Make a copy of these buffers here, since they would be
+ * transformed to another color space below. */
+ if (ibuf->rect && (ibuf->mall & IB_rect) == 0) {
+ ibuf->rect = MEM_dupallocN(ibuf->rect);
+ ibuf->mall |= IB_rect;
+ }
- return ibuf;
+ if (ibuf->rect_float && (ibuf->mall & IB_rectfloat) == 0) {
+ ibuf->rect_float = MEM_dupallocN(ibuf->rect_float);
+ ibuf->mall |= IB_rectfloat;
}
+
+ return ibuf;
}
ImBuf *IMB_colormanagement_imbuf_for_write(ImBuf *ibuf,
@@ -3175,6 +3175,11 @@ const char *IMB_colormanagement_colorspace_get_indexed_name(int index)
return "";
}
+const char *IMB_colormanagement_colorspace_get_name(const ColorSpace *colorspace)
+{
+ return colorspace->name;
+}
+
void IMB_colormanagement_colorspace_from_ibuf_ftype(
ColorManagedColorspaceSettings *colorspace_settings, ImBuf *ibuf)
{
diff --git a/source/blender/imbuf/intern/colormanagement_inline.c b/source/blender/imbuf/intern/colormanagement_inline.c
index 668307ec802..3c6c0f5fd0a 100644
--- a/source/blender/imbuf/intern/colormanagement_inline.c
+++ b/source/blender/imbuf/intern/colormanagement_inline.c
@@ -11,6 +11,11 @@
#include "BLI_math_vector.h"
#include "IMB_colormanagement_intern.h"
+void IMB_colormanagement_get_luminance_coefficients(float r_rgb[3])
+{
+ copy_v3_v3(r_rgb, imbuf_luma_coefficients);
+}
+
float IMB_colormanagement_get_luminance(const float rgb[3])
{
return dot_v3v3(imbuf_luma_coefficients, rgb);
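For reference, a worked example of the dot product above, assuming Rec.709 luma coefficients (the real values come from the active OCIO configuration, so the numbers are illustrative only):

/* Assuming imbuf_luma_coefficients == {0.2126f, 0.7152f, 0.0722f}:
 *   IMB_colormanagement_get_luminance((float[3]){0.5f, 0.25f, 1.0f})
 *     = 0.2126 * 0.5 + 0.7152 * 0.25 + 0.0722 * 1.0
 *     = 0.3573 */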
diff --git a/source/blender/imbuf/intern/divers.c b/source/blender/imbuf/intern/divers.c
index 588c92d748d..13c8f0887b3 100644
--- a/source/blender/imbuf/intern/divers.c
+++ b/source/blender/imbuf/intern/divers.c
@@ -695,9 +695,6 @@ void IMB_buffer_byte_from_byte(uchar *rect_to,
void IMB_rect_from_float(ImBuf *ibuf)
{
- float *buffer;
- const char *from_colorspace;
-
/* verify we have a float buffer */
if (ibuf->rect_float == NULL) {
return;
@@ -710,24 +707,21 @@ void IMB_rect_from_float(ImBuf *ibuf)
}
}
- if (ibuf->float_colorspace == NULL) {
- from_colorspace = IMB_colormanagement_role_colorspace_name_get(COLOR_ROLE_SCENE_LINEAR);
- }
- else {
- from_colorspace = ibuf->float_colorspace->name;
- }
+ const char *from_colorspace = (ibuf->float_colorspace == NULL) ?
+ IMB_colormanagement_role_colorspace_name_get(
+ COLOR_ROLE_SCENE_LINEAR) :
+ ibuf->float_colorspace->name;
+ const char *to_colorspace = (ibuf->rect_colorspace == NULL) ?
+ IMB_colormanagement_role_colorspace_name_get(
+ COLOR_ROLE_DEFAULT_BYTE) :
+ ibuf->rect_colorspace->name;
- buffer = MEM_dupallocN(ibuf->rect_float);
+ float *buffer = MEM_dupallocN(ibuf->rect_float);
/* first make float buffer in byte space */
const bool predivide = IMB_alpha_affects_rgb(ibuf);
- IMB_colormanagement_transform(buffer,
- ibuf->x,
- ibuf->y,
- ibuf->channels,
- from_colorspace,
- ibuf->rect_colorspace->name,
- predivide);
+ IMB_colormanagement_transform(
+ buffer, ibuf->x, ibuf->y, ibuf->channels, from_colorspace, to_colorspace, predivide);
/* convert from float's premul alpha to byte's straight alpha */
if (IMB_alpha_affects_rgb(ibuf)) {
diff --git a/source/blender/imbuf/intern/imageprocess.c b/source/blender/imbuf/intern/imageprocess.c
index ec25b67af5f..13bf3697946 100644
--- a/source/blender/imbuf/intern/imageprocess.c
+++ b/source/blender/imbuf/intern/imageprocess.c
@@ -22,7 +22,6 @@
#include "IMB_colormanagement.h"
#include "IMB_imbuf.h"
#include "IMB_imbuf_types.h"
-#include <math.h>
void IMB_convert_rgba_to_abgr(struct ImBuf *ibuf)
{
diff --git a/source/blender/imbuf/intern/indexer.c b/source/blender/imbuf/intern/indexer.c
index cbc5d984755..00396c01d99 100644
--- a/source/blender/imbuf/intern/indexer.c
+++ b/source/blender/imbuf/intern/indexer.c
@@ -1098,6 +1098,7 @@ static int indexer_performance_get_decode_rate(FFmpegIndexBuilderContext *contex
while (av_read_frame(context->iFormatCtx, packet) >= 0) {
if (packet->stream_index != context->videoStream) {
+ av_packet_unref(packet);
continue;
}
@@ -1121,6 +1122,7 @@ static int indexer_performance_get_decode_rate(FFmpegIndexBuilderContext *contex
if (end > start + time_period) {
break;
}
+ av_packet_unref(packet);
}
av_packet_free(&packet);
@@ -1145,6 +1147,7 @@ static int indexer_performance_get_max_gop_size(FFmpegIndexBuilderContext *conte
while (av_read_frame(context->iFormatCtx, packet) >= 0) {
if (packet->stream_index != context->videoStream) {
+ av_packet_unref(packet);
continue;
}
packet_index++;
@@ -1158,6 +1161,7 @@ static int indexer_performance_get_max_gop_size(FFmpegIndexBuilderContext *conte
if (packet_index > packets_max) {
break;
}
+ av_packet_unref(packet);
}
av_packet_free(&packet);
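The two hunks above follow FFmpeg's ownership rule: every packet filled by av_read_frame() must be unreferenced before the AVPacket is reused, including on the skip path. A minimal sketch of the pattern (function name and context are illustrative, not part of indexer.c):

#include <libavformat/avformat.h>

static void drain_video_packets(AVFormatContext *fmt_ctx, int video_stream)
{
  AVPacket *packet = av_packet_alloc();
  while (av_read_frame(fmt_ctx, packet) >= 0) {
    if (packet->stream_index != video_stream) {
      av_packet_unref(packet); /* Required on the skip path too, otherwise the data leaks. */
      continue;
    }
    /* ... inspect or decode the packet ... */
    av_packet_unref(packet); /* Drop the reference before the next av_read_frame(). */
  }
  av_packet_free(&packet);
}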
diff --git a/source/blender/imbuf/intern/jpeg.c b/source/blender/imbuf/intern/jpeg.c
index cffa61977f7..06f9202a1c6 100644
--- a/source/blender/imbuf/intern/jpeg.c
+++ b/source/blender/imbuf/intern/jpeg.c
@@ -524,8 +524,8 @@ struct ImBuf *imb_thumbnail_jpeg(const char *filepath,
unsigned int i = JPEG_APP1_MAX;
/* All EXIF data is within this 64K header segment. Skip ahead until next SOI for thumbnail. */
while (!((fgetc(infile) == JPEG_MARKER_MSB) && (fgetc(infile) == JPEG_MARKER_SOI)) &&
- !feof(infile) && i--)
- ;
+ !feof(infile) && i--) {
+ }
if (i > 0 && !feof(infile)) {
/* We found a JPEG thumbnail inside this image. */
ImBuf *ibuf = NULL;
diff --git a/source/blender/imbuf/intern/openexr/openexr_api.cpp b/source/blender/imbuf/intern/openexr/openexr_api.cpp
index 0414fa1268d..eb6ce5df794 100644
--- a/source/blender/imbuf/intern/openexr/openexr_api.cpp
+++ b/source/blender/imbuf/intern/openexr/openexr_api.cpp
@@ -2008,7 +2008,7 @@ struct ImBuf *imb_load_openexr(const unsigned char *mem,
printf("Error: can't process EXR multilayer file\n");
}
else {
- const int is_alpha = exr_has_alpha(*file);
+ const bool is_alpha = exr_has_alpha(*file);
ibuf = IMB_allocImBuf(width, height, is_alpha ? 32 : 24, 0);
ibuf->flags |= exr_is_half_float(*file) ? IB_halffloat : 0;
diff --git a/source/blender/imbuf/intern/readimage.c b/source/blender/imbuf/intern/readimage.c
index 4b433836767..b33e9dc4e0e 100644
--- a/source/blender/imbuf/intern/readimage.c
+++ b/source/blender/imbuf/intern/readimage.c
@@ -209,7 +209,7 @@ static void imb_cache_filename(char *filepath, const char *name, int flags)
ImBuf *IMB_loadiffname(const char *filepath, int flags, char colorspace[IM_MAX_SPACE])
{
ImBuf *ibuf;
- int file, a;
+ int file;
char filepath_tx[IMB_FILENAME_SIZE];
BLI_assert(!BLI_path_is_rel(filepath));
@@ -226,7 +226,7 @@ ImBuf *IMB_loadiffname(const char *filepath, int flags, char colorspace[IM_MAX_S
if (ibuf) {
BLI_strncpy(ibuf->name, filepath, sizeof(ibuf->name));
BLI_strncpy(ibuf->cachename, filepath_tx, sizeof(ibuf->cachename));
- for (a = 1; a < ibuf->miptot; a++) {
+ for (int a = 1; a < ibuf->miptot; a++) {
BLI_strncpy(ibuf->mipmap[a - 1]->cachename, filepath_tx, sizeof(ibuf->cachename));
}
}
diff --git a/source/blender/imbuf/intern/tiff.c b/source/blender/imbuf/intern/tiff.c
index 2f13ef409e3..1989566fc32 100644
--- a/source/blender/imbuf/intern/tiff.c
+++ b/source/blender/imbuf/intern/tiff.c
@@ -460,7 +460,7 @@ static int imb_read_tiff_pixels(ImBuf *ibuf, TIFF *image)
scanline_contig_16bit(tmpibuf->rect_float + ib_offset, sbuf, ibuf->x, spp);
}
}
- /* separate channels: RRRGGGBBB */
+ /* Separate channels: RRRGGGBBB. */
}
else if (config == PLANARCONFIG_SEPARATE) {
@@ -549,10 +549,8 @@ ImBuf *imb_loadtiff(const unsigned char *mem,
ImbTIFFMemFile memFile;
uint32_t width, height;
char *format = NULL;
- int level;
short spp;
int ib_depth;
- int found;
/* Check whether or not we have a TIFF file. */
if (imb_is_a_tiff(mem, size) == 0) {
@@ -574,7 +572,7 @@ ImBuf *imb_loadtiff(const unsigned char *mem,
TIFFGetField(image, TIFFTAG_IMAGELENGTH, &height);
TIFFGetField(image, TIFFTAG_SAMPLESPERPIXEL, &spp);
- ib_depth = (spp == 3) ? 24 : 32;
+ ib_depth = spp * 8;
ibuf = IMB_allocImBuf(width, height, ib_depth, 0);
if (ibuf) {
@@ -592,8 +590,7 @@ ImBuf *imb_loadtiff(const unsigned char *mem,
if (flags & IB_alphamode_detect) {
if (spp == 4) {
unsigned short extra, *extraSampleTypes;
-
- found = TIFFGetField(image, TIFFTAG_EXTRASAMPLES, &extra, &extraSampleTypes);
+ const int found = TIFFGetField(image, TIFFTAG_EXTRASAMPLES, &extra, &extraSampleTypes);
if (found && (extraSampleTypes[0] == EXTRASAMPLE_ASSOCALPHA)) {
ibuf->flags |= IB_alphamode_premul;
@@ -617,7 +614,7 @@ ImBuf *imb_loadtiff(const unsigned char *mem,
int numlevel = TIFFNumberOfDirectories(image);
/* create empty mipmap levels in advance */
- for (level = 0; level < numlevel; level++) {
+ for (int level = 0; level < numlevel; level++) {
if (!TIFFSetDirectory(image, level)) {
break;
}
diff --git a/source/blender/imbuf/intern/transform.cc b/source/blender/imbuf/intern/transform.cc
index 1499c1071e3..d64a48569ae 100644
--- a/source/blender/imbuf/intern/transform.cc
+++ b/source/blender/imbuf/intern/transform.cc
@@ -259,7 +259,6 @@ class WrapRepeatUV : public BaseUVWrapping {
* \brief Read a sample from an image buffer.
*
* A sampler can read from an image buffer.
- *
*/
template<
/** \brief Interpolation mode to use when sampling. */
diff --git a/source/blender/imbuf/intern/util_gpu.c b/source/blender/imbuf/intern/util_gpu.c
index 5feb0ceb515..6f1275e1812 100644
--- a/source/blender/imbuf/intern/util_gpu.c
+++ b/source/blender/imbuf/intern/util_gpu.c
@@ -14,6 +14,7 @@
#include "BKE_global.h"
#include "GPU_capabilities.h"
+#include "GPU_state.h"
#include "GPU_texture.h"
#include "IMB_colormanagement.h"
@@ -22,39 +23,62 @@
/* gpu ibuf utils */
+static bool imb_is_grayscale_texture_format_compatible(const ImBuf *ibuf)
+{
+ if (ibuf->planes > 8) {
+ return false;
+ }
+ /* Only image buffers with a color space that does not modify the chrominance of the texture
+ * data relative to the scene color space can be uploaded as single-channel textures. */
+ if (IMB_colormanagement_space_is_data(ibuf->rect_colorspace) ||
+ IMB_colormanagement_space_is_srgb(ibuf->rect_colorspace) ||
+ IMB_colormanagement_space_is_scene_linear(ibuf->rect_colorspace)) {
+ return true;
+ }
+ return false;
+}
+
static void imb_gpu_get_format(const ImBuf *ibuf,
bool high_bitdepth,
+ bool use_grayscale,
eGPUDataFormat *r_data_format,
eGPUTextureFormat *r_texture_format)
{
const bool float_rect = (ibuf->rect_float != NULL);
+ const bool is_grayscale = use_grayscale && imb_is_grayscale_texture_format_compatible(ibuf);
if (float_rect) {
/* Float. */
const bool use_high_bitdepth = (!(ibuf->flags & IB_halffloat) && high_bitdepth);
*r_data_format = GPU_DATA_FLOAT;
- *r_texture_format = use_high_bitdepth ? GPU_RGBA32F : GPU_RGBA16F;
+ *r_texture_format = is_grayscale ? (use_high_bitdepth ? GPU_R32F : GPU_R16F) :
+ (use_high_bitdepth ? GPU_RGBA32F : GPU_RGBA16F);
}
else {
if (IMB_colormanagement_space_is_data(ibuf->rect_colorspace) ||
IMB_colormanagement_space_is_scene_linear(ibuf->rect_colorspace)) {
/* Non-color data or scene linear, just store buffer as is. */
*r_data_format = GPU_DATA_UBYTE;
- *r_texture_format = GPU_RGBA8;
+ *r_texture_format = (is_grayscale) ? GPU_R8 : GPU_RGBA8;
}
else if (IMB_colormanagement_space_is_srgb(ibuf->rect_colorspace)) {
/* sRGB, store as byte texture that the GPU can decode directly. */
- *r_data_format = GPU_DATA_UBYTE;
- *r_texture_format = GPU_SRGB8_A8;
+ *r_data_format = (is_grayscale) ? GPU_DATA_FLOAT : GPU_DATA_UBYTE;
+ *r_texture_format = (is_grayscale) ? GPU_R16F : GPU_SRGB8_A8;
}
else {
/* Other colorspace, store as half float texture to avoid precision loss. */
*r_data_format = GPU_DATA_FLOAT;
- *r_texture_format = GPU_RGBA16F;
+ *r_texture_format = (is_grayscale) ? GPU_R16F : GPU_RGBA16F;
}
}
}
+static const char *imb_gpu_get_swizzle(const ImBuf *ibuf)
+{
+ return imb_is_grayscale_texture_format_compatible(ibuf) ? "rrra" : "rgba";
+}
+
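The "rrra" swizzle is what keeps single-channel uploads looking like grayscale RGBA to shaders; a short note on the assumption behind it:

/* Sampling an R-only texture normally yields (R, 0, 0, 1). Remapping it with
 * GPU_texture_swizzle_set(tex, "rrra") makes shaders see (R, R, R, 1) -- an opaque grayscale
 * image -- without any shader changes. Full RGBA uploads keep the identity "rgba" swizzle. */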
/* Return false if no suitable format was found. */
#ifdef WITH_DDS
static bool IMB_gpu_get_compressed_format(const ImBuf *ibuf, eGPUTextureFormat *r_texture_format)
@@ -90,7 +114,8 @@ static void *imb_gpu_get_data(const ImBuf *ibuf,
const bool store_premultiplied,
bool *r_freedata)
{
- const bool is_float_rect = (ibuf->rect_float != NULL);
+ bool is_float_rect = (ibuf->rect_float != NULL);
+ const bool is_grayscale = imb_is_grayscale_texture_format_compatible(ibuf);
void *data_rect = (is_float_rect) ? (void *)ibuf->rect_float : (void *)ibuf->rect;
bool freedata = false;
@@ -121,7 +146,8 @@ static void *imb_gpu_get_data(const ImBuf *ibuf,
else if (IMB_colormanagement_space_is_srgb(ibuf->rect_colorspace) ||
IMB_colormanagement_space_is_scene_linear(ibuf->rect_colorspace)) {
/* sRGB or scene linear, store as byte texture that the GPU can decode directly. */
- data_rect = MEM_mallocN(sizeof(uchar[4]) * ibuf->x * ibuf->y, __func__);
+ data_rect = MEM_mallocN(
+ (is_grayscale ? sizeof(float[4]) : sizeof(uchar[4])) * ibuf->x * ibuf->y, __func__);
*r_freedata = freedata = true;
if (data_rect == NULL) {
@@ -133,8 +159,16 @@ static void *imb_gpu_get_data(const ImBuf *ibuf,
* this allows us to use sRGB texture formats and preserves color values in
* zero alpha areas, and appears generally closer to what game engines that we
* want to be compatible with do. */
- IMB_colormanagement_imbuf_to_byte_texture(
- (uchar *)data_rect, 0, 0, ibuf->x, ibuf->y, ibuf, store_premultiplied);
+ if (is_grayscale) {
+ /* Convert to a float buffer here; the grayscale data is then uploaded as single-channel
+ * half floats, halving the texture size compared to byte RGBA. */
+ IMB_colormanagement_imbuf_to_float_texture(
+ (float *)data_rect, 0, 0, ibuf->x, ibuf->y, ibuf, store_premultiplied);
+ is_float_rect = true;
+ }
+ else {
+ IMB_colormanagement_imbuf_to_byte_texture(
+ (uchar *)data_rect, 0, 0, ibuf->x, ibuf->y, ibuf, store_premultiplied);
+ }
}
else {
/* Other colorspace, store as float texture to avoid precision loss. */
@@ -167,21 +201,52 @@ static void *imb_gpu_get_data(const ImBuf *ibuf,
}
data_rect = (is_float_rect) ? (void *)scale_ibuf->rect_float : (void *)scale_ibuf->rect;
- *r_freedata = true;
+ *r_freedata = freedata = true;
/* Steal the rescaled buffer to avoid double free. */
scale_ibuf->rect_float = NULL;
scale_ibuf->rect = NULL;
IMB_freeImBuf(scale_ibuf);
}
+
+ /* Pack first channel data manually at the start of the buffer. */
+ if (is_grayscale) {
+ void *src_rect = data_rect;
+
+ if (freedata == false) {
+ data_rect = MEM_mallocN((is_float_rect ? sizeof(float) : sizeof(uchar)) * ibuf->x * ibuf->y,
+ __func__);
+ *r_freedata = freedata = true;
+ }
+
+ if (data_rect == NULL) {
+ return NULL;
+ }
+
+ if (is_float_rect) {
+ for (uint64_t i = 0; i < ibuf->x * ibuf->y; i++) {
+ ((float *)data_rect)[i] = ((float *)src_rect)[i * 4];
+ }
+ }
+ else {
+ for (uint64_t i = 0; i < ibuf->x * ibuf->y; i++) {
+ ((uchar *)data_rect)[i] = ((uchar *)src_rect)[i * 4];
+ }
+ }
+ }
return data_rect;
}
-GPUTexture *IMB_touch_gpu_texture(
- const char *name, ImBuf *ibuf, int w, int h, int layers, bool use_high_bitdepth)
+GPUTexture *IMB_touch_gpu_texture(const char *name,
+ ImBuf *ibuf,
+ int w,
+ int h,
+ int layers,
+ bool use_high_bitdepth,
+ bool use_grayscale)
{
eGPUDataFormat data_format;
eGPUTextureFormat tex_format;
- imb_gpu_get_format(ibuf, use_high_bitdepth, &data_format, &tex_format);
+ imb_gpu_get_format(ibuf, use_high_bitdepth, use_grayscale, &data_format, &tex_format);
GPUTexture *tex;
if (layers > 0) {
@@ -191,6 +256,7 @@ GPUTexture *IMB_touch_gpu_texture(
tex = GPU_texture_create_2d(name, w, h, 9999, tex_format, NULL);
}
+ GPU_texture_swizzle_set(tex, imb_gpu_get_swizzle(ibuf));
GPU_texture_anisotropic_filter(tex, true);
return tex;
}
@@ -203,6 +269,7 @@ void IMB_update_gpu_texture_sub(GPUTexture *tex,
int w,
int h,
bool use_high_bitdepth,
+ bool use_grayscale,
bool use_premult)
{
const bool do_rescale = (ibuf->x != w || ibuf->y != h);
@@ -210,7 +277,7 @@ void IMB_update_gpu_texture_sub(GPUTexture *tex,
eGPUDataFormat data_format;
eGPUTextureFormat tex_format;
- imb_gpu_get_format(ibuf, use_high_bitdepth, &data_format, &tex_format);
+ imb_gpu_get_format(ibuf, use_high_bitdepth, use_grayscale, &data_format, &tex_format);
bool freebuf = false;
@@ -266,7 +333,7 @@ GPUTexture *IMB_create_gpu_texture(const char *name,
eGPUDataFormat data_format;
eGPUTextureFormat tex_format;
- imb_gpu_get_format(ibuf, use_high_bitdepth, &data_format, &tex_format);
+ imb_gpu_get_format(ibuf, use_high_bitdepth, true, &data_format, &tex_format);
bool freebuf = false;
@@ -282,6 +349,7 @@ GPUTexture *IMB_create_gpu_texture(const char *name,
void *data = imb_gpu_get_data(ibuf, do_rescale, size, use_premult, &freebuf);
GPU_texture_update(tex, data_format, data);
+ GPU_texture_swizzle_set(tex, imb_gpu_get_swizzle(ibuf));
GPU_texture_anisotropic_filter(tex, true);
if (freebuf) {
@@ -290,3 +358,15 @@ GPUTexture *IMB_create_gpu_texture(const char *name,
return tex;
}
+
+eGPUTextureFormat IMB_gpu_get_texture_format(const ImBuf *ibuf,
+ bool high_bitdepth,
+ bool use_grayscale)
+{
+ eGPUTextureFormat gpu_texture_format;
+ eGPUDataFormat gpu_data_format;
+
+ imb_gpu_get_format(ibuf, high_bitdepth, use_grayscale, &gpu_data_format, &gpu_texture_format);
+
+ return gpu_texture_format;
+}
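A minimal sketch of a hypothetical call site for the new query helper (everything except the identifiers introduced in this patch is an assumption):

/* Decide whether the buffer will be uploaded as a single channel before creating the texture. */
const eGPUTextureFormat format = IMB_gpu_get_texture_format(ibuf, true, true);
if (format == GPU_R8 || format == GPU_R16F || format == GPU_R32F) {
  /* Grayscale path: one channel is uploaded and the "rrra" swizzle restores RGBA sampling. */
}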