author     Vittorio Giovara <vittorio.giovara@gmail.com>   2015-06-08 13:00:49 +0100
committer  Vittorio Giovara <vittorio.giovara@gmail.com>   2015-06-10 14:05:32 +0100
commit     7061bf0925a7d6c5899bbe710b2b8cdae9005274 (patch)
tree       cbde378e36dea4df5665213617e45714faac1221 /libavcodec
parent     529c05698e88b057f0bea61e0d85f2b42925b5ea (diff)
mpegvideo: Move Picture-related functions to a separate file
Diffstat (limited to 'libavcodec')
-rw-r--r--  libavcodec/Makefile        2
-rw-r--r--  libavcodec/mpegpicture.c   424
-rw-r--r--  libavcodec/mpegpicture.h   109
-rw-r--r--  libavcodec/mpegvideo.c     397
-rw-r--r--  libavcodec/mpegvideo.h     78
5 files changed, 536 insertions, 474 deletions
diff --git a/libavcodec/Makefile b/libavcodec/Makefile
index 629d25d..58f548a 100644
--- a/libavcodec/Makefile
+++ b/libavcodec/Makefile
@@ -77,7 +77,7 @@ OBJS-$(CONFIG_MPEGAUDIODSP) += mpegaudiodsp.o \
mpegaudiodsp_float.o
OBJS-$(CONFIG_MPEGVIDEO) += mpegvideo.o mpegvideodsp.o rl.o \
mpegvideo_motion.o mpegutils.o \
- mpegvideodata.o
+ mpegvideodata.o mpegpicture.o
OBJS-$(CONFIG_MPEGVIDEOENC) += mpegvideo_enc.o mpeg12data.o \
motion_est.o ratecontrol.o \
mpegvideoencdsp.o
diff --git a/libavcodec/mpegpicture.c b/libavcodec/mpegpicture.c
new file mode 100644
index 0000000..9df1415
--- /dev/null
+++ b/libavcodec/mpegpicture.c
@@ -0,0 +1,424 @@
+/*
+ * Mpeg video formats-related picture management functions
+ *
+ * This file is part of Libav.
+ *
+ * Libav is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * Libav is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with Libav; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include <stdint.h>
+
+#include "libavutil/avassert.h"
+#include "libavutil/common.h"
+
+#include "avcodec.h"
+#include "motion_est.h"
+#include "mpegpicture.h"
+#include "mpegutils.h"
+
+static int make_tables_writable(Picture *pic)
+{
+ int ret, i;
+#define MAKE_WRITABLE(table) \
+do {\
+ if (pic->table &&\
+ (ret = av_buffer_make_writable(&pic->table)) < 0)\
+ return ret;\
+} while (0)
+
+ MAKE_WRITABLE(mb_var_buf);
+ MAKE_WRITABLE(mc_mb_var_buf);
+ MAKE_WRITABLE(mb_mean_buf);
+ MAKE_WRITABLE(mbskip_table_buf);
+ MAKE_WRITABLE(qscale_table_buf);
+ MAKE_WRITABLE(mb_type_buf);
+
+ for (i = 0; i < 2; i++) {
+ MAKE_WRITABLE(motion_val_buf[i]);
+ MAKE_WRITABLE(ref_index_buf[i]);
+ }
+
+ return 0;
+}
+
+int ff_mpeg_framesize_alloc(AVCodecContext *avctx, MotionEstContext *me,
+ ScratchpadContext *sc, int linesize)
+{
+ int alloc_size = FFALIGN(FFABS(linesize) + 32, 32);
+
+ // edge emu needs blocksize + filter length - 1
+ // (= 17x17 for halfpel / 21x21 for h264)
+ // VC1 computes luma and chroma simultaneously and needs 19X19 + 9x9
+ // at uvlinesize. It supports only YUV420 so 24x24 is enough
+ // linesize * interlaced * MBsize
+ FF_ALLOCZ_OR_GOTO(avctx, sc->edge_emu_buffer, alloc_size * 2 * 24,
+ fail);
+
+ FF_ALLOCZ_OR_GOTO(avctx, me->scratchpad, alloc_size * 2 * 16 * 3,
+ fail)
+ me->temp = me->scratchpad;
+ sc->rd_scratchpad = me->scratchpad;
+ sc->b_scratchpad = me->scratchpad;
+ sc->obmc_scratchpad = me->scratchpad + 16;
+
+ return 0;
+fail:
+ av_freep(&sc->edge_emu_buffer);
+ return AVERROR(ENOMEM);
+}
+
+/**
+ * Allocate a frame buffer
+ */
+static int alloc_frame_buffer(AVCodecContext *avctx, Picture *pic,
+ MotionEstContext *me, ScratchpadContext *sc,
+ int chroma_x_shift, int chroma_y_shift,
+ int linesize, int uvlinesize)
+{
+ int edges_needed = av_codec_is_encoder(avctx->codec);
+ int r, ret;
+
+ pic->tf.f = pic->f;
+ if (avctx->codec_id != AV_CODEC_ID_WMV3IMAGE &&
+ avctx->codec_id != AV_CODEC_ID_VC1IMAGE &&
+ avctx->codec_id != AV_CODEC_ID_MSS2) {
+ if (edges_needed) {
+ pic->f->width = avctx->width + 2 * EDGE_WIDTH;
+ pic->f->height = avctx->height + 2 * EDGE_WIDTH;
+ }
+
+ r = ff_thread_get_buffer(avctx, &pic->tf,
+ pic->reference ? AV_GET_BUFFER_FLAG_REF : 0);
+ } else {
+ pic->f->width = avctx->width;
+ pic->f->height = avctx->height;
+ pic->f->format = avctx->pix_fmt;
+ r = avcodec_default_get_buffer2(avctx, pic->f, 0);
+ }
+
+ if (r < 0 || !pic->f->buf[0]) {
+ av_log(avctx, AV_LOG_ERROR, "get_buffer() failed (%d %p)\n",
+ r, pic->f->data[0]);
+ return -1;
+ }
+
+ if (edges_needed) {
+ int i;
+ for (i = 0; pic->f->data[i]; i++) {
+ int offset = (EDGE_WIDTH >> (i ? chroma_y_shift : 0)) *
+ pic->f->linesize[i] +
+ (EDGE_WIDTH >> (i ? chroma_x_shift : 0));
+ pic->f->data[i] += offset;
+ }
+ pic->f->width = avctx->width;
+ pic->f->height = avctx->height;
+ }
+
+ if (avctx->hwaccel) {
+ assert(!pic->hwaccel_picture_private);
+ if (avctx->hwaccel->frame_priv_data_size) {
+ pic->hwaccel_priv_buf = av_buffer_allocz(avctx->hwaccel->frame_priv_data_size);
+ if (!pic->hwaccel_priv_buf) {
+ av_log(avctx, AV_LOG_ERROR, "alloc_frame_buffer() failed (hwaccel private data allocation)\n");
+ return -1;
+ }
+ pic->hwaccel_picture_private = pic->hwaccel_priv_buf->data;
+ }
+ }
+
+ if (linesize && (linesize != pic->f->linesize[0] ||
+ uvlinesize != pic->f->linesize[1])) {
+ av_log(avctx, AV_LOG_ERROR,
+ "get_buffer() failed (stride changed)\n");
+ ff_mpeg_unref_picture(avctx, pic);
+ return -1;
+ }
+
+ if (pic->f->linesize[1] != pic->f->linesize[2]) {
+ av_log(avctx, AV_LOG_ERROR,
+ "get_buffer() failed (uv stride mismatch)\n");
+ ff_mpeg_unref_picture(avctx, pic);
+ return -1;
+ }
+
+ if (!sc->edge_emu_buffer &&
+ (ret = ff_mpeg_framesize_alloc(avctx, me, sc,
+ pic->f->linesize[0])) < 0) {
+ av_log(avctx, AV_LOG_ERROR,
+ "get_buffer() failed to allocate context scratch buffers.\n");
+ ff_mpeg_unref_picture(avctx, pic);
+ return ret;
+ }
+
+ return 0;
+}
+
+static int alloc_picture_tables(Picture *pic, int encoding, int out_format,
+ int mb_stride, int mb_height, int b8_stride)
+{
+ const int big_mb_num = mb_stride * (mb_height + 1) + 1;
+ const int mb_array_size = mb_stride * mb_height;
+ const int b8_array_size = b8_stride * mb_height * 2;
+ int i;
+
+
+ pic->mbskip_table_buf = av_buffer_allocz(mb_array_size + 2);
+ pic->qscale_table_buf = av_buffer_allocz(big_mb_num + mb_stride);
+ pic->mb_type_buf = av_buffer_allocz((big_mb_num + mb_stride) *
+ sizeof(uint32_t));
+ if (!pic->mbskip_table_buf || !pic->qscale_table_buf || !pic->mb_type_buf)
+ return AVERROR(ENOMEM);
+
+ if (encoding) {
+ pic->mb_var_buf = av_buffer_allocz(mb_array_size * sizeof(int16_t));
+ pic->mc_mb_var_buf = av_buffer_allocz(mb_array_size * sizeof(int16_t));
+ pic->mb_mean_buf = av_buffer_allocz(mb_array_size);
+ if (!pic->mb_var_buf || !pic->mc_mb_var_buf || !pic->mb_mean_buf)
+ return AVERROR(ENOMEM);
+ }
+
+ if (out_format == FMT_H263 || encoding) {
+ int mv_size = 2 * (b8_array_size + 4) * sizeof(int16_t);
+ int ref_index_size = 4 * mb_array_size;
+
+ for (i = 0; mv_size && i < 2; i++) {
+ pic->motion_val_buf[i] = av_buffer_allocz(mv_size);
+ pic->ref_index_buf[i] = av_buffer_allocz(ref_index_size);
+ if (!pic->motion_val_buf[i] || !pic->ref_index_buf[i])
+ return AVERROR(ENOMEM);
+ }
+ }
+
+ return 0;
+}
+
+/**
+ * Allocate a Picture.
+ * The pixels are allocated/set by calling get_buffer() if shared = 0
+ */
+int ff_alloc_picture(AVCodecContext *avctx, Picture *pic, MotionEstContext *me,
+ ScratchpadContext *sc, int shared, int encoding,
+ int chroma_x_shift, int chroma_y_shift, int out_format,
+ int mb_stride, int mb_height, int b8_stride,
+ ptrdiff_t *linesize, ptrdiff_t *uvlinesize)
+{
+ int i, ret;
+
+ if (shared) {
+ assert(pic->f->data[0]);
+ pic->shared = 1;
+ } else {
+ assert(!pic->f->buf[0]);
+ if (alloc_frame_buffer(avctx, pic, me, sc,
+ chroma_x_shift, chroma_y_shift,
+ *linesize, *uvlinesize) < 0)
+ return -1;
+
+ *linesize = pic->f->linesize[0];
+ *uvlinesize = pic->f->linesize[1];
+ }
+
+ if (!pic->qscale_table_buf)
+ ret = alloc_picture_tables(pic, encoding, out_format,
+ mb_stride, mb_height, b8_stride);
+ else
+ ret = make_tables_writable(pic);
+ if (ret < 0)
+ goto fail;
+
+ if (encoding) {
+ pic->mb_var = (uint16_t*)pic->mb_var_buf->data;
+ pic->mc_mb_var = (uint16_t*)pic->mc_mb_var_buf->data;
+ pic->mb_mean = pic->mb_mean_buf->data;
+ }
+
+ pic->mbskip_table = pic->mbskip_table_buf->data;
+ pic->qscale_table = pic->qscale_table_buf->data + 2 * mb_stride + 1;
+ pic->mb_type = (uint32_t*)pic->mb_type_buf->data + 2 * mb_stride + 1;
+
+ if (pic->motion_val_buf[0]) {
+ for (i = 0; i < 2; i++) {
+ pic->motion_val[i] = (int16_t (*)[2])pic->motion_val_buf[i]->data + 4;
+ pic->ref_index[i] = pic->ref_index_buf[i]->data;
+ }
+ }
+
+ return 0;
+fail:
+ av_log(avctx, AV_LOG_ERROR, "Error allocating a picture.\n");
+ ff_mpeg_unref_picture(avctx, pic);
+ ff_free_picture_tables(pic);
+ return AVERROR(ENOMEM);
+}
+
+/**
+ * Deallocate a picture.
+ */
+void ff_mpeg_unref_picture(AVCodecContext *avctx, Picture *pic)
+{
+ pic->tf.f = pic->f;
+ /* WM Image / Screen codecs allocate internal buffers with different
+ * dimensions / colorspaces; ignore user-defined callbacks for these. */
+ if (avctx->codec_id != AV_CODEC_ID_WMV3IMAGE &&
+ avctx->codec_id != AV_CODEC_ID_VC1IMAGE &&
+ avctx->codec_id != AV_CODEC_ID_MSS2)
+ ff_thread_release_buffer(avctx, &pic->tf);
+ else if (pic->f)
+ av_frame_unref(pic->f);
+
+ av_buffer_unref(&pic->hwaccel_priv_buf);
+
+ if (pic->needs_realloc)
+ ff_free_picture_tables(pic);
+}
+
+int ff_update_picture_tables(Picture *dst, Picture *src)
+{
+ int i;
+
+#define UPDATE_TABLE(table) \
+do { \
+ if (src->table && \
+ (!dst->table || dst->table->buffer != src->table->buffer)) { \
+ av_buffer_unref(&dst->table); \
+ dst->table = av_buffer_ref(src->table); \
+ if (!dst->table) { \
+ ff_free_picture_tables(dst); \
+ return AVERROR(ENOMEM); \
+ } \
+ } \
+} while (0)
+
+ UPDATE_TABLE(mb_var_buf);
+ UPDATE_TABLE(mc_mb_var_buf);
+ UPDATE_TABLE(mb_mean_buf);
+ UPDATE_TABLE(mbskip_table_buf);
+ UPDATE_TABLE(qscale_table_buf);
+ UPDATE_TABLE(mb_type_buf);
+ for (i = 0; i < 2; i++) {
+ UPDATE_TABLE(motion_val_buf[i]);
+ UPDATE_TABLE(ref_index_buf[i]);
+ }
+
+ dst->mb_var = src->mb_var;
+ dst->mc_mb_var = src->mc_mb_var;
+ dst->mb_mean = src->mb_mean;
+ dst->mbskip_table = src->mbskip_table;
+ dst->qscale_table = src->qscale_table;
+ dst->mb_type = src->mb_type;
+ for (i = 0; i < 2; i++) {
+ dst->motion_val[i] = src->motion_val[i];
+ dst->ref_index[i] = src->ref_index[i];
+ }
+
+ return 0;
+}
+
+int ff_mpeg_ref_picture(AVCodecContext *avctx, Picture *dst, Picture *src)
+{
+ int ret;
+
+ av_assert0(!dst->f->buf[0]);
+ av_assert0(src->f->buf[0]);
+
+ src->tf.f = src->f;
+ dst->tf.f = dst->f;
+ ret = ff_thread_ref_frame(&dst->tf, &src->tf);
+ if (ret < 0)
+ goto fail;
+
+ ret = ff_update_picture_tables(dst, src);
+ if (ret < 0)
+ goto fail;
+
+ if (src->hwaccel_picture_private) {
+ dst->hwaccel_priv_buf = av_buffer_ref(src->hwaccel_priv_buf);
+ if (!dst->hwaccel_priv_buf)
+ goto fail;
+ dst->hwaccel_picture_private = dst->hwaccel_priv_buf->data;
+ }
+
+ dst->field_picture = src->field_picture;
+ dst->mb_var_sum = src->mb_var_sum;
+ dst->mc_mb_var_sum = src->mc_mb_var_sum;
+ dst->b_frame_score = src->b_frame_score;
+ dst->needs_realloc = src->needs_realloc;
+ dst->reference = src->reference;
+ dst->shared = src->shared;
+
+ return 0;
+fail:
+ ff_mpeg_unref_picture(avctx, dst);
+ return ret;
+}
+
+static inline int pic_is_unused(Picture *pic)
+{
+ if (!pic->f->buf[0])
+ return 1;
+ if (pic->needs_realloc && !(pic->reference & DELAYED_PIC_REF))
+ return 1;
+ return 0;
+}
+
+static int find_unused_picture(Picture *picture, int shared)
+{
+ int i;
+
+ if (shared) {
+ for (i = 0; i < MAX_PICTURE_COUNT; i++) {
+ if (!picture[i].f->buf[0])
+ return i;
+ }
+ } else {
+ for (i = 0; i < MAX_PICTURE_COUNT; i++) {
+ if (pic_is_unused(&picture[i]))
+ return i;
+ }
+ }
+
+ return AVERROR_INVALIDDATA;
+}
+
+int ff_find_unused_picture(AVCodecContext *avctx, Picture *picture, int shared)
+{
+ int ret = find_unused_picture(picture, shared);
+
+ if (ret >= 0 && ret < MAX_PICTURE_COUNT) {
+ if (picture[ret].needs_realloc) {
+ picture[ret].needs_realloc = 0;
+ ff_free_picture_tables(&picture[ret]);
+ ff_mpeg_unref_picture(avctx, &picture[ret]);
+ }
+ }
+ return ret;
+}
+
+void ff_free_picture_tables(Picture *pic)
+{
+ int i;
+
+ av_buffer_unref(&pic->mb_var_buf);
+ av_buffer_unref(&pic->mc_mb_var_buf);
+ av_buffer_unref(&pic->mb_mean_buf);
+ av_buffer_unref(&pic->mbskip_table_buf);
+ av_buffer_unref(&pic->qscale_table_buf);
+ av_buffer_unref(&pic->mb_type_buf);
+
+ for (i = 0; i < 2; i++) {
+ av_buffer_unref(&pic->motion_val_buf[i]);
+ av_buffer_unref(&pic->ref_index_buf[i]);
+ }
+}
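[Editor's note] For a sense of scale, here is a minimal, self-contained sketch (not part of this commit) that reproduces the scratch-buffer arithmetic of ff_mpeg_framesize_alloc() above for an assumed example linesize of 1920; the ALIGN macro below simply stands in for FFALIGN.

/* Hypothetical back-of-the-envelope check of the buffer sizing used by
 * ff_mpeg_framesize_alloc(). Plain ISO C, independent of libavcodec. */
#include <stdio.h>
#include <stdlib.h>

#define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))    /* same idea as FFALIGN */

int main(void)
{
    int linesize   = 1920;                            /* assumed example stride */
    int alloc_size = ALIGN(abs(linesize) + 32, 32);   /* -> 1952 */

    printf("edge_emu_buffer: %d bytes\n", alloc_size * 2 * 24);     /* 93696  */
    printf("me scratchpad:   %d bytes\n", alloc_size * 2 * 16 * 3); /* 187392 */
    return 0;
}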
diff --git a/libavcodec/mpegpicture.h b/libavcodec/mpegpicture.h
new file mode 100644
index 0000000..0588f02
--- /dev/null
+++ b/libavcodec/mpegpicture.h
@@ -0,0 +1,109 @@
+/*
+ * Mpeg video formats-related defines and utility functions
+ *
+ * This file is part of Libav.
+ *
+ * Libav is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * Libav is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with Libav; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef AVCODEC_MPEGPICTURE_H
+#define AVCODEC_MPEGPICTURE_H
+
+#include <stdint.h>
+
+#include "libavutil/frame.h"
+
+#include "avcodec.h"
+#include "motion_est.h"
+#include "thread.h"
+
+#define MAX_PICTURE_COUNT 32
+#define EDGE_WIDTH 16
+
+typedef struct ScratchpadContext {
+ uint8_t *edge_emu_buffer; ///< temporary buffer for if MVs point to out-of-frame data
+ uint8_t *rd_scratchpad; ///< scratchpad for rate distortion mb decision
+ uint8_t *obmc_scratchpad;
+ uint8_t *b_scratchpad; ///< scratchpad used for writing into write only buffers
+} ScratchpadContext;
+
+/**
+ * Picture.
+ */
+typedef struct Picture {
+ struct AVFrame *f;
+ ThreadFrame tf;
+
+ AVBufferRef *qscale_table_buf;
+ int8_t *qscale_table;
+
+ AVBufferRef *motion_val_buf[2];
+ int16_t (*motion_val[2])[2];
+
+ AVBufferRef *mb_type_buf;
+ uint32_t *mb_type; ///< types and macros are defined in mpegutils.h
+
+ AVBufferRef *mbskip_table_buf;
+ uint8_t *mbskip_table;
+
+ AVBufferRef *ref_index_buf[2];
+ int8_t *ref_index[2];
+
+ AVBufferRef *mb_var_buf;
+ uint16_t *mb_var; ///< Table for MB variances
+
+ AVBufferRef *mc_mb_var_buf;
+ uint16_t *mc_mb_var; ///< Table for motion compensated MB variances
+
+ AVBufferRef *mb_mean_buf;
+ uint8_t *mb_mean; ///< Table for MB luminance
+
+ AVBufferRef *hwaccel_priv_buf;
+ void *hwaccel_picture_private; ///< Hardware accelerator private data
+
+ int field_picture; ///< whether or not the picture was encoded in separate fields
+
+ int mb_var_sum; ///< sum of MB variance for current frame
+ int mc_mb_var_sum; ///< motion compensated MB variance for current frame
+
+ int b_frame_score; /* */
+ int needs_realloc; ///< Picture needs to be reallocated (eg due to a frame size change)
+
+ int reference;
+ int shared;
+} Picture;
+
+/**
+ * Allocate a Picture.
+ * The pixels are allocated/set by calling get_buffer() if shared = 0.
+ */
+int ff_alloc_picture(AVCodecContext *avctx, Picture *pic, MotionEstContext *me,
+ ScratchpadContext *sc, int shared, int encoding,
+ int chroma_x_shift, int chroma_y_shift, int out_format,
+ int mb_stride, int mb_height, int b8_stride,
+ ptrdiff_t *linesize, ptrdiff_t *uvlinesize);
+
+int ff_mpeg_framesize_alloc(AVCodecContext *avctx, MotionEstContext *me,
+ ScratchpadContext *sc, int linesize);
+
+int ff_mpeg_ref_picture(AVCodecContext *avctx, Picture *dst, Picture *src);
+void ff_mpeg_unref_picture(AVCodecContext *avctx, Picture *picture);
+
+void ff_free_picture_tables(Picture *pic);
+int ff_update_picture_tables(Picture *dst, Picture *src);
+
+int ff_find_unused_picture(AVCodecContext *avctx, Picture *picture, int shared);
+
+#endif /* AVCODEC_MPEGPICTURE_H */
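[Editor's note] For orientation only, a hypothetical usage sketch of the API now declared in mpegpicture.h, modelled on the alloc_picture() wrapper that stays behind in mpegvideo.c. The geometry arguments (mb_stride, mb_height, b8_stride), the 4:2:0 chroma shifts, and FMT_H263 are assumed example inputs; this is not code from the commit.

/* Hypothetical sketch (not part of this commit): pick a free slot from a
 * Picture pool and allocate its frame buffer and side tables for decoding
 * (shared = 0, encoding = 0), following the call pattern of alloc_picture()
 * in mpegvideo.c. */
#include "mpegpicture.h"
#include "mpegutils.h"   /* assumed to provide FMT_H263 (enum OutputFormat) */

static int acquire_picture(AVCodecContext *avctx, Picture *pool,
                           MotionEstContext *me, ScratchpadContext *sc,
                           int mb_stride, int mb_height, int b8_stride,
                           ptrdiff_t *linesize, ptrdiff_t *uvlinesize)
{
    int idx = ff_find_unused_picture(avctx, pool, 0);
    if (idx < 0)
        return idx;                        /* pool exhausted: AVERROR_INVALIDDATA */

    if (ff_alloc_picture(avctx, &pool[idx], me, sc,
                         0 /* shared */, 0 /* encoding */,
                         1, 1 /* 4:2:0 chroma shifts */, FMT_H263,
                         mb_stride, mb_height, b8_stride,
                         linesize, uvlinesize) < 0)
        return -1;

    return idx;                            /* caller uses pool[idx] */
}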
diff --git a/libavcodec/mpegvideo.c b/libavcodec/mpegvideo.c
index 2b938a6..86eab07 100644
--- a/libavcodec/mpegvideo.c
+++ b/libavcodec/mpegvideo.c
@@ -343,199 +343,6 @@ av_cold void ff_mpv_idct_init(MpegEncContext *s)
ff_init_scantable(s->idsp.idct_permutation, &s->intra_v_scantable, ff_alternate_vertical_scan);
}
-int ff_mpeg_framesize_alloc(AVCodecContext *avctx, MotionEstContext *me,
- ScratchpadContext *sc, int linesize)
-{
- int alloc_size = FFALIGN(FFABS(linesize) + 32, 32);
-
- // edge emu needs blocksize + filter length - 1
- // (= 17x17 for halfpel / 21x21 for h264)
- // VC1 computes luma and chroma simultaneously and needs 19X19 + 9x9
- // at uvlinesize. It supports only YUV420 so 24x24 is enough
- // linesize * interlaced * MBsize
- FF_ALLOCZ_OR_GOTO(avctx, sc->edge_emu_buffer, alloc_size * 2 * 24,
- fail);
-
- FF_ALLOCZ_OR_GOTO(avctx, me->scratchpad, alloc_size * 2 * 16 * 3,
- fail)
- me->temp = me->scratchpad;
- sc->rd_scratchpad = me->scratchpad;
- sc->b_scratchpad = me->scratchpad;
- sc->obmc_scratchpad = me->scratchpad + 16;
-
- return 0;
-fail:
- av_freep(&sc->edge_emu_buffer);
- return AVERROR(ENOMEM);
-}
-
-/**
- * Allocate a frame buffer
- */
-static int alloc_frame_buffer(AVCodecContext *avctx, Picture *pic,
- MotionEstContext *me, ScratchpadContext *sc,
- int chroma_x_shift, int chroma_y_shift,
- int linesize, int uvlinesize)
-{
- int edges_needed = av_codec_is_encoder(avctx->codec);
- int r, ret;
-
- pic->tf.f = pic->f;
- if (avctx->codec_id != AV_CODEC_ID_WMV3IMAGE &&
- avctx->codec_id != AV_CODEC_ID_VC1IMAGE &&
- avctx->codec_id != AV_CODEC_ID_MSS2) {
- if (edges_needed) {
- pic->f->width = avctx->width + 2 * EDGE_WIDTH;
- pic->f->height = avctx->height + 2 * EDGE_WIDTH;
- }
-
- r = ff_thread_get_buffer(avctx, &pic->tf,
- pic->reference ? AV_GET_BUFFER_FLAG_REF : 0);
- } else {
- pic->f->width = avctx->width;
- pic->f->height = avctx->height;
- pic->f->format = avctx->pix_fmt;
- r = avcodec_default_get_buffer2(avctx, pic->f, 0);
- }
-
- if (r < 0 || !pic->f->buf[0]) {
- av_log(avctx, AV_LOG_ERROR, "get_buffer() failed (%d %p)\n",
- r, pic->f->data[0]);
- return -1;
- }
-
- if (edges_needed) {
- int i;
- for (i = 0; pic->f->data[i]; i++) {
- int offset = (EDGE_WIDTH >> (i ? chroma_y_shift : 0)) *
- pic->f->linesize[i] +
- (EDGE_WIDTH >> (i ? chroma_x_shift : 0));
- pic->f->data[i] += offset;
- }
- pic->f->width = avctx->width;
- pic->f->height = avctx->height;
- }
-
- if (avctx->hwaccel) {
- assert(!pic->hwaccel_picture_private);
- if (avctx->hwaccel->frame_priv_data_size) {
- pic->hwaccel_priv_buf = av_buffer_allocz(avctx->hwaccel->frame_priv_data_size);
- if (!pic->hwaccel_priv_buf) {
- av_log(avctx, AV_LOG_ERROR, "alloc_frame_buffer() failed (hwaccel private data allocation)\n");
- return -1;
- }
- pic->hwaccel_picture_private = pic->hwaccel_priv_buf->data;
- }
- }
-
- if (linesize && (linesize != pic->f->linesize[0] ||
- uvlinesize != pic->f->linesize[1])) {
- av_log(avctx, AV_LOG_ERROR,
- "get_buffer() failed (stride changed)\n");
- ff_mpeg_unref_picture(avctx, pic);
- return -1;
- }
-
- if (pic->f->linesize[1] != pic->f->linesize[2]) {
- av_log(avctx, AV_LOG_ERROR,
- "get_buffer() failed (uv stride mismatch)\n");
- ff_mpeg_unref_picture(avctx, pic);
- return -1;
- }
-
- if (!sc->edge_emu_buffer &&
- (ret = ff_mpeg_framesize_alloc(avctx, me, sc,
- pic->f->linesize[0])) < 0) {
- av_log(avctx, AV_LOG_ERROR,
- "get_buffer() failed to allocate context scratch buffers.\n");
- ff_mpeg_unref_picture(avctx, pic);
- return ret;
- }
-
- return 0;
-}
-
-void ff_free_picture_tables(Picture *pic)
-{
- int i;
-
- av_buffer_unref(&pic->mb_var_buf);
- av_buffer_unref(&pic->mc_mb_var_buf);
- av_buffer_unref(&pic->mb_mean_buf);
- av_buffer_unref(&pic->mbskip_table_buf);
- av_buffer_unref(&pic->qscale_table_buf);
- av_buffer_unref(&pic->mb_type_buf);
-
- for (i = 0; i < 2; i++) {
- av_buffer_unref(&pic->motion_val_buf[i]);
- av_buffer_unref(&pic->ref_index_buf[i]);
- }
-}
-
-static int alloc_picture_tables(Picture *pic, int encoding, int out_format,
- int mb_stride, int mb_height, int b8_stride)
-{
- const int big_mb_num = mb_stride * (mb_height + 1) + 1;
- const int mb_array_size = mb_stride * mb_height;
- const int b8_array_size = b8_stride * mb_height * 2;
- int i;
-
-
- pic->mbskip_table_buf = av_buffer_allocz(mb_array_size + 2);
- pic->qscale_table_buf = av_buffer_allocz(big_mb_num + mb_stride);
- pic->mb_type_buf = av_buffer_allocz((big_mb_num + mb_stride) *
- sizeof(uint32_t));
- if (!pic->mbskip_table_buf || !pic->qscale_table_buf || !pic->mb_type_buf)
- return AVERROR(ENOMEM);
-
- if (encoding) {
- pic->mb_var_buf = av_buffer_allocz(mb_array_size * sizeof(int16_t));
- pic->mc_mb_var_buf = av_buffer_allocz(mb_array_size * sizeof(int16_t));
- pic->mb_mean_buf = av_buffer_allocz(mb_array_size);
- if (!pic->mb_var_buf || !pic->mc_mb_var_buf || !pic->mb_mean_buf)
- return AVERROR(ENOMEM);
- }
-
- if (out_format == FMT_H263 || encoding) {
- int mv_size = 2 * (b8_array_size + 4) * sizeof(int16_t);
- int ref_index_size = 4 * mb_array_size;
-
- for (i = 0; mv_size && i < 2; i++) {
- pic->motion_val_buf[i] = av_buffer_allocz(mv_size);
- pic->ref_index_buf[i] = av_buffer_allocz(ref_index_size);
- if (!pic->motion_val_buf[i] || !pic->ref_index_buf[i])
- return AVERROR(ENOMEM);
- }
- }
-
- return 0;
-}
-
-static int make_tables_writable(Picture *pic)
-{
- int ret, i;
-#define MAKE_WRITABLE(table) \
-do {\
- if (pic->table &&\
- (ret = av_buffer_make_writable(&pic->table)) < 0)\
- return ret;\
-} while (0)
-
- MAKE_WRITABLE(mb_var_buf);
- MAKE_WRITABLE(mc_mb_var_buf);
- MAKE_WRITABLE(mb_mean_buf);
- MAKE_WRITABLE(mbskip_table_buf);
- MAKE_WRITABLE(qscale_table_buf);
- MAKE_WRITABLE(mb_type_buf);
-
- for (i = 0; i < 2; i++) {
- MAKE_WRITABLE(motion_val_buf[i]);
- MAKE_WRITABLE(ref_index_buf[i]);
- }
-
- return 0;
-}
-
static int alloc_picture(MpegEncContext *s, Picture *pic, int shared)
{
return ff_alloc_picture(s->avctx, pic, &s->me, &s->sc, shared, 0,
@@ -544,166 +351,6 @@ static int alloc_picture(MpegEncContext *s, Picture *pic, int shared)
&s->linesize, &s->uvlinesize);
}
-/**
- * Allocate a Picture.
- * The pixels are allocated/set by calling get_buffer() if shared = 0
- */
-int ff_alloc_picture(AVCodecContext *avctx, Picture *pic, MotionEstContext *me,
- ScratchpadContext *sc, int shared, int encoding,
- int chroma_x_shift, int chroma_y_shift, int out_format,
- int mb_stride, int mb_height, int b8_stride,
- ptrdiff_t *linesize, ptrdiff_t *uvlinesize)
-{
- int i, ret;
-
- if (shared) {
- assert(pic->f->data[0]);
- pic->shared = 1;
- } else {
- assert(!pic->f->buf[0]);
- if (alloc_frame_buffer(avctx, pic, me, sc,
- chroma_x_shift, chroma_y_shift,
- *linesize, *uvlinesize) < 0)
- return -1;
-
- *linesize = pic->f->linesize[0];
- *uvlinesize = pic->f->linesize[1];
- }
-
- if (!pic->qscale_table_buf)
- ret = alloc_picture_tables(pic, encoding, out_format,
- mb_stride, mb_height, b8_stride);
- else
- ret = make_tables_writable(pic);
- if (ret < 0)
- goto fail;
-
- if (encoding) {
- pic->mb_var = (uint16_t*)pic->mb_var_buf->data;
- pic->mc_mb_var = (uint16_t*)pic->mc_mb_var_buf->data;
- pic->mb_mean = pic->mb_mean_buf->data;
- }
-
- pic->mbskip_table = pic->mbskip_table_buf->data;
- pic->qscale_table = pic->qscale_table_buf->data + 2 * mb_stride + 1;
- pic->mb_type = (uint32_t*)pic->mb_type_buf->data + 2 * mb_stride + 1;
-
- if (pic->motion_val_buf[0]) {
- for (i = 0; i < 2; i++) {
- pic->motion_val[i] = (int16_t (*)[2])pic->motion_val_buf[i]->data + 4;
- pic->ref_index[i] = pic->ref_index_buf[i]->data;
- }
- }
-
- return 0;
-fail:
- av_log(avctx, AV_LOG_ERROR, "Error allocating a picture.\n");
- ff_mpeg_unref_picture(avctx, pic);
- ff_free_picture_tables(pic);
- return AVERROR(ENOMEM);
-}
-
-/**
- * Deallocate a picture.
- */
-void ff_mpeg_unref_picture(AVCodecContext *avctx, Picture *pic)
-{
- pic->tf.f = pic->f;
- /* WM Image / Screen codecs allocate internal buffers with different
- * dimensions / colorspaces; ignore user-defined callbacks for these. */
- if (avctx->codec->id != AV_CODEC_ID_WMV3IMAGE &&
- avctx->codec->id != AV_CODEC_ID_VC1IMAGE &&
- avctx->codec->id != AV_CODEC_ID_MSS2)
- ff_thread_release_buffer(avctx, &pic->tf);
- else if (pic->f)
- av_frame_unref(pic->f);
-
- av_buffer_unref(&pic->hwaccel_priv_buf);
-
- if (pic->needs_realloc)
- ff_free_picture_tables(pic);
-}
-
-static int update_picture_tables(Picture *dst, Picture *src)
-{
- int i;
-
-#define UPDATE_TABLE(table)\
-do {\
- if (src->table &&\
- (!dst->table || dst->table->buffer != src->table->buffer)) {\
- av_buffer_unref(&dst->table);\
- dst->table = av_buffer_ref(src->table);\
- if (!dst->table) {\
- ff_free_picture_tables(dst);\
- return AVERROR(ENOMEM);\
- }\
- }\
-} while (0)
-
- UPDATE_TABLE(mb_var_buf);
- UPDATE_TABLE(mc_mb_var_buf);
- UPDATE_TABLE(mb_mean_buf);
- UPDATE_TABLE(mbskip_table_buf);
- UPDATE_TABLE(qscale_table_buf);
- UPDATE_TABLE(mb_type_buf);
- for (i = 0; i < 2; i++) {
- UPDATE_TABLE(motion_val_buf[i]);
- UPDATE_TABLE(ref_index_buf[i]);
- }
-
- dst->mb_var = src->mb_var;
- dst->mc_mb_var = src->mc_mb_var;
- dst->mb_mean = src->mb_mean;
- dst->mbskip_table = src->mbskip_table;
- dst->qscale_table = src->qscale_table;
- dst->mb_type = src->mb_type;
- for (i = 0; i < 2; i++) {
- dst->motion_val[i] = src->motion_val[i];
- dst->ref_index[i] = src->ref_index[i];
- }
-
- return 0;
-}
-
-int ff_mpeg_ref_picture(AVCodecContext *avctx, Picture *dst, Picture *src)
-{
- int ret;
-
- av_assert0(!dst->f->buf[0]);
- av_assert0(src->f->buf[0]);
-
- src->tf.f = src->f;
- dst->tf.f = dst->f;
- ret = ff_thread_ref_frame(&dst->tf, &src->tf);
- if (ret < 0)
- goto fail;
-
- ret = update_picture_tables(dst, src);
- if (ret < 0)
- goto fail;
-
- if (src->hwaccel_picture_private) {
- dst->hwaccel_priv_buf = av_buffer_ref(src->hwaccel_priv_buf);
- if (!dst->hwaccel_priv_buf)
- goto fail;
- dst->hwaccel_picture_private = dst->hwaccel_priv_buf->data;
- }
-
- dst->field_picture = src->field_picture;
- dst->mb_var_sum = src->mb_var_sum;
- dst->mc_mb_var_sum = src->mc_mb_var_sum;
- dst->b_frame_score = src->b_frame_score;
- dst->needs_realloc = src->needs_realloc;
- dst->reference = src->reference;
- dst->shared = src->shared;
-
- return 0;
-fail:
- ff_mpeg_unref_picture(avctx, dst);
- return ret;
-}
-
static int init_duplicate_context(MpegEncContext *s)
{
int y_size = s->b8_stride * (2 * s->mb_height + 1);
@@ -888,7 +535,7 @@ do {\
if (s1->pic.f->buf[0])\
ret = ff_mpeg_ref_picture(s->avctx, &s->pic, &s1->pic);\
else\
- ret = update_picture_tables(&s->pic, &s1->pic);\
+ ret = ff_update_picture_tables(&s->pic, &s1->pic);\
if (ret < 0)\
return ret;\
} while (0)
@@ -1473,48 +1120,6 @@ static void release_unused_pictures(AVCodecContext *avctx, Picture *picture)
}
}
-static inline int pic_is_unused(Picture *pic)
-{
- if (!pic->f->buf[0])
- return 1;
- if (pic->needs_realloc && !(pic->reference & DELAYED_PIC_REF))
- return 1;
- return 0;
-}
-
-static int find_unused_picture(Picture *picture, int shared)
-{
- int i;
-
- if (shared) {
- for (i = 0; i < MAX_PICTURE_COUNT; i++) {
- if (!picture[i].f->buf[0])
- return i;
- }
- } else {
- for (i = 0; i < MAX_PICTURE_COUNT; i++) {
- if (pic_is_unused(&picture[i]))
- return i;
- }
- }
-
- return AVERROR_INVALIDDATA;
-}
-
-int ff_find_unused_picture(AVCodecContext *avctx, Picture *picture, int shared)
-{
- int ret = find_unused_picture(picture, shared);
-
- if (ret >= 0 && ret < MAX_PICTURE_COUNT) {
- if (picture[ret].needs_realloc) {
- picture[ret].needs_realloc = 0;
- ff_free_picture_tables(&picture[ret]);
- ff_mpeg_unref_picture(avctx, &picture[ret]);
- }
- }
- return ret;
-}
-
/**
* generic function called after decoding
* the header and before a frame is decoded.
diff --git a/libavcodec/mpegvideo.h b/libavcodec/mpegvideo.h
index d5e14f7..b2b2f6d 100644
--- a/libavcodec/mpegvideo.h
+++ b/libavcodec/mpegvideo.h
@@ -40,6 +40,7 @@
#include "idctdsp.h"
#include "me_cmp.h"
#include "motion_est.h"
+#include "mpegpicture.h"
#include "mpegvideodsp.h"
#include "mpegvideoencdsp.h"
#include "pixblockdsp.h"
@@ -60,8 +61,6 @@
#define MAX_THREADS 16
-#define MAX_PICTURE_COUNT 32
-
#define MAX_B_FRAMES 16
#define ME_MAP_SIZE 64
@@ -70,8 +69,6 @@
#define INPLACE_OFFSET 16
-#define EDGE_WIDTH 16
-
/* Start codes. */
#define SEQ_END_CODE 0x000001b7
#define SEQ_START_CODE 0x000001b3
@@ -83,62 +80,6 @@
#define USER_START_CODE 0x000001b2
/**
- * Picture.
- */
-typedef struct Picture{
- struct AVFrame *f;
- ThreadFrame tf;
-
- AVBufferRef *qscale_table_buf;
- int8_t *qscale_table;
-
- AVBufferRef *motion_val_buf[2];
- int16_t (*motion_val[2])[2];
-
- AVBufferRef *mb_type_buf;
- uint32_t *mb_type; ///< types and macros are defined in mpegutils.h
-
- AVBufferRef *mbskip_table_buf;
- uint8_t *mbskip_table;
-
- AVBufferRef *ref_index_buf[2];
- int8_t *ref_index[2];
-
- AVBufferRef *mb_var_buf;
- uint16_t *mb_var; ///< Table for MB variances
-
- AVBufferRef *mc_mb_var_buf;
- uint16_t *mc_mb_var; ///< Table for motion compensated MB variances
-
- AVBufferRef *mb_mean_buf;
- uint8_t *mb_mean; ///< Table for MB luminance
-
- AVBufferRef *hwaccel_priv_buf;
- /**
- * hardware accelerator private data
- */
- void *hwaccel_picture_private;
-
- int field_picture; ///< whether or not the picture was encoded in separate fields
-
- int mb_var_sum; ///< sum of MB variance for current frame
- int mc_mb_var_sum; ///< motion compensated MB variance for current frame
-
- int b_frame_score; /* */
- int needs_realloc; ///< Picture needs to be reallocated (eg due to a frame size change)
-
- int reference;
- int shared;
-} Picture;
-
-typedef struct ScratchpadContext {
- uint8_t *edge_emu_buffer; ///< temporary buffer for if MVs point to out-of-frame data
- uint8_t *rd_scratchpad; ///< scratchpad for rate distortion mb decision
- uint8_t *obmc_scratchpad;
- uint8_t *b_scratchpad; ///< scratchpad used for writing into write only buffers
-} ScratchpadContext;
-
-/**
* MpegEncContext.
*/
typedef struct MpegEncContext {
@@ -679,7 +620,6 @@ void ff_mpeg_flush(AVCodecContext *avctx);
void ff_print_debug_info(MpegEncContext *s, Picture *p);
void ff_write_quant_matrix(PutBitContext *pb, uint16_t *matrix);
-int ff_find_unused_picture(AVCodecContext *avctx, Picture *picture, int shared);
int ff_update_duplicate_context(MpegEncContext *dst, MpegEncContext *src);
int ff_mpeg_update_thread_context(AVCodecContext *dst, const AVCodecContext *src);
void ff_set_qscale(MpegEncContext * s, int qscale);
@@ -699,18 +639,6 @@ void ff_mpv_motion(MpegEncContext *s,
qpel_mc_func (*qpix_op)[16]);
/**
- * Allocate a Picture.
- * The pixels are allocated/set by calling get_buffer() if shared = 0.
- */
-int ff_alloc_picture(AVCodecContext *avctx, Picture *pic, MotionEstContext *me,
- ScratchpadContext *sc, int shared, int encoding,
- int chroma_x_shift, int chroma_y_shift, int out_format,
- int mb_stride, int mb_height, int b8_stride,
- ptrdiff_t *linesize, ptrdiff_t *uvlinesize);
-
-int ff_mpeg_framesize_alloc(AVCodecContext *avctx, MotionEstContext *me,
- ScratchpadContext *sc, int linesize);
-/**
* permute block according to permuatation.
* @param last last non zero element in scantable order
*/
@@ -744,8 +672,4 @@ int ff_rv10_encode_picture_header(MpegEncContext *s, int picture_number);
int ff_rv_decode_dc(MpegEncContext *s, int n);
void ff_rv20_encode_picture_header(MpegEncContext *s, int picture_number);
-int ff_mpeg_ref_picture(AVCodecContext *avctx, Picture *dst, Picture *src);
-void ff_mpeg_unref_picture(AVCodecContext *avctx, Picture *picture);
-void ff_free_picture_tables(Picture *pic);
-
#endif /* AVCODEC_MPEGVIDEO_H */