/*
* Microsoft Screen 2 (aka Windows Media Video V9 Screen) decoder
*
- * This file is part of Libav.
+ * This file is part of FFmpeg.
*
- * Libav is free software; you can redistribute it and/or
+ * FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
- * Libav is distributed in the hope that it will be useful,
+ * FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
- * License along with Libav; if not, write to the Free Software
+ * License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/avassert.h"
+#include "error_resilience.h"
#include "internal.h"
+#include "mpeg_er.h"
#include "msmpeg4data.h"
+#include "qpeldsp.h"
#include "vc1.h"
#include "mss12.h"
#include "mss2dsp.h"
typedef struct MSS2Context {
VC1Context v;
int split_position;
- AVFrame pic;
- AVFrame last_pic;
+ AVFrame *last_pic;
MSS12Context c;
MSS2DSPContext dsp;
+ QpelDSPContext qdsp;
SliceContext sc[2];
} MSS2Context;
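For orientation: the struct change above drops the two embedded frames and keeps a single allocated reference frame. A minimal sketch of the lifecycle this implies, using the refcounted AVFrame API the rest of the patch adopts (SketchContext and the sketch_* names are illustrative, not from mss2.c):

    /* Minimal sketch (not part of mss2.c): lifecycle of a decoder-owned,
     * refcounted reference frame, as implied by "AVFrame *last_pic" above. */
    #include "libavutil/error.h"
    #include "libavutil/frame.h"

    typedef struct SketchContext {
        AVFrame *last_pic;              /* reference frame kept across calls */
    } SketchContext;

    static int sketch_init(SketchContext *s)
    {
        s->last_pic = av_frame_alloc(); /* empty frame, no data buffers yet */
        return s->last_pic ? 0 : AVERROR(ENOMEM);
    }

    static void sketch_close(SketchContext *s)
    {
        av_frame_free(&s->last_pic);    /* unrefs any data and frees the struct */
    }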
}
}
-ARITH_GET_BIT(2)
+ARITH_GET_BIT(arith2)
/* L. Stuiver and A. Moffat: "Piecewise Integer Mapping for Arithmetic Coding."
* In Proc. 8th Data Compression Conference (DCC '98), pp. 3-12, Mar. 1998 */
return i;
}
-ARITH_GET_MODEL_SYM(2)
+ARITH_GET_MODEL_SYM(arith2)
static int arith2_get_consumed_bytes(ArithCoder *c)
{
ff_mpeg_flush(avctx);
- if (s->current_picture_ptr == NULL || s->current_picture_ptr->f.data[0]) {
- int i = ff_find_unused_picture(s, 0);
- if (i < 0)
- return i;
- s->current_picture_ptr = &s->picture[i];
- }
-
- init_get_bits(&s->gb, buf, buf_size * 8);
+ if ((ret = init_get_bits8(&s->gb, buf, buf_size)) < 0)
+ return ret;
s->loop_filter = avctx->skip_loop_filter < AVDISCARD_ALL;
- if (ff_vc1_parse_frame_header(v, &s->gb) == -1) {
+ if (ff_vc1_parse_frame_header(v, &s->gb) < 0) {
av_log(v->s.avctx, AV_LOG_ERROR, "header error\n");
return AVERROR_INVALIDDATA;
}
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
- if ((ret = ff_MPV_frame_start(s, avctx)) < 0) {
- av_log(v->s.avctx, AV_LOG_ERROR, "ff_MPV_frame_start error\n");
+ if ((ret = ff_mpv_frame_start(s, avctx)) < 0) {
+ av_log(v->s.avctx, AV_LOG_ERROR, "ff_mpv_frame_start error\n");
avctx->pix_fmt = AV_PIX_FMT_RGB24;
return ret;
}
ff_er_frame_end(&s->er);
- ff_MPV_frame_end(s);
+ ff_mpv_frame_end(s);
- f = &s->current_picture.f;
+ f = s->current_picture.f;
if (v->respic == 3) {
ctx->dsp.upsample_plane(f->data[0], f->linesize[0], w, h);
- ctx->dsp.upsample_plane(f->data[1], f->linesize[1], w >> 1, h >> 1);
- ctx->dsp.upsample_plane(f->data[2], f->linesize[2], w >> 1, h >> 1);
+ ctx->dsp.upsample_plane(f->data[1], f->linesize[1], w+1 >> 1, h+1 >> 1);
+ ctx->dsp.upsample_plane(f->data[2], f->linesize[2], w+1 >> 1, h+1 >> 1);
} else if (v->respic)
- av_log_ask_for_sample(v->s.avctx,
- "Asymmetric WMV9 rectangle subsampling\n");
+ avpriv_request_sample(v->s.avctx,
+ "Asymmetric WMV9 rectangle subsampling");
av_assert0(f->linesize[1] == f->linesize[2]);
int buf_size = avpkt->size;
MSS2Context *ctx = avctx->priv_data;
MSS12Context *c = &ctx->c;
+ AVFrame *frame = data;
GetBitContext gb;
GetByteContext gB;
ArithCoder acoder;
av_assert0(FF_INPUT_BUFFER_PADDING_SIZE >=
ARITH2_PADDING + (MIN_CACHE_BITS + 7) / 8);
- init_get_bits(&gb, buf, buf_size * 8);
+ if ((ret = init_get_bits8(&gb, buf, buf_size)) < 0)
+ return ret;
if (keyframe = get_bits1(&gb))
skip_bits(&gb, 7);
return AVERROR_INVALIDDATA;
avctx->pix_fmt = is_555 ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_RGB24;
- if (ctx->pic.data[0] && ctx->pic.format != avctx->pix_fmt)
- avctx->release_buffer(avctx, &ctx->pic);
+ if (ctx->last_pic->format != avctx->pix_fmt)
+ av_frame_unref(ctx->last_pic);
if (has_wmv9) {
bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
}
if (c->mvX < 0 || c->mvY < 0) {
- FFSWAP(AVFrame, ctx->pic, ctx->last_pic);
FFSWAP(uint8_t *, c->pal_pic, c->last_pal_pic);
- if (ctx->pic.data[0])
- avctx->release_buffer(avctx, &ctx->pic);
-
- ctx->pic.reference = 3;
- ctx->pic.buffer_hints = FF_BUFFER_HINTS_VALID |
- FF_BUFFER_HINTS_READABLE |
- FF_BUFFER_HINTS_PRESERVE |
- FF_BUFFER_HINTS_REUSABLE;
-
- if ((ret = ff_get_buffer(avctx, &ctx->pic)) < 0) {
- av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
+ if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
return ret;
- }
- if (ctx->last_pic.data[0]) {
- av_assert0(ctx->pic.linesize[0] == ctx->last_pic.linesize[0]);
- c->last_rgb_pic = ctx->last_pic.data[0] +
- ctx->last_pic.linesize[0] * (avctx->height - 1);
+ if (ctx->last_pic->data[0]) {
+ av_assert0(frame->linesize[0] == ctx->last_pic->linesize[0]);
+ c->last_rgb_pic = ctx->last_pic->data[0] +
+ ctx->last_pic->linesize[0] * (avctx->height - 1);
} else {
av_log(avctx, AV_LOG_ERROR, "Missing keyframe\n");
return AVERROR_INVALIDDATA;
}
} else {
- if (ctx->last_pic.data[0])
- avctx->release_buffer(avctx, &ctx->last_pic);
-
- ctx->pic.reference = 3;
- ctx->pic.buffer_hints = FF_BUFFER_HINTS_VALID |
- FF_BUFFER_HINTS_READABLE |
- FF_BUFFER_HINTS_PRESERVE |
- FF_BUFFER_HINTS_REUSABLE;
-
- if ((ret = avctx->reget_buffer(avctx, &ctx->pic)) < 0) {
- av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
+ if ((ret = ff_reget_buffer(avctx, ctx->last_pic)) < 0)
+ return ret;
+ if ((ret = av_frame_ref(frame, ctx->last_pic)) < 0)
return ret;
- }
c->last_rgb_pic = NULL;
}
- c->rgb_pic = ctx->pic.data[0] +
- ctx->pic.linesize[0] * (avctx->height - 1);
- c->rgb_stride = -ctx->pic.linesize[0];
+ c->rgb_pic = frame->data[0] +
+ frame->linesize[0] * (avctx->height - 1);
+ c->rgb_stride = -frame->linesize[0];
- ctx->pic.key_frame = keyframe;
- ctx->pic.pict_type = keyframe ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;
+ frame->key_frame = keyframe;
+ frame->pict_type = keyframe ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;
if (is_555) {
bytestream2_init(&gB, buf, buf_size);
ff_mss12_slicecontext_reset(&ctx->sc[1]);
}
if (is_rle) {
- init_get_bits(&gb, buf, buf_size * 8);
+ if ((ret = init_get_bits8(&gb, buf, buf_size)) < 0)
+ return ret;
if (ret = decode_rle(&gb, c->pal_pic, c->pal_stride,
c->rgb_pic, c->rgb_stride, c->pal, keyframe,
ctx->split_position, 0,
if (buf_size)
av_log(avctx, AV_LOG_WARNING, "buffer not fully consumed\n");
+ if (c->mvX < 0 || c->mvY < 0) {
+ av_frame_unref(ctx->last_pic);
+ ret = av_frame_ref(ctx->last_pic, frame);
+ if (ret < 0)
+ return ret;
+ }
+
*got_frame = 1;
- *(AVFrame *)data = ctx->pic;
return avpkt->size;
}
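Read together, the scattered decode-path hunks above amount to this flow: when the motion vectors reference the previous output (c->mvX < 0 || c->mvY < 0), the frame is decoded into a freshly allocated refcounted buffer and then becomes the new last_pic; otherwise last_pic is reused in place via ff_reget_buffer() and the caller gets an extra reference to it. A condensed sketch under that reading (sketch_output_frame and uses_last_pic are illustrative names, not from mss2.c; ff_get_buffer()/ff_reget_buffer()/av_frame_ref() are the same helpers the patch itself calls):

    /* Condensed sketch of the buffer handling in mss2_decode_frame above;
     * illustrative only, not a drop-in excerpt. */
    static int sketch_output_frame(AVCodecContext *avctx, MSS2Context *ctx,
                                   AVFrame *frame, int uses_last_pic)
    {
        int ret;

        if (uses_last_pic) {
            /* inter frame that reads from last_pic: needs its own buffer */
            if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
                return ret;
        } else {
            /* no dependency on the previous output: reuse last_pic in place
             * and hand the caller an extra reference to it */
            if ((ret = ff_reget_buffer(avctx, ctx->last_pic)) < 0)
                return ret;
            if ((ret = av_frame_ref(frame, ctx->last_pic)) < 0)
                return ret;
        }

        /* ... pixel data is decoded into frame / ctx->last_pic here ... */

        if (uses_last_pic) {
            /* the newly decoded frame becomes the reference for the next call */
            av_frame_unref(ctx->last_pic);
            if ((ret = av_frame_ref(ctx->last_pic, frame)) < 0)
                return ret;
        }
        return 0;
    }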
int ret;
v->s.avctx = avctx;
- avctx->flags |= CODEC_FLAG_EMU_EDGE;
- v->s.flags |= CODEC_FLAG_EMU_EDGE;
if ((ret = ff_vc1_init_common(v)) < 0)
return ret;
v->overlap = 0;
- v->s.resync_marker = 0;
+ v->resync_marker = 0;
v->rangered = 0;
v->s.max_b_frames = avctx->max_b_frames = 0;
return ret;
/* error concealment */
- v->s.me.qpel_put = v->s.dsp.put_qpel_pixels_tab;
- v->s.me.qpel_avg = v->s.dsp.avg_qpel_pixels_tab;
+ v->s.me.qpel_put = v->s.qdsp.put_qpel_pixels_tab;
+ v->s.me.qpel_avg = v->s.qdsp.avg_qpel_pixels_tab;
return 0;
}
{
MSS2Context *const ctx = avctx->priv_data;
- if (ctx->pic.data[0])
- avctx->release_buffer(avctx, &ctx->pic);
- if (ctx->last_pic.data[0])
- avctx->release_buffer(avctx, &ctx->last_pic);
+ av_frame_free(&ctx->last_pic);
ff_mss12_decode_end(&ctx->c);
av_freep(&ctx->c.pal_pic);
MSS12Context *c = &ctx->c;
int ret;
c->avctx = avctx;
- avctx->coded_frame = &ctx->pic;
if (ret = ff_mss12_decode_init(c, 1, &ctx->sc[0], &ctx->sc[1]))
return ret;
+ ctx->last_pic = av_frame_alloc();
c->pal_stride = c->mask_stride;
c->pal_pic = av_mallocz(c->pal_stride * avctx->height);
c->last_pal_pic = av_mallocz(c->pal_stride * avctx->height);
- if (!c->pal_pic || !c->last_pal_pic) {
+ if (!c->pal_pic || !c->last_pal_pic || !ctx->last_pic) {
mss2_decode_end(avctx);
return AVERROR(ENOMEM);
}
return ret;
}
ff_mss2dsp_init(&ctx->dsp);
+ ff_qpeldsp_init(&ctx->qdsp);
avctx->pix_fmt = c->free_colours == 127 ? AV_PIX_FMT_RGB555
: AV_PIX_FMT_RGB24;
+
return 0;
}
AVCodec ff_mss2_decoder = {
.name = "mss2",
+ .long_name = NULL_IF_CONFIG_SMALL("MS Windows Media Video V9 Screen"),
.type = AVMEDIA_TYPE_VIDEO,
.id = AV_CODEC_ID_MSS2,
.priv_data_size = sizeof(MSS2Context),
.close = mss2_decode_end,
.decode = mss2_decode_frame,
.capabilities = CODEC_CAP_DR1,
- .long_name = NULL_IF_CONFIG_SMALL("MS Windows Media Video V9 Screen"),
};