/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 the ffmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file libavcodec/interplayvideo.c
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently only supports a PAL8 output colorspace.
 *
 * An Interplay video frame consists of 2 parts: the decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */
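/* A rough sketch of the buffer layout this decoder expects, inferred from the
 * code below (the decoding-map size comes from the 4-bits-per-8x8-block rule
 * in ipvideo_decode_init(); the video data is read starting 14 bytes into its
 * part of the buffer in ipvideo_decode_opcodes()):
 *
 *   +--------------------------------+-------------------------------------+
 *   | decoding map                   | video data                          |
 *   | (width * height / 128 bytes,   | (14-byte preamble, then the byte    |
 *   |  one nibble per 8x8 block)     |  stream consumed by the opcodes)    |
 *   +--------------------------------+-------------------------------------+
 */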
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "avcodec.h"
#include "bytestream.h"
#include "dsputil.h"
#define PALETTE_COUNT 256

/* debugging support */
#define DEBUG_INTERPLAY 0
#if DEBUG_INTERPLAY
#define debug_interplay(x,...) av_log(NULL, AV_LOG_DEBUG, x, __VA_ARGS__)
#else
static inline void debug_interplay(const char *format, ...) { }
#endif
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;
    AVFrame last_frame;
    AVFrame current_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    const unsigned char *buf;
    int size;

    const unsigned char *stream_ptr;
    const unsigned char *stream_end;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

} IpvideoContext;
#define CHECK_STREAM_PTR(n) \
    if (s->stream_end - s->stream_ptr < n) { \
        av_log(s->avctx, AV_LOG_ERROR, "Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
               s->stream_ptr + n, s->stream_end); \
        return -1; \
    }
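/* Note: CHECK_STREAM_PTR expands to an early "return -1", so it is only
 * intended for use inside the int-returning decode functions below, where the
 * context pointer is named "s". */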
static int copy_from(IpvideoContext *s, AVFrame *src, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - s->current_frame.data[0];
    int motion_offset = current_offset + delta_y * s->stride + delta_x;

    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
        return -1;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
            motion_offset, s->upper_motion_limit_offset);
        return -1;
    }
    s->dsp.put_pixels_tab[1][0](s->pixel_ptr, src->data[0] + motion_offset, s->stride, 8);
    return 0;
}
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)
{
    return copy_from(s, &s->last_frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)
{
    return copy_from(s, &s->second_last_frame, 0, 0);
}
static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    CHECK_STREAM_PTR(1);
    B = *s->stream_ptr++;

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->second_last_frame, x, y);
}
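/* Worked example of the motion-byte mapping above, for the B >= 56 case:
 * B = 56 yields (x, y) = (-14, 8), while B = 255 yields
 * x = ((255 - 56) % 29) - 14 = 11 and y = 8 + (255 - 56) / 29 = 14. */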
static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    CHECK_STREAM_PTR(1);
    B = *s->stream_ptr++;

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->current_frame, x, y);
}
static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    CHECK_STREAM_PTR(1);

    B = *s->stream_ptr++;
    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    debug_interplay (" motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->last_frame, x, y);
}
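/* Worked example of the nibble split above: B = 0x79 gives BL = 9 and BH = 7,
 * so the motion vector is (x, y) = (-8 + 9, -8 + 7) = (1, -1). */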
static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    CHECK_STREAM_PTR(2);

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    debug_interplay (" motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->last_frame, x, y);
}
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    /* 2-color encoding */
    CHECK_STREAM_PTR(2);

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        CHECK_STREAM_PTR(8);

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++;
            for (x = 0x01; x <= 0x80; x <<= 1) {
                *s->pixel_ptr++ = P[!!(flags & x)];
            }
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        CHECK_STREAM_PTR(2);

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)
{
    int x, y;
    unsigned char P[8];
    unsigned char B[8];
    unsigned int flags = 0;
    unsigned int bitmask = 0;
    unsigned char P0 = 0, P1 = 0;
    int lower_half = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(2);
    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

    if (P[0] <= P[1]) {

        /* 2-color encoding per quadrant; need 14 more bytes (the 7 read
         * pairs below), not 12 */
        CHECK_STREAM_PTR(14);
        B[0] = *s->stream_ptr++;  B[1] = *s->stream_ptr++;
        P[2] = *s->stream_ptr++;  P[3] = *s->stream_ptr++;
        B[2] = *s->stream_ptr++;  B[3] = *s->stream_ptr++;
        P[4] = *s->stream_ptr++;  P[5] = *s->stream_ptr++;
        B[4] = *s->stream_ptr++;  B[5] = *s->stream_ptr++;
        P[6] = *s->stream_ptr++;  P[7] = *s->stream_ptr++;
        B[6] = *s->stream_ptr++;  B[7] = *s->stream_ptr++;

        for (y = 0; y < 8; y++) {

            /* time to reload flags? */
            if (y == 0) {
                flags =
                    ((B[0] & 0xF0) <<  4) | ((B[4] & 0xF0) <<  8) |
                    ((B[0] & 0x0F)      ) | ((B[4] & 0x0F) <<  4) |
                    ((B[1] & 0xF0) << 20) | ((B[5] & 0xF0) << 24) |
                    ((B[1] & 0x0F) << 16) | ((B[5] & 0x0F) << 20);
                lower_half = 0; /* still on top half */
            } else if (y == 4) {
                flags =
                    ((B[2] & 0xF0) <<  4) | ((B[6] & 0xF0) <<  8) |
                    ((B[2] & 0x0F)      ) | ((B[6] & 0x0F) <<  4) |
                    ((B[3] & 0xF0) << 20) | ((B[7] & 0xF0) << 24) |
                    ((B[3] & 0x0F) << 16) | ((B[7] & 0x0F) << 20);
                lower_half = 2; /* now on bottom half */
            }

            for (x = 0; x < 8; x++, flags >>= 1) {
                /* get the pixel values ready for this quadrant */
                if (x < 4) {
                    P0 = P[lower_half + 0];
                    P1 = P[lower_half + 1];
                } else {
                    P0 = P[lower_half + 4];
                    P1 = P[lower_half + 5];
                }
                *s->pixel_ptr++ = flags & 1 ? P1 : P0;
            }
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 10 more bytes */
        CHECK_STREAM_PTR(10);

        if (s->stream_ptr[4] <= s->stream_ptr[5]) {

            B[0] = *s->stream_ptr++;  B[1] = *s->stream_ptr++;
            B[2] = *s->stream_ptr++;  B[3] = *s->stream_ptr++;
            P[2] = *s->stream_ptr++;  P[3] = *s->stream_ptr++;
            B[4] = *s->stream_ptr++;  B[5] = *s->stream_ptr++;
            B[6] = *s->stream_ptr++;  B[7] = *s->stream_ptr++;

            /* vertical split; left & right halves are 2-color encoded */
            for (y = 0; y < 8; y++) {

                /* time to reload flags? */
                if (y == 0) {
                    flags =
                        ((B[0] & 0xF0) <<  4) | ((B[4] & 0xF0) <<  8) |
                        ((B[0] & 0x0F)      ) | ((B[4] & 0x0F) <<  4) |
                        ((B[1] & 0xF0) << 20) | ((B[5] & 0xF0) << 24) |
                        ((B[1] & 0x0F) << 16) | ((B[5] & 0x0F) << 20);
                } else if (y == 4) {
                    flags =
                        ((B[2] & 0xF0) <<  4) | ((B[6] & 0xF0) <<  8) |
                        ((B[2] & 0x0F)      ) | ((B[6] & 0x0F) <<  4) |
                        ((B[3] & 0xF0) << 20) | ((B[7] & 0xF0) << 24) |
                        ((B[3] & 0x0F) << 16) | ((B[7] & 0x0F) << 20);
                }

                for (x = 0; x < 8; x++, flags >>= 1) {
                    /* get the pixel values ready for this half */
                    if (x < 4) {
                        P0 = P[0];
                        P1 = P[1];
                    } else {
                        P0 = P[2];
                        P1 = P[3];
                    }
                    *s->pixel_ptr++ = flags & 1 ? P1 : P0;
                }
                s->pixel_ptr += s->line_inc;
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */
            for (y = 0; y < 8; y++) {

                /* time to load the colors for the bottom half? */
                if (y == 4) {
                    P[0] = *s->stream_ptr++;
                    P[1] = *s->stream_ptr++;
                }
                flags = *s->stream_ptr++;

                for (bitmask = 0x01; bitmask <= 0x80; bitmask <<= 1) {
                    *s->pixel_ptr++ = P[!!(flags & bitmask)];
                }
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)
{
    int x, y;
    unsigned char P[4];

    /* 4-color encoding */
    CHECK_STREAM_PTR(4);

    memcpy(P, s->stream_ptr, 4);
    s->stream_ptr += 4;

    if ((P[0] <= P[1]) && (P[2] <= P[3])) {

        /* 1 of 4 colors for each pixel, need 16 more bytes */
        CHECK_STREAM_PTR(16);

        for (y = 0; y < 8; y++) {
            /* get the next set of 8 2-bit flags */
            int flags = bytestream_get_le16(&s->stream_ptr);
            for (x = 0; x < 8; x++, flags >>= 2) {
                *s->pixel_ptr++ = P[flags & 0x03];
            }
            s->pixel_ptr += s->line_inc;
        }

    } else if ((P[0] <= P[1]) && (P[2] > P[3])) {
        uint32_t flags;

        /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
        CHECK_STREAM_PTR(4);

        flags = bytestream_get_le32(&s->stream_ptr);

        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 2) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
            }
            s->pixel_ptr += s->stride * 2;
        }

    } else if ((P[0] > P[1]) && (P[2] <= P[3])) {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 block, need 8 more bytes */
        CHECK_STREAM_PTR(8);

        flags = bytestream_get_le64(&s->stream_ptr);
        for (y = 0; y < 8; y++) {
            for (x = 0; x < 8; x += 2, flags >>= 2) {
                s->pixel_ptr[x    ] =
                s->pixel_ptr[x + 1] = P[flags & 0x03];
            }
            s->pixel_ptr += s->stride;
        }

    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 1x2 block, need 8 more bytes */
        CHECK_STREAM_PTR(8);

        flags = bytestream_get_le64(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x++, flags >>= 2) {
                s->pixel_ptr[x            ] =
                s->pixel_ptr[x + s->stride] = P[flags & 0x03];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)
{
    int x, y;
    unsigned char P[16];
    unsigned char B[16];
    int flags = 0;
    int split;
    int lower_half;
    int index;

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    CHECK_STREAM_PTR(4);

    memcpy(P, s->stream_ptr, 4);
    s->stream_ptr += 4;

    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 28 more bytes */
        CHECK_STREAM_PTR(28);

        memcpy(B, s->stream_ptr, 4);
        s->stream_ptr += 4;
        for (y = 4; y < 16; y += 4) {
            memcpy(P + y, s->stream_ptr, 4);
            s->stream_ptr += 4;
            memcpy(B + y, s->stream_ptr, 4);
            s->stream_ptr += 4;
        }

        for (y = 0; y < 8; y++) {

            lower_half = (y >= 4) ? 4 : 0;
            flags = (B[y + 8] << 8) | B[y];

            for (x = 0; x < 8; x++, flags >>= 2) {
                split = (x >= 4) ? 8 : 0;
                index = split + lower_half + (flags & 0x03);
                *s->pixel_ptr++ = P[index];
            }

            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* 4-color encoding for either left and right or top and bottom
         * halves; need 20 more bytes */
        CHECK_STREAM_PTR(20);

        memcpy(B, s->stream_ptr, 8);
        s->stream_ptr += 8;
        memcpy(P + 4, s->stream_ptr, 4);
        s->stream_ptr += 4;
        memcpy(B + 8, s->stream_ptr, 8);
        s->stream_ptr += 8;

        if (P[4] <= P[5]) {

            /* block is divided into left and right halves */
            for (y = 0; y < 8; y++) {

                flags = (B[y + 8] << 8) | B[y];

                for (x = 0; x < 8; x++, flags >>= 2) {
                    split = (x >= 4) ? 4 : 0;
                    *s->pixel_ptr++ = P[split + (flags & 0x03)];
                }

                s->pixel_ptr += s->line_inc;
            }

        } else {

            /* block is divided into top and bottom halves */
            for (y = 0; y < 8; y++) {

                flags = (B[y * 2 + 1] << 8) | B[y * 2];
                split = (y >= 4) ? 4 : 0;

                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[split + (flags & 0x03)];

                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(64);

    for (y = 0; y < 8; y++) {
        memcpy(s->pixel_ptr, s->stream_ptr, 8);
        s->stream_ptr += 8;
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(16);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = *s->stream_ptr++;
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)
{
    int y;
    unsigned char P[4];
    unsigned char index = 0;

    /* 4-color block encoding: each 4x4 block is a different color */
    CHECK_STREAM_PTR(4);

    memcpy(P, s->stream_ptr, 4);
    s->stream_ptr += 4;

    for (y = 0; y < 8; y++) {
        index = (y < 4) ? 0 : 2; /* top half uses P[0]/P[1], bottom half P[2]/P[3] */
        memset(s->pixel_ptr    , P[index    ], 4);
        memset(s->pixel_ptr + 4, P[index + 1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    CHECK_STREAM_PTR(1);
    pix = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}
static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    CHECK_STREAM_PTR(2);
    sample[0] = *s->stream_ptr++;
    sample[1] = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}
static int (* const ipvideo_decode_block[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};
static void ipvideo_decode_opcodes(IpvideoContext *s)
{
    int x, y;
    int index = 0;
    unsigned char opcode;
    int ret;
    int code_counts[16] = {0};
    static int frame = 0;

    debug_interplay("------------------ frame %d\n", frame);
    frame++;

    /* this is PAL8, so make the palette available */
    memcpy(s->current_frame.data[1], s->avctx->palctrl->palette, PALETTE_COUNT * 4);

    s->stride = s->current_frame.linesize[0];
    s->stream_ptr = s->buf + 14;  /* data starts 14 bytes in */
    s->stream_end = s->buf + s->size;
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * s->stride
        + s->avctx->width - 8;

    for (y = 0; y < (s->stride * s->avctx->height); y += s->stride * 8) {
        for (x = y; x < y + s->avctx->width; x += 8) {
            /* bottom nibble first, then top nibble (which makes it
             * hard to use a GetBitContext) */
            if (index & 1)
                opcode = s->decoding_map[index >> 1] >> 4;
            else
                opcode = s->decoding_map[index >> 1] & 0xF;
            index++;

            debug_interplay(" block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
                            x - y, y / s->stride, opcode, s->stream_ptr);
            code_counts[opcode]++;

            s->pixel_ptr = s->current_frame.data[0] + x;
            ret = ipvideo_decode_block[opcode](s);
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                       frame, x - y, y / s->stride);
                return;
            }
        }
    }

    if (s->stream_end - s->stream_ptr > 1) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode finished with %td bytes left over\n",
               s->stream_end - s->stream_ptr);
    }
}
static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    if (s->avctx->palctrl == NULL) {
        av_log(avctx, AV_LOG_ERROR, " Interplay video: palette expected.\n");
        return -1;
    }

    avctx->pix_fmt = PIX_FMT_PAL8;
    dsputil_init(&s->dsp, avctx);

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);
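    /* e.g. a 320x240 stream needs 320 * 240 / 128 = 600 bytes of decoding
     * map: one nibble for each of the 40 * 30 = 1200 8x8 blocks */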
    s->current_frame.data[0] = s->last_frame.data[0] =
        s->second_last_frame.data[0] = NULL;

    return 0;
}
static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                const uint8_t *buf, int buf_size)
{
    IpvideoContext *s = avctx->priv_data;
    AVPaletteControl *palette_control = avctx->palctrl;

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    s->buf = buf + s->decoding_map_size;
    s->size = buf_size - s->decoding_map_size;

    s->current_frame.reference = 3;
    if (avctx->get_buffer(avctx, &s->current_frame)) {
        av_log(avctx, AV_LOG_ERROR, " Interplay Video: get_buffer() failed\n");
        return -1;
    }

    ipvideo_decode_opcodes(s);

    if (palette_control->palette_changed) {
        palette_control->palette_changed = 0;
        s->current_frame.palette_has_changed = 1;
    }

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    /* shuffle frames */
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL;  /* catch any access attempts */

    /* report that the buffer was completely consumed */
    return buf_size;
}
static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    /* release the last frame */
    if (s->last_frame.data[0])
        avctx->release_buffer(avctx, &s->last_frame);
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);

    return 0;
}
AVCodec interplay_video_decoder = {
    "interplayvideo",
    CODEC_TYPE_VIDEO,
    CODEC_ID_INTERPLAY_VIDEO,
    sizeof(IpvideoContext),
    ipvideo_decode_init,
    NULL,
    ipvideo_decode_end,
    ipvideo_decode_frame,
    CODEC_CAP_DR1,
    .long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
};