/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 the ffmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file libavcodec/interplayvideo.c
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently only supports a PAL8 output colorspace.
 *
 * An Interplay video frame consists of 2 parts: the decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */
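
/* Rough sketch of the combined buffer layout this decoder expects, inferred
 * from ipvideo_decode_frame() and ipvideo_decode_opcodes() below (the
 * contents of the 14-byte video-data preamble are not interpreted here):
 *
 *   +---------------------------+-----------------------------------------+
 *   | decoding map              | video data                              |
 *   | (width*height/128 bytes,  | (14-byte preamble, then the opcode      |
 *   |  one nibble per 8x8 block)|  stream consumed by the handlers below) |
 *   +---------------------------+-----------------------------------------+
 */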

#include <string.h>

#include "avcodec.h"
#include "bytestream.h"
#include "dsputil.h"

#define PALETTE_COUNT 256

/* debugging support */
#define DEBUG_INTERPLAY 0
#if DEBUG_INTERPLAY
#define debug_interplay(x,...) av_log(NULL, AV_LOG_DEBUG, x, __VA_ARGS__)
#else
static inline void debug_interplay(const char *format, ...) { }
#endif

typedef struct IpvideoContext {

    AVCodecContext *avctx;
    DSPContext dsp;
    AVFrame second_last_frame;
    AVFrame last_frame;
    AVFrame current_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    const unsigned char *buf;
    int size;

    const unsigned char *stream_ptr;
    const unsigned char *stream_end;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

} IpvideoContext;

#define CHECK_STREAM_PTR(n) \
    if (s->stream_end - s->stream_ptr < n) { \
        av_log(s->avctx, AV_LOG_ERROR, "Interplay video warning: stream_ptr out of bounds (%p >= %p)\n", \
            s->stream_ptr + n, s->stream_end); \
        return -1; \
    }

static int copy_from(IpvideoContext *s, AVFrame *src, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - s->current_frame.data[0];
    int motion_offset = current_offset + delta_y * s->stride + delta_x;

    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset < 0 (%d)\n", motion_offset);
        return -1;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: motion offset above limit (%d >= %d)\n",
            motion_offset, s->upper_motion_limit_offset);
        return -1;
    }
    s->dsp.put_pixels_tab[1][0](s->pixel_ptr, src->data[0] + motion_offset, s->stride, 8);
    return 0;
}
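
/* Worked example (illustrative numbers only): for a 320x240 PAL8 frame with
 * stride 320 (the stride comes from linesize[0], so it may well be larger
 * than the width), a block at the top-left corner referencing
 * (delta_x, delta_y) = (5, 4) yields motion_offset = 4 * 320 + 5 = 1285,
 * comfortably below upper_motion_limit_offset = (240 - 8) * 320 + 320 - 8
 * = 74552, so the put_pixels copy above is allowed to proceed. */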

static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s)

    return copy_from(s, &s->last_frame, 0, 0);

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s)

    return copy_from(s, &s->second_last_frame, 0, 0);

static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s)

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */

    B = *s->stream_ptr++;

    x = -14 + ((B - 56) % 29);
    y =   8 + ((B - 56) / 29);

    debug_interplay ("  motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->second_last_frame, x, y);
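
/* Worked example for the branch shown above (the branch for B < 56 is not
 * visible in this excerpt): B = 56 gives (x, y) = (-14, 8), while B = 200
 * gives x = (200 - 56) % 29 - 14 = 14 and y = (200 - 56) / 29 + 8 = 12,
 * i.e. a copy from 14 pixels right and 12 rows down in the frame from two
 * frames ago. */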

static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s)

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */

    B = *s->stream_ptr++;

    x = -(-14 + ((B - 56) % 29));
    y = -(  8 + ((B - 56) / 29));

    debug_interplay ("  motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->current_frame, x, y);
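
/* Same mapping as opcode 0x2, but negated: B = 56 gives (x, y) = (14, -8)
 * and B = 200 gives (-14, -12). Since y is always -8 or less, the source
 * block lies at least one block row above and has already been decoded in
 * the current frame. */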

static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s)

    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    B = *s->stream_ptr++;

    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    debug_interplay ("  motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, &s->last_frame, x, y);
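
/* Example, assuming the -8 bias shown above: B = 0x3A splits into BL = 0xA
 * and BH = 0x3, giving (x, y) = (2, -5), i.e. a copy from 2 pixels right and
 * 5 rows up in the previous frame. */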

static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s)

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */

    x = *s->stream_ptr++;
    y = *s->stream_ptr++;

    debug_interplay ("  motion bytes = %d, %d\n", x, y);
    return copy_from(s, &s->last_frame, x, y);

static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s)

    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, " Interplay video: Help! Mystery opcode 0x6 seen\n");

static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s)

    /* 2-color encoding */

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

        /* need 8 more bytes from the stream */

        for (y = 0; y < 8; y++) {
            flags = *s->stream_ptr++;
            for (x = 0x01; x <= 0x80; x <<= 1) {
                *s->pixel_ptr++ = P[!!(flags & x)];
            s->pixel_ptr += s->line_inc;

        /* need 2 more bytes from the stream */

        flags = bytestream_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            s->pixel_ptr += s->stride * 2;
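
/* Both sub-modes above (the selection between them is not visible in this
 * excerpt, but depends on how P[0] compares with P[1]) read their flag bits
 * LSB first. In the per-pixel mode one byte covers one row: flags = 0xB6
 * (binary 10110110) paints P[0] P[1] P[1] P[0] P[1] P[1] P[0] P[1] from left
 * to right. In the 2x2 mode a single little-endian 16-bit word supplies one
 * bit per 2x2 block, left to right, top to bottom. */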

static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s)

    unsigned int flags = 0;
    unsigned char P0 = 0, P1 = 0;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */

    P[0] = *s->stream_ptr++;
    P[1] = *s->stream_ptr++;

        /* need 12 more bytes */
        CHECK_STREAM_PTR(12);
        B[0] = *s->stream_ptr++; B[1] = *s->stream_ptr++;
        P[2] = *s->stream_ptr++; P[3] = *s->stream_ptr++;
        B[2] = *s->stream_ptr++; B[3] = *s->stream_ptr++;
        P[4] = *s->stream_ptr++; P[5] = *s->stream_ptr++;
        B[4] = *s->stream_ptr++; B[5] = *s->stream_ptr++;
        P[6] = *s->stream_ptr++; P[7] = *s->stream_ptr++;
        B[6] = *s->stream_ptr++; B[7] = *s->stream_ptr++;

        for (y = 0; y < 8; y++) {

            /* time to reload flags? */
                ((B[0] & 0xF0) << 4)  | ((B[4] & 0xF0) << 8)  |
                ((B[0] & 0x0F)     )  | ((B[4] & 0x0F) << 4)  |
                ((B[1] & 0xF0) << 20) | ((B[5] & 0xF0) << 24) |
                ((B[1] & 0x0F) << 16) | ((B[5] & 0x0F) << 20);
                lower_half = 0; /* still on top half */

                ((B[2] & 0xF0) << 4)  | ((B[6] & 0xF0) << 8)  |
                ((B[2] & 0x0F)     )  | ((B[6] & 0x0F) << 4)  |
                ((B[3] & 0xF0) << 20) | ((B[7] & 0xF0) << 24) |
                ((B[3] & 0x0F) << 16) | ((B[7] & 0x0F) << 20);

            for (x = 0; x < 8; x++, flags >>= 1) {
                /* get the pixel values ready for this quadrant */
                    P0 = P[lower_half + 0];
                    P1 = P[lower_half + 1];
                    P0 = P[lower_half + 4];
                    P1 = P[lower_half + 5];

                *s->pixel_ptr++ = flags & 1 ? P1 : P0;
            s->pixel_ptr += s->line_inc;

        /* need 10 more bytes */
        CHECK_STREAM_PTR(10);
        B[0] = *s->stream_ptr++; B[1] = *s->stream_ptr++;
        B[2] = *s->stream_ptr++; B[3] = *s->stream_ptr++;
        P[2] = *s->stream_ptr++; P[3] = *s->stream_ptr++;
        B[4] = *s->stream_ptr++; B[5] = *s->stream_ptr++;
        B[6] = *s->stream_ptr++; B[7] = *s->stream_ptr++;

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 8; y++) {

                /* time to reload flags? */
                    ((B[0] & 0xF0) << 4)  | ((B[4] & 0xF0) << 8)  |
                    ((B[0] & 0x0F)     )  | ((B[4] & 0x0F) << 4)  |
                    ((B[1] & 0xF0) << 20) | ((B[5] & 0xF0) << 24) |
                    ((B[1] & 0x0F) << 16) | ((B[5] & 0x0F) << 20);

                    ((B[2] & 0xF0) << 4)  | ((B[6] & 0xF0) << 8)  |
                    ((B[2] & 0x0F)     )  | ((B[6] & 0x0F) << 4)  |
                    ((B[3] & 0xF0) << 20) | ((B[7] & 0xF0) << 24) |
                    ((B[3] & 0x0F) << 16) | ((B[7] & 0x0F) << 20);

                for (x = 0; x < 8; x++, flags >>= 1) {
                    /* get the pixel values ready for this half */

                    *s->pixel_ptr++ = flags & 1 ? P1 : P0;
                s->pixel_ptr += s->line_inc;

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {

                for (bitmask = 0x01; bitmask <= 0x80; bitmask <<= 1) {

                    *s->pixel_ptr++ = flags & bitmask ? P1 : P0;
                s->pixel_ptr += s->line_inc;

static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s)

    unsigned int flags = 0;

    /* 4-color encoding */

    memcpy(P, s->stream_ptr, 4);

    if ((P[0] <= P[1]) && (P[2] <= P[3])) {

        /* 1 of 4 colors for each pixel, need 16 more bytes */
        CHECK_STREAM_PTR(16);

        for (y = 0; y < 8; y++) {
            /* get the next set of 8 2-bit flags */
            flags = bytestream_get_le16(&s->stream_ptr);
            for (x = 0; x < 8; x++, flags >>= 2) {
                *s->pixel_ptr++ = P[flags & 0x03];
            s->pixel_ptr += s->line_inc;

    } else if ((P[0] <= P[1]) && (P[2] > P[3])) {

        /* 1 of 4 colors for each 2x2 block, need 4 more bytes */

        flags = bytestream_get_le32(&s->stream_ptr);

        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 2) {
                pix = P[flags & 0x03];
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = pix;
            s->pixel_ptr += s->stride * 2;

    } else if ((P[0] > P[1]) && (P[2] <= P[3])) {

        /* 1 of 4 colors for each 2x1 block, need 8 more bytes */

        for (y = 0; y < 8; y++) {
            /* time to reload flags? */
            if ((y == 0) || (y == 4)) {
                flags = bytestream_get_le32(&s->stream_ptr);
            for (x = 0; x < 8; x += 2, flags >>= 2) {
                pix = P[flags & 0x03];
                s->pixel_ptr[x + 1] = pix;
            s->pixel_ptr += s->stride;

        /* 1 of 4 colors for each 1x2 block, need 8 more bytes */

        for (y = 0; y < 8; y += 2) {
            /* time to reload flags? */
            if ((y == 0) || (y == 4)) {
                flags = bytestream_get_le32(&s->stream_ptr);
            for (x = 0; x < 8; x++, flags >>= 2) {
                pix = P[flags & 0x03];
                s->pixel_ptr[x + s->stride] = pix;
            s->pixel_ptr += s->stride * 2;
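
/* All four sub-modes above consume their 2-bit color indices LSB first. For
 * instance, in the per-pixel mode a row word of 0x00E4 selects P[0], P[1],
 * P[2], P[3] for the first four pixels and P[0] for the remaining four. */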

static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s)

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */

    memcpy(P, s->stream_ptr, 4);

        /* 4-color encoding for each quadrant; need 28 more bytes */
        CHECK_STREAM_PTR(28);

        memcpy(B, s->stream_ptr, 4);

        for (y = 4; y < 16; y += 4) {
            memcpy(P + y, s->stream_ptr, 4);
            memcpy(B + y, s->stream_ptr, 4);

        for (y = 0; y < 8; y++) {

            lower_half = (y >= 4) ? 4 : 0;
            flags = (B[y + 8] << 8) | B[y];

            for (x = 0; x < 8; x++, flags >>= 2) {
                split = (x >= 4) ? 8 : 0;
                index = split + lower_half + (flags & 0x03);
                *s->pixel_ptr++ = P[index];

            s->pixel_ptr += s->line_inc;

        /* 4-color encoding for either left and right or top and bottom
         * halves; need 20 more bytes */
        CHECK_STREAM_PTR(20);

        memcpy(B, s->stream_ptr, 8);
        memcpy(P + 4, s->stream_ptr, 4);
        memcpy(B + 8, s->stream_ptr, 8);

            /* block is divided into left and right halves */
            for (y = 0; y < 8; y++) {

                flags = (B[y + 8] << 8) | B[y];

                for (x = 0; x < 8; x++, flags >>= 2) {

                    *s->pixel_ptr++ = P[split + (flags & 0x03)];

                s->pixel_ptr += s->line_inc;

            /* block is divided into top and bottom halves */

            for (y = 0; y < 8; y++) {

                flags = (B[y * 2 + 1] << 8) | B[y * 2];

                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[split + (flags & 0x03)];

                s->pixel_ptr += s->line_inc;

static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s)

    /* 64-color encoding (each pixel in block is a different color) */
    CHECK_STREAM_PTR(64);

    for (y = 0; y < 8; y++) {
        memcpy(s->pixel_ptr, s->stream_ptr, 8);
        s->pixel_ptr += s->stride;

static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s)

    /* 16-color block encoding: each 2x2 block is a different color */
    CHECK_STREAM_PTR(16);

    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pix = *s->stream_ptr++;
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = pix;
        s->pixel_ptr += s->stride * 2;

static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s)

    unsigned char index = 0;

    /* 4-color block encoding: each 4x4 block is a different color */

    memcpy(P, s->stream_ptr, 4);

    for (y = 0; y < 8; y++) {

        memset(s->pixel_ptr,     P[index],     4);
        memset(s->pixel_ptr + 4, P[index + 1], 4);
        s->pixel_ptr += s->stride;

static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s)

    /* 1-color encoding: the whole block is 1 solid color */

    pix = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;

static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s)

    unsigned char sample[2];

    /* dithered encoding */

    sample[0] = *s->stream_ptr++;
    sample[1] = *s->stream_ptr++;

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        s->pixel_ptr += s->line_inc;
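
/* With sample[0] = A and sample[1] = B, the loop above lays down a 2x2
 * checkerboard dither over the whole block: even rows read A B A B A B A B,
 * odd rows read B A B A B A B A. */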

static int (* const ipvideo_decode_block[])(IpvideoContext *s) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};

static void ipvideo_decode_opcodes(IpvideoContext *s)

    unsigned char opcode;

    int code_counts[16] = {0};
    static int frame = 0;

    debug_interplay("------------------ frame %d\n", frame);

    /* this is PAL8, so make the palette available */
    memcpy(s->current_frame.data[1], s->avctx->palctrl->palette, PALETTE_COUNT * 4);

    s->stride = s->current_frame.linesize[0];
    s->stream_ptr = s->buf + 14;  /* data starts 14 bytes in */
    s->stream_end = s->buf + s->size;
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * s->stride
        + s->avctx->width - 8;

    for (y = 0; y < (s->stride * s->avctx->height); y += s->stride * 8) {
        for (x = y; x < y + s->avctx->width; x += 8) {

            /* bottom nibble first, then top nibble (which makes it
             * hard to use a GetBitContext) */
            if (index & 1)
                opcode = s->decoding_map[index >> 1] >> 4;
            else
                opcode = s->decoding_map[index >> 1] & 0xF;
            index++;
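
            /* Example: a decoding-map byte of 0x42 yields opcode 0x2 for the
             * first (even-indexed) block it covers and opcode 0x4 for the
             * next one, since the low nibble is consumed first. */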

            debug_interplay("  block @ (%3d, %3d): encoding 0x%X, data ptr @ %p\n",
                x - y, y / s->stride, opcode, s->stream_ptr);
            code_counts[opcode]++;

            s->pixel_ptr = s->current_frame.data[0] + x;
            ret = ipvideo_decode_block[opcode](s);
                av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode problem on frame %d, @ block (%d, %d)\n",
                    frame, x - y, y / s->stride);

    if (s->stream_end - s->stream_ptr > 1) {
        av_log(s->avctx, AV_LOG_ERROR, " Interplay video: decode finished with %td bytes left over\n",
            s->stream_end - s->stream_ptr);

static av_cold int ipvideo_decode_init(AVCodecContext *avctx)

    IpvideoContext *s = avctx->priv_data;

    if (s->avctx->palctrl == NULL) {
        av_log(avctx, AV_LOG_ERROR, " Interplay video: palette expected.\n");

    avctx->pix_fmt = PIX_FMT_PAL8;
    dsputil_init(&s->dsp, avctx);

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = avctx->width * avctx->height / (8 * 8 * 2);
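
    /* e.g. a 320x240 stream uses 320 * 240 / 128 = 600 decoding-map bytes
     * per frame */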

    s->current_frame.data[0] = s->last_frame.data[0] =
        s->second_last_frame.data[0] = NULL;

static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *data_size,
                                const uint8_t *buf, int buf_size)

    IpvideoContext *s = avctx->priv_data;
    AVPaletteControl *palette_control = avctx->palctrl;

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size)
        return buf_size;

    s->decoding_map = buf;
    s->buf = buf + s->decoding_map_size;
    s->size = buf_size - s->decoding_map_size;

    s->current_frame.reference = 3;
    if (avctx->get_buffer(avctx, &s->current_frame)) {
        av_log(avctx, AV_LOG_ERROR, " Interplay Video: get_buffer() failed\n");

    ipvideo_decode_opcodes(s);

    if (palette_control->palette_changed) {
        palette_control->palette_changed = 0;
        s->current_frame.palette_has_changed = 1;

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->current_frame;

    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);
    s->second_last_frame = s->last_frame;
    s->last_frame = s->current_frame;
    s->current_frame.data[0] = NULL;  /* catch any access attempts */

    /* report that the buffer was completely consumed */

static av_cold int ipvideo_decode_end(AVCodecContext *avctx)

    IpvideoContext *s = avctx->priv_data;

    /* release the last frame */
    if (s->last_frame.data[0])
        avctx->release_buffer(avctx, &s->last_frame);
    if (s->second_last_frame.data[0])
        avctx->release_buffer(avctx, &s->second_last_frame);

AVCodec interplay_video_decoder = {
    CODEC_ID_INTERPLAY_VIDEO,
    sizeof(IpvideoContext),
    ipvideo_decode_frame,
    .long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
};