interplayvideo.c
/*
 * Interplay MVE Video Decoder
 * Copyright (c) 2003 The FFmpeg Project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder presently only supports a PAL8 output colorspace.
 *
 * An Interplay video frame consists of 2 parts: The decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */
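
/*
 * Illustrative sketch of the packet layout this decoder expects from the
 * demuxer, as implied by ipvideo_decode_frame() below (not part of the
 * original comment):
 *
 *   offset 0: decoding map size   (16-bit little-endian)
 *   offset 2: decoding map        (one 4-bit opcode per 8x8 block, raster order)
 *   then:     video data          (consumed opcode by opcode)
 */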

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavutil/intreadwrite.h"
#include "avcodec.h"
#include "bytestream.h"
#include "hpeldsp.h"
#define BITSTREAM_READER_LE
#include "get_bits.h"
#include "internal.h"

#define PALETTE_COUNT 256

typedef struct IpvideoContext {

    AVCodecContext *avctx;
    HpelDSPContext hdsp;
    AVFrame *second_last_frame;
    AVFrame *last_frame;
    const unsigned char *decoding_map;
    int decoding_map_size;

    int is_16bpp;
    GetByteContext stream_ptr, mv_ptr;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

    uint32_t pal[256];
} IpvideoContext;

static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
{
    int current_offset = s->pixel_ptr - dst->data[0];
    int motion_offset = current_offset + delta_y * dst->linesize[0]
                      + delta_x * (1 + s->is_16bpp);
    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
        return AVERROR_INVALIDDATA;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return AVERROR_INVALIDDATA;
    }
    if (!src->data[0]) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
        return AVERROR(EINVAL);
    }
    s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                            dst->linesize[0], 8);
    return 0;
}
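
/*
 * Worked example for copy_from() above (illustrative): with a PAL8 frame
 * (is_16bpp == 0), delta_x = -2 and delta_y = 3 give
 *   motion_offset = current_offset + 3 * linesize - 2,
 * i.e. the 8x8 source block starts 3 rows down and 2 pixels to the left of
 * the destination block; in 16bpp mode the horizontal term is doubled.
 */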

static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->last_frame, frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->second_last_frame, frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    if (B < 56) {
        x =  8 + (B % 7);
        y =       B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}
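
/*
 * Worked example for the opcode 0x2 motion byte (illustrative):
 *   B = 30  -> B < 56:  x =  8 + (30 % 7) = 10,  y = 30 / 7 = 4
 *   B = 79  -> B >= 56: x = -14 + (23 % 29) = 9, y =  8 + 23 / 29 = 8
 * Opcode 0x3 below uses the same mapping with the signs flipped.
 */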

static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}
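
/*
 * Worked example for the opcode 0x4 motion nibbles (illustrative):
 *   B = 0x9C -> BL = 0xC, BH = 0x9, so x = -8 + 12 = 4 and y = -8 + 9 = 1,
 * giving a vector in the range [-8, 7] on each axis.
 */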

static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x7\n");
        return AVERROR_INVALIDDATA;
    }

    /* 2-color encoding */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}
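
/*
 * Note on opcode 0x7 above (illustrative): the ordering of the two colors
 * selects the mode.  If P[0] <= P[1], each of the next 8 bytes holds one row
 * of 1-bit selectors; OR-ing in 0x100 gives the inner loop a sentinel, so it
 * stops after exactly 8 shifts (flags == 1).  Otherwise a single 16-bit word
 * supplies one bit per 2x2 sub-block.
 */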

static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 12) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x8\n");
        return AVERROR_INVALIDDATA;
    }

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_byte(&s->stream_ptr);
                    P[1] = bytestream2_get_byte(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_byte(&s->stream_ptr);
        P[3] = bytestream2_get_byte(&s->stream_ptr);

        if (P[2] <= P[3]) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}
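
/*
 * Traversal note for opcode 0x8 above (illustrative): the 16-iteration loops
 * walk a 4-pixel-wide column.  Rows 0-7 cover the left half of the 8x8 block;
 * the "y == 7" adjustment rewinds 8 rows and steps 4 pixels right, so rows
 * 8-15 cover the right half.  In quadrant mode new colors and flags are
 * fetched every 4 rows, i.e. per 4x4 quadrant.
 */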

static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 8) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x9\n");
        return AVERROR_INVALIDDATA;
    }

    /* 4-color encoding */
    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}
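
/*
 * Mode summary for opcode 0x9 above (illustrative): the relative order of the
 * four palette entries selects the granularity of the 2-bit selectors:
 *   P[0] <= P[1] && P[2] <= P[3]  -> one selector per pixel      (16 bytes)
 *   P[0] <= P[1] && P[2] >  P[3]  -> one selector per 2x2 block  (4 bytes)
 *   P[0] >  P[1] && P[2] <= P[3]  -> one selector per 2x1 block  (8 bytes)
 *   P[0] >  P[1] && P[2] >  P[3]  -> one selector per 1x2 block  (8 bytes)
 */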

static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[8];
    int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
        return AVERROR_INVALIDDATA;
    }

    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
        vert = P[4] <= P[5];

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 4);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    for (y = 0; y < 8; y++) {
        bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char P[2];

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xD\n");
        return AVERROR_INVALIDDATA;
    }

    /* 4-color block encoding: each 4x4 block is a different color */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_byte(&s->stream_ptr);
            P[1] = bytestream2_get_byte(&s->stream_ptr);
        }
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    pix = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    sample[0] = bytestream2_get_byte(&s->stream_ptr);
    sample[1] = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}
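
/*
 * Worked example for the dithered opcode 0xF above (illustrative): with
 * sample[] = { A, B }, even rows come out as A B A B A B A B and odd rows as
 * B A B A B A B A, i.e. a 1-pixel checkerboard of the two colors.
 */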

static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the second last frame using an expanded range */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}
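
/*
 * Note on the *_16 variants (illustrative): pixels are 15-bit RGB555 values,
 * so bit 15 of the first color is free to carry the mode flag.  Testing
 * "P[0] & 0x8000" here plays the same role as the "P[0] <= P[1]" comparison
 * in the 8-bit handlers above.
 */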

static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_le16(&s->stream_ptr);
                    P[1] = bytestream2_get_le16(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_le16(&s->stream_ptr);
        P[3] = bytestream2_get_le16(&s->stream_ptr);

        if (!(P[2] & 0x8000)) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[8];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (!(P[0] & 0x8000)) {

        /* 4-color encoding for each quadrant */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y)
                    for (x = 0; x < 4; x++)
                        P[x] = bytestream2_get_le16(&s->stream_ptr);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        for (x = 4; x < 8; x++)
            P[x] = bytestream2_get_le16(&s->stream_ptr);
        vert = !(P[4] & 0x8000);

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 8);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 64-color encoding (each pixel in block is a different color) */
    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color block encoding: each 4x4 block is a different color */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_le16(&s->stream_ptr);
            P[1] = bytestream2_get_le16(&s->stream_ptr);
        }
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = P[x >> 2];
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t pix;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 1-color encoding: the whole block is 1 solid color */
    pix = bytestream2_get_le16(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = pix;
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0, ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2, ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4, ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6, ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8, ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA, ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC, ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE, ipvideo_decode_block_opcode_0xF,
};

static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};

static void ipvideo_decode_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char opcode;
    int ret;
    GetBitContext gb;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);

        s->stride = frame->linesize[0];
    } else {
        s->stride = frame->linesize[0] >> 1;
        s->mv_ptr = s->stream_ptr;
        bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                  + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            opcode = get_bits(&gb, 4);

            ff_tlog(s->avctx,
                    "  block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
                    x, y, opcode, bytestream2_tell(&s->stream_ptr));

            if (!s->is_16bpp) {
                s->pixel_ptr = frame->data[0] + x
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block[opcode](s, frame);
            } else {
                s->pixel_ptr = frame->data[0] + x*2
                              + y*frame->linesize[0];
                ret = ipvideo_decode_block16[opcode](s, frame);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
                       s->avctx->frame_number, x, y);
                return;
            }
        }
    }
    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}
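
/*
 * Worked example for the decoding map (illustrative): a 320x200 frame has
 * (320/8) * (200/8) = 40 * 25 = 1000 blocks, so the map holds 1000 4-bit
 * opcodes = 500 bytes, read here in raster order with get_bits().
 */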

static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;

    ff_hpeldsp_init(&s->hdsp, avctx->flags);

    s->last_frame        = av_frame_alloc();
    s->second_last_frame = av_frame_alloc();
    if (!s->last_frame || !s->second_last_frame) {
        av_frame_free(&s->last_frame);
        av_frame_free(&s->second_last_frame);
        return AVERROR(ENOMEM);
    }

    return 0;
}

static int ipvideo_decode_frame(AVCodecContext *avctx,
                                void *data, int *got_frame,
                                AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    AVFrame *frame = data;
    int ret;

    if (buf_size < 2)
        return AVERROR_INVALIDDATA;

    /* decoding map contains 4 bits of information per 8x8 block */
    s->decoding_map_size = AV_RL16(avpkt->data);

    /* compressed buffer needs to be large enough to at least hold an entire
     * decoding map */
    if (buf_size < s->decoding_map_size + 2)
        return buf_size;

    if (av_packet_get_side_data(avpkt, AV_PKT_DATA_PARAM_CHANGE, NULL)) {
        av_frame_unref(s->last_frame);
        av_frame_unref(s->second_last_frame);
    }

    s->decoding_map = buf + 2;
    bytestream2_init(&s->stream_ptr, buf + 2 + s->decoding_map_size,
                     buf_size - s->decoding_map_size);

    if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
        return ret;

    if (!s->is_16bpp) {
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
        if (pal) {
            frame->palette_has_changed = 1;
            memcpy(s->pal, pal, AVPALETTE_SIZE);
        }
    }

    ipvideo_decode_opcodes(s, frame);

    *got_frame = 1;

    /* shuffle frames */
    av_frame_unref(s->second_last_frame);
    FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
    if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
        return ret;

    /* report that the buffer was completely consumed */
    return buf_size;
}

static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    av_frame_free(&s->last_frame);
    av_frame_free(&s->second_last_frame);

    return 0;
}

AVCodec ff_interplay_video_decoder = {
    .name           = "interplayvideo",
    .long_name      = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    .decode         = ipvideo_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
};
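
/*
 * Minimal usage sketch (illustrative, not part of this file): driving the
 * decoder above through the public libavcodec API of the same era
 * (avcodec_decode_video2()).  It assumes the codecs have been registered
 * (avcodec_register_all()) and that width/height and the packet described at
 * the top of the file come from the Interplay MVE demuxer; the function name
 * is hypothetical and error handling is abbreviated.
 */
#if 0
static int decode_one_mve_packet(AVPacket *pkt, int width, int height)
{
    AVCodec *codec      = avcodec_find_decoder(AV_CODEC_ID_INTERPLAY_VIDEO);
    AVCodecContext *ctx = avcodec_alloc_context3(codec);
    AVFrame *frm        = av_frame_alloc();
    int got_frame = 0, ret;

    if (!codec || !ctx || !frm)
        return AVERROR(ENOMEM);

    ctx->width                 = width;
    ctx->height                = height;
    ctx->bits_per_coded_sample = 8;   /* 8 selects PAL8 output, 16 selects RGB555 */

    if ((ret = avcodec_open2(ctx, codec, NULL)) < 0)
        return ret;

    /* pkt->data must hold: decoding map size, decoding map, video data */
    ret = avcodec_decode_video2(ctx, frm, &got_frame, pkt);
    if (ret >= 0 && got_frame) {
        /* frm->data[0] now holds the decoded picture; for PAL8 output,
         * frm->data[1] holds the 256-entry palette */
    }

    av_frame_free(&frm);
    avcodec_free_context(&ctx);
    return ret;
}
#endif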