FFmpeg
interplayvideo.c
Go to the documentation of this file.
1 /*
2  * Interplay MVE Video Decoder
3  * Copyright (C) 2003 The FFmpeg project
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
25  * For more information about the Interplay MVE format, visit:
26  * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
27  * This code is written in such a way that the identifiers match up
28  * with the encoding descriptions in the document.
29  *
30  * This decoder presently only supports a PAL8 output colorspace.
31  *
32  * An Interplay video frame consists of 2 parts: The decoding map and
33  * the video data. A demuxer must load these 2 parts together in a single
34  * buffer before sending it through the stream to this decoder.
35  */
36 
37 #include <stdio.h>
38 #include <string.h>
39 
40 #include "libavutil/intreadwrite.h"
41 
42 #define BITSTREAM_READER_LE
43 #include "avcodec.h"
44 #include "bytestream.h"
45 #include "codec_internal.h"
46 #include "decode.h"
47 #include "get_bits.h"
48 #include "hpeldsp.h"
49 
50 #define PALETTE_COUNT 256
51 
52 typedef struct IpvideoContext {
53 
58 
59  /* For format 0x10 */
62 
63  const unsigned char *decoding_map;
65  const unsigned char *skip_map;
67 
68  int is_16bpp;
70  unsigned char *pixel_ptr;
71  int line_inc;
72  int stride;
74 
75  uint32_t pal[256];
77 
78 static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
79 {
80  int width = dst->width;
81  int current_offset = s->pixel_ptr - dst->data[0];
82  int x = (current_offset % dst->linesize[0]) / (1 + s->is_16bpp);
83  int y = current_offset / dst->linesize[0];
84  int dx = delta_x + x - ((delta_x + x >= width) - (delta_x + x < 0)) * width;
85  int dy = delta_y + y + (delta_x + x >= width) - (delta_x + x < 0);
86  int motion_offset = dy * src->linesize[0] + dx * (1 + s->is_16bpp);
87 
88  if (motion_offset < 0) {
89  av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
90  return AVERROR_INVALIDDATA;
91  } else if (motion_offset > s->upper_motion_limit_offset) {
92  av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
93  motion_offset, s->upper_motion_limit_offset);
94  return AVERROR_INVALIDDATA;
95  }
96  if (!src->data[0]) {
97  av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
98  return AVERROR(EINVAL);
99  }
100  s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
101  dst->linesize[0], 8);
102  return 0;
103 }
104 
106 {
107  return copy_from(s, s->last_frame, frame, 0, 0);
108 }
109 
111 {
112  return copy_from(s, s->second_last_frame, frame, 0, 0);
113 }
114 
116 {
117  unsigned char B;
118  int x, y;
119 
120  /* copy block from 2 frames ago using a motion vector; need 1 more byte */
121  if (!s->is_16bpp) {
122  B = bytestream2_get_byte(&s->stream_ptr);
123  } else {
124  B = bytestream2_get_byte(&s->mv_ptr);
125  }
126 
127  if (B < 56) {
128  x = 8 + (B % 7);
129  y = B / 7;
130  } else {
131  x = -14 + ((B - 56) % 29);
132  y = 8 + ((B - 56) / 29);
133  }
134 
135  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
136  return copy_from(s, s->second_last_frame, frame, x, y);
137 }
138 
140 {
141  unsigned char B;
142  int x, y;
143 
144  /* copy 8x8 block from current frame from an up/left block */
145 
146  /* need 1 more byte for motion */
147  if (!s->is_16bpp) {
148  B = bytestream2_get_byte(&s->stream_ptr);
149  } else {
150  B = bytestream2_get_byte(&s->mv_ptr);
151  }
152 
153  if (B < 56) {
154  x = -(8 + (B % 7));
155  y = -(B / 7);
156  } else {
157  x = -(-14 + ((B - 56) % 29));
158  y = -( 8 + ((B - 56) / 29));
159  }
160 
161  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
162  return copy_from(s, frame, frame, x, y);
163 }
164 
166 {
167  int x, y;
168  unsigned char B, BL, BH;
169 
170  /* copy a block from the previous frame; need 1 more byte */
171  if (!s->is_16bpp) {
172  B = bytestream2_get_byte(&s->stream_ptr);
173  } else {
174  B = bytestream2_get_byte(&s->mv_ptr);
175  }
176 
177  BL = B & 0x0F;
178  BH = (B >> 4) & 0x0F;
179  x = -8 + BL;
180  y = -8 + BH;
181 
182  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
183  return copy_from(s, s->last_frame, frame, x, y);
184 }
185 
187 {
188  signed char x, y;
189 
190  /* copy a block from the previous frame using an expanded range;
191  * need 2 more bytes */
192  x = bytestream2_get_byte(&s->stream_ptr);
193  y = bytestream2_get_byte(&s->stream_ptr);
194 
195  ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
196  return copy_from(s, s->last_frame, frame, x, y);
197 }
198 
200 {
201  /* mystery opcode? skip multiple blocks? */
202  av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");
203 
204  /* report success */
205  return 0;
206 }
207 
209 {
210  int x, y;
211  unsigned char P[2];
212  unsigned int flags;
213 
214  if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
215  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x7\n");
216  return AVERROR_INVALIDDATA;
217  }
218 
219  /* 2-color encoding */
220  P[0] = bytestream2_get_byte(&s->stream_ptr);
221  P[1] = bytestream2_get_byte(&s->stream_ptr);
222 
223  if (P[0] <= P[1]) {
224 
225  /* need 8 more bytes from the stream */
226  for (y = 0; y < 8; y++) {
227  flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
228  for (; flags != 1; flags >>= 1)
229  *s->pixel_ptr++ = P[flags & 1];
230  s->pixel_ptr += s->line_inc;
231  }
232 
233  } else {
234 
235  /* need 2 more bytes from the stream */
236  flags = bytestream2_get_le16(&s->stream_ptr);
237  for (y = 0; y < 8; y += 2) {
238  for (x = 0; x < 8; x += 2, flags >>= 1) {
239  s->pixel_ptr[x ] =
240  s->pixel_ptr[x + 1 ] =
241  s->pixel_ptr[x + s->stride] =
242  s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
243  }
244  s->pixel_ptr += s->stride * 2;
245  }
246  }
247 
248  /* report success */
249  return 0;
250 }
251 
253 {
254  int x, y;
255  unsigned char P[4];
256  unsigned int flags = 0;
257 
258  if (bytestream2_get_bytes_left(&s->stream_ptr) < 12) {
259  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x8\n");
260  return AVERROR_INVALIDDATA;
261  }
262 
263  /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
264  * either top and bottom or left and right halves */
265  P[0] = bytestream2_get_byte(&s->stream_ptr);
266  P[1] = bytestream2_get_byte(&s->stream_ptr);
267 
268  if (P[0] <= P[1]) {
269  for (y = 0; y < 16; y++) {
270  // new values for each 4x4 block
271  if (!(y & 3)) {
272  if (y) {
273  P[0] = bytestream2_get_byte(&s->stream_ptr);
274  P[1] = bytestream2_get_byte(&s->stream_ptr);
275  }
276  flags = bytestream2_get_le16(&s->stream_ptr);
277  }
278 
279  for (x = 0; x < 4; x++, flags >>= 1)
280  *s->pixel_ptr++ = P[flags & 1];
281  s->pixel_ptr += s->stride - 4;
282  // switch to right half
283  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
284  }
285 
286  } else {
287  flags = bytestream2_get_le32(&s->stream_ptr);
288  P[2] = bytestream2_get_byte(&s->stream_ptr);
289  P[3] = bytestream2_get_byte(&s->stream_ptr);
290 
291  if (P[2] <= P[3]) {
292 
293  /* vertical split; left & right halves are 2-color encoded */
294 
295  for (y = 0; y < 16; y++) {
296  for (x = 0; x < 4; x++, flags >>= 1)
297  *s->pixel_ptr++ = P[flags & 1];
298  s->pixel_ptr += s->stride - 4;
299  // switch to right half
300  if (y == 7) {
301  s->pixel_ptr -= 8 * s->stride - 4;
302  P[0] = P[2];
303  P[1] = P[3];
304  flags = bytestream2_get_le32(&s->stream_ptr);
305  }
306  }
307 
308  } else {
309 
310  /* horizontal split; top & bottom halves are 2-color encoded */
311 
312  for (y = 0; y < 8; y++) {
313  if (y == 4) {
314  P[0] = P[2];
315  P[1] = P[3];
316  flags = bytestream2_get_le32(&s->stream_ptr);
317  }
318 
319  for (x = 0; x < 8; x++, flags >>= 1)
320  *s->pixel_ptr++ = P[flags & 1];
321  s->pixel_ptr += s->line_inc;
322  }
323  }
324  }
325 
326  /* report success */
327  return 0;
328 }
329 
331 {
332  int x, y;
333  unsigned char P[4];
334 
335  if (bytestream2_get_bytes_left(&s->stream_ptr) < 8) {
336  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x9\n");
337  return AVERROR_INVALIDDATA;
338  }
339 
340  /* 4-color encoding */
341  bytestream2_get_buffer(&s->stream_ptr, P, 4);
342 
343  if (P[0] <= P[1]) {
344  if (P[2] <= P[3]) {
345 
346  /* 1 of 4 colors for each pixel, need 16 more bytes */
347  for (y = 0; y < 8; y++) {
348  /* get the next set of 8 2-bit flags */
349  int flags = bytestream2_get_le16(&s->stream_ptr);
350  for (x = 0; x < 8; x++, flags >>= 2)
351  *s->pixel_ptr++ = P[flags & 0x03];
352  s->pixel_ptr += s->line_inc;
353  }
354 
355  } else {
356  uint32_t flags;
357 
358  /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
359  flags = bytestream2_get_le32(&s->stream_ptr);
360 
361  for (y = 0; y < 8; y += 2) {
362  for (x = 0; x < 8; x += 2, flags >>= 2) {
363  s->pixel_ptr[x ] =
364  s->pixel_ptr[x + 1 ] =
365  s->pixel_ptr[x + s->stride] =
366  s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
367  }
368  s->pixel_ptr += s->stride * 2;
369  }
370 
371  }
372  } else {
373  uint64_t flags;
374 
375  /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
376  flags = bytestream2_get_le64(&s->stream_ptr);
377  if (P[2] <= P[3]) {
378  for (y = 0; y < 8; y++) {
379  for (x = 0; x < 8; x += 2, flags >>= 2) {
380  s->pixel_ptr[x ] =
381  s->pixel_ptr[x + 1] = P[flags & 0x03];
382  }
383  s->pixel_ptr += s->stride;
384  }
385  } else {
386  for (y = 0; y < 8; y += 2) {
387  for (x = 0; x < 8; x++, flags >>= 2) {
388  s->pixel_ptr[x ] =
389  s->pixel_ptr[x + s->stride] = P[flags & 0x03];
390  }
391  s->pixel_ptr += s->stride * 2;
392  }
393  }
394  }
395 
396  /* report success */
397  return 0;
398 }
399 
401 {
402  int x, y;
403  unsigned char P[8];
404  int flags = 0;
405 
406  if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
407  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
408  return AVERROR_INVALIDDATA;
409  }
410 
411  bytestream2_get_buffer(&s->stream_ptr, P, 4);
412 
413  /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
414  * either top and bottom or left and right halves */
415  if (P[0] <= P[1]) {
416 
417  /* 4-color encoding for each quadrant; need 32 bytes */
418  for (y = 0; y < 16; y++) {
419  // new values for each 4x4 block
420  if (!(y & 3)) {
421  if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
422  flags = bytestream2_get_le32(&s->stream_ptr);
423  }
424 
425  for (x = 0; x < 4; x++, flags >>= 2)
426  *s->pixel_ptr++ = P[flags & 0x03];
427 
428  s->pixel_ptr += s->stride - 4;
429  // switch to right half
430  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
431  }
432 
433  } else {
434  // vertical split?
435  int vert;
436  uint64_t flags = bytestream2_get_le64(&s->stream_ptr);
437 
438  bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
439  vert = P[4] <= P[5];
440 
441  /* 4-color encoding for either left and right or top and bottom
442  * halves */
443 
444  for (y = 0; y < 16; y++) {
445  for (x = 0; x < 4; x++, flags >>= 2)
446  *s->pixel_ptr++ = P[flags & 0x03];
447 
448  if (vert) {
449  s->pixel_ptr += s->stride - 4;
450  // switch to right half
451  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
452  } else if (y & 1) s->pixel_ptr += s->line_inc;
453 
454  // load values for second half
455  if (y == 7) {
456  memcpy(P, P + 4, 4);
457  flags = bytestream2_get_le64(&s->stream_ptr);
458  }
459  }
460  }
461 
462  /* report success */
463  return 0;
464 }
465 
467 {
468  int y;
469 
470  /* 64-color encoding (each pixel in block is a different color) */
471  for (y = 0; y < 8; y++) {
472  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
473  s->pixel_ptr += s->stride;
474  }
475 
476  /* report success */
477  return 0;
478 }
479 
481 {
482  int x, y;
483 
484  /* 16-color block encoding: each 2x2 block is a different color */
485  for (y = 0; y < 8; y += 2) {
486  for (x = 0; x < 8; x += 2) {
487  s->pixel_ptr[x ] =
488  s->pixel_ptr[x + 1 ] =
489  s->pixel_ptr[x + s->stride] =
490  s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
491  }
492  s->pixel_ptr += s->stride * 2;
493  }
494 
495  /* report success */
496  return 0;
497 }
498 
500 {
501  int y;
502  unsigned char P[2];
503 
504  if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
505  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xD\n");
506  return AVERROR_INVALIDDATA;
507  }
508 
509  /* 4-color block encoding: each 4x4 block is a different color */
510  for (y = 0; y < 8; y++) {
511  if (!(y & 3)) {
512  P[0] = bytestream2_get_byte(&s->stream_ptr);
513  P[1] = bytestream2_get_byte(&s->stream_ptr);
514  }
515  memset(s->pixel_ptr, P[0], 4);
516  memset(s->pixel_ptr + 4, P[1], 4);
517  s->pixel_ptr += s->stride;
518  }
519 
520  /* report success */
521  return 0;
522 }
523 
525 {
526  int y;
527  unsigned char pix;
528 
529  /* 1-color encoding: the whole block is 1 solid color */
530  pix = bytestream2_get_byte(&s->stream_ptr);
531 
532  for (y = 0; y < 8; y++) {
533  memset(s->pixel_ptr, pix, 8);
534  s->pixel_ptr += s->stride;
535  }
536 
537  /* report success */
538  return 0;
539 }
540 
542 {
543  int x, y;
544  unsigned char sample[2];
545 
546  /* dithered encoding */
547  sample[0] = bytestream2_get_byte(&s->stream_ptr);
548  sample[1] = bytestream2_get_byte(&s->stream_ptr);
549 
550  for (y = 0; y < 8; y++) {
551  for (x = 0; x < 8; x += 2) {
552  *s->pixel_ptr++ = sample[ y & 1 ];
553  *s->pixel_ptr++ = sample[!(y & 1)];
554  }
555  s->pixel_ptr += s->line_inc;
556  }
557 
558  /* report success */
559  return 0;
560 }
561 
563 {
564  signed char x, y;
565 
566  /* copy a block from the second last frame using an expanded range */
567  x = bytestream2_get_byte(&s->stream_ptr);
568  y = bytestream2_get_byte(&s->stream_ptr);
569 
570  ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
571  return copy_from(s, s->second_last_frame, frame, x, y);
572 }
573 
575 {
576  int x, y;
577  uint16_t P[2];
578  unsigned int flags;
579  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
580 
581  /* 2-color encoding */
582  P[0] = bytestream2_get_le16(&s->stream_ptr);
583  P[1] = bytestream2_get_le16(&s->stream_ptr);
584 
585  if (!(P[0] & 0x8000)) {
586 
587  for (y = 0; y < 8; y++) {
588  flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
589  for (; flags != 1; flags >>= 1)
590  *pixel_ptr++ = P[flags & 1];
591  pixel_ptr += s->line_inc;
592  }
593 
594  } else {
595 
596  flags = bytestream2_get_le16(&s->stream_ptr);
597  for (y = 0; y < 8; y += 2) {
598  for (x = 0; x < 8; x += 2, flags >>= 1) {
599  pixel_ptr[x ] =
600  pixel_ptr[x + 1 ] =
601  pixel_ptr[x + s->stride] =
602  pixel_ptr[x + 1 + s->stride] = P[flags & 1];
603  }
604  pixel_ptr += s->stride * 2;
605  }
606  }
607 
608  return 0;
609 }
610 
612 {
613  int x, y;
614  uint16_t P[4];
615  unsigned int flags = 0;
616  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
617 
618  /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
619  * either top and bottom or left and right halves */
620  P[0] = bytestream2_get_le16(&s->stream_ptr);
621  P[1] = bytestream2_get_le16(&s->stream_ptr);
622 
623  if (!(P[0] & 0x8000)) {
624 
625  for (y = 0; y < 16; y++) {
626  // new values for each 4x4 block
627  if (!(y & 3)) {
628  if (y) {
629  P[0] = bytestream2_get_le16(&s->stream_ptr);
630  P[1] = bytestream2_get_le16(&s->stream_ptr);
631  }
632  flags = bytestream2_get_le16(&s->stream_ptr);
633  }
634 
635  for (x = 0; x < 4; x++, flags >>= 1)
636  *pixel_ptr++ = P[flags & 1];
637  pixel_ptr += s->stride - 4;
638  // switch to right half
639  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
640  }
641 
642  } else {
643 
644  flags = bytestream2_get_le32(&s->stream_ptr);
645  P[2] = bytestream2_get_le16(&s->stream_ptr);
646  P[3] = bytestream2_get_le16(&s->stream_ptr);
647 
648  if (!(P[2] & 0x8000)) {
649 
650  /* vertical split; left & right halves are 2-color encoded */
651 
652  for (y = 0; y < 16; y++) {
653  for (x = 0; x < 4; x++, flags >>= 1)
654  *pixel_ptr++ = P[flags & 1];
655  pixel_ptr += s->stride - 4;
656  // switch to right half
657  if (y == 7) {
658  pixel_ptr -= 8 * s->stride - 4;
659  P[0] = P[2];
660  P[1] = P[3];
661  flags = bytestream2_get_le32(&s->stream_ptr);
662  }
663  }
664 
665  } else {
666 
667  /* horizontal split; top & bottom halves are 2-color encoded */
668 
669  for (y = 0; y < 8; y++) {
670  if (y == 4) {
671  P[0] = P[2];
672  P[1] = P[3];
673  flags = bytestream2_get_le32(&s->stream_ptr);
674  }
675 
676  for (x = 0; x < 8; x++, flags >>= 1)
677  *pixel_ptr++ = P[flags & 1];
678  pixel_ptr += s->line_inc;
679  }
680  }
681  }
682 
683  /* report success */
684  return 0;
685 }
686 
688 {
689  int x, y;
690  uint16_t P[4];
691  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
692 
693  /* 4-color encoding */
694  for (x = 0; x < 4; x++)
695  P[x] = bytestream2_get_le16(&s->stream_ptr);
696 
697  if (!(P[0] & 0x8000)) {
698  if (!(P[2] & 0x8000)) {
699 
700  /* 1 of 4 colors for each pixel */
701  for (y = 0; y < 8; y++) {
702  /* get the next set of 8 2-bit flags */
703  int flags = bytestream2_get_le16(&s->stream_ptr);
704  for (x = 0; x < 8; x++, flags >>= 2)
705  *pixel_ptr++ = P[flags & 0x03];
706  pixel_ptr += s->line_inc;
707  }
708 
709  } else {
710  uint32_t flags;
711 
712  /* 1 of 4 colors for each 2x2 block */
713  flags = bytestream2_get_le32(&s->stream_ptr);
714 
715  for (y = 0; y < 8; y += 2) {
716  for (x = 0; x < 8; x += 2, flags >>= 2) {
717  pixel_ptr[x ] =
718  pixel_ptr[x + 1 ] =
719  pixel_ptr[x + s->stride] =
720  pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
721  }
722  pixel_ptr += s->stride * 2;
723  }
724 
725  }
726  } else {
727  uint64_t flags;
728 
729  /* 1 of 4 colors for each 2x1 or 1x2 block */
730  flags = bytestream2_get_le64(&s->stream_ptr);
731  if (!(P[2] & 0x8000)) {
732  for (y = 0; y < 8; y++) {
733  for (x = 0; x < 8; x += 2, flags >>= 2) {
734  pixel_ptr[x ] =
735  pixel_ptr[x + 1] = P[flags & 0x03];
736  }
737  pixel_ptr += s->stride;
738  }
739  } else {
740  for (y = 0; y < 8; y += 2) {
741  for (x = 0; x < 8; x++, flags >>= 2) {
742  pixel_ptr[x ] =
743  pixel_ptr[x + s->stride] = P[flags & 0x03];
744  }
745  pixel_ptr += s->stride * 2;
746  }
747  }
748  }
749 
750  /* report success */
751  return 0;
752 }
753 
755 {
756  int x, y;
757  uint16_t P[8];
758  int flags = 0;
759  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
760 
761  for (x = 0; x < 4; x++)
762  P[x] = bytestream2_get_le16(&s->stream_ptr);
763 
764  /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
765  * either top and bottom or left and right halves */
766  if (!(P[0] & 0x8000)) {
767 
768  /* 4-color encoding for each quadrant */
769  for (y = 0; y < 16; y++) {
770  // new values for each 4x4 block
771  if (!(y & 3)) {
772  if (y)
773  for (x = 0; x < 4; x++)
774  P[x] = bytestream2_get_le16(&s->stream_ptr);
775  flags = bytestream2_get_le32(&s->stream_ptr);
776  }
777 
778  for (x = 0; x < 4; x++, flags >>= 2)
779  *pixel_ptr++ = P[flags & 0x03];
780 
781  pixel_ptr += s->stride - 4;
782  // switch to right half
783  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
784  }
785 
786  } else {
787  // vertical split?
788  int vert;
789  uint64_t flags = bytestream2_get_le64(&s->stream_ptr);
790 
791  for (x = 4; x < 8; x++)
792  P[x] = bytestream2_get_le16(&s->stream_ptr);
793  vert = !(P[4] & 0x8000);
794 
795  /* 4-color encoding for either left and right or top and bottom
796  * halves */
797 
798  for (y = 0; y < 16; y++) {
799  for (x = 0; x < 4; x++, flags >>= 2)
800  *pixel_ptr++ = P[flags & 0x03];
801 
802  if (vert) {
803  pixel_ptr += s->stride - 4;
804  // switch to right half
805  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
806  } else if (y & 1) pixel_ptr += s->line_inc;
807 
808  // load values for second half
809  if (y == 7) {
810  memcpy(P, P + 4, 8);
811  flags = bytestream2_get_le64(&s->stream_ptr);
812  }
813  }
814  }
815 
816  /* report success */
817  return 0;
818 }
819 
821 {
822  int x, y;
823  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
824 
825  /* 64-color encoding (each pixel in block is a different color) */
826  for (y = 0; y < 8; y++) {
827  for (x = 0; x < 8; x++)
828  pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
829  pixel_ptr += s->stride;
830  }
831 
832  /* report success */
833  return 0;
834 }
835 
837 {
838  int x, y;
839  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
840 
841  /* 16-color block encoding: each 2x2 block is a different color */
842  for (y = 0; y < 8; y += 2) {
843  for (x = 0; x < 8; x += 2) {
844  pixel_ptr[x ] =
845  pixel_ptr[x + 1 ] =
846  pixel_ptr[x + s->stride] =
847  pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
848  }
849  pixel_ptr += s->stride * 2;
850  }
851 
852  /* report success */
853  return 0;
854 }
855 
857 {
858  int x, y;
859  uint16_t P[2];
860  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
861 
862  /* 4-color block encoding: each 4x4 block is a different color */
863  for (y = 0; y < 8; y++) {
864  if (!(y & 3)) {
865  P[0] = bytestream2_get_le16(&s->stream_ptr);
866  P[1] = bytestream2_get_le16(&s->stream_ptr);
867  }
868  for (x = 0; x < 8; x++)
869  pixel_ptr[x] = P[x >> 2];
870  pixel_ptr += s->stride;
871  }
872 
873  /* report success */
874  return 0;
875 }
876 
878 {
879  int x, y;
880  uint16_t pix;
881  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
882 
883  /* 1-color encoding: the whole block is 1 solid color */
884  pix = bytestream2_get_le16(&s->stream_ptr);
885 
886  for (y = 0; y < 8; y++) {
887  for (x = 0; x < 8; x++)
888  pixel_ptr[x] = pix;
889  pixel_ptr += s->stride;
890  }
891 
892  /* report success */
893  return 0;
894 }
895 
896 static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
905 };
906 
907 static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
916 };
917 
919 {
920  int line;
921 
922  if (!opcode) {
923  for (line = 0; line < 8; ++line) {
924  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
925  s->pixel_ptr += s->stride;
926  }
927  } else {
928  /* Don't try to copy second_last_frame data on the first frames */
929  if (s->avctx->frame_num > 2)
930  copy_from(s, s->second_last_frame, frame, 0, 0);
931  }
932 }
933 
935 {
936  int off_x, off_y;
937 
938  if (opcode < 0) {
939  off_x = ((uint16_t)opcode - 0xC000) % frame->width;
940  off_y = ((uint16_t)opcode - 0xC000) / frame->width;
941  copy_from(s, s->last_frame, frame, off_x, off_y);
942  } else if (opcode > 0) {
943  off_x = ((uint16_t)opcode - 0x4000) % frame->width;
944  off_y = ((uint16_t)opcode - 0x4000) / frame->width;
945  copy_from(s, frame, frame, off_x, off_y);
946  }
947 }
948 
949 static void (* const ipvideo_format_06_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
951 };
952 
954 {
955  int pass, x, y;
956  int16_t opcode;
957  GetByteContext decoding_map_ptr;
958 
959  /* this is PAL8, so make the palette available */
960  memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
961  s->stride = frame->linesize[0];
962 
963  s->line_inc = s->stride - 8;
964  s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
965  + (s->avctx->width - 8) * (1 + s->is_16bpp);
966 
967  bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
968 
969  for (pass = 0; pass < 2; ++pass) {
970  bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
971  for (y = 0; y < s->avctx->height; y += 8) {
972  for (x = 0; x < s->avctx->width; x += 8) {
973  opcode = bytestream2_get_le16(&decoding_map_ptr);
974 
975  ff_tlog(s->avctx,
976  " block @ (%3d, %3d): opcode 0x%X, data ptr offset %d\n",
977  x, y, opcode, bytestream2_tell(&s->stream_ptr));
978 
979  s->pixel_ptr = frame->data[0] + x + y * frame->linesize[0];
980  ipvideo_format_06_passes[pass](s, frame, opcode);
981  }
982  }
983  }
984 
985  if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
986  av_log(s->avctx, AV_LOG_DEBUG,
987  "decode finished with %d bytes left over\n",
988  bytestream2_get_bytes_left(&s->stream_ptr));
989  }
990 }
991 
993 {
994  int line;
995 
996  if (!opcode) {
997  for (line = 0; line < 8; ++line) {
998  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
999  s->pixel_ptr += s->stride;
1000  }
1001  }
1002 }
1003 
1005 {
1006  int off_x, off_y;
1007 
1008  if (opcode < 0) {
1009  off_x = ((uint16_t)opcode - 0xC000) % s->cur_decode_frame->width;
1010  off_y = ((uint16_t)opcode - 0xC000) / s->cur_decode_frame->width;
1011  copy_from(s, s->prev_decode_frame, s->cur_decode_frame, off_x, off_y);
1012  } else if (opcode > 0) {
1013  off_x = ((uint16_t)opcode - 0x4000) % s->cur_decode_frame->width;
1014  off_y = ((uint16_t)opcode - 0x4000) / s->cur_decode_frame->width;
1015  copy_from(s, s->cur_decode_frame, s->cur_decode_frame, off_x, off_y);
1016  }
1017 }
1018 
1019 static void (* const ipvideo_format_10_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
1021 };
1022 
1024 {
1025  int pass, x, y, changed_block;
1026  int16_t opcode, skip;
1027  GetByteContext decoding_map_ptr;
1028  GetByteContext skip_map_ptr;
1029 
1030  bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
1031 
1032  /* this is PAL8, so make the palette available */
1033  memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
1034  s->stride = frame->linesize[0];
1035 
1036  s->line_inc = s->stride - 8;
1037  s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
1038  + (s->avctx->width - 8) * (1 + s->is_16bpp);
1039 
1040  bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
1041  bytestream2_init(&skip_map_ptr, s->skip_map, s->skip_map_size);
1042 
1043  for (pass = 0; pass < 2; ++pass) {
1044  bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
1045  bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
1046  skip = bytestream2_get_le16(&skip_map_ptr);
1047 
1048  for (y = 0; y < s->avctx->height; y += 8) {
1049  for (x = 0; x < s->avctx->width; x += 8) {
1050  s->pixel_ptr = s->cur_decode_frame->data[0] + x + y * s->cur_decode_frame->linesize[0];
1051 
1052  while (skip <= 0) {
1053  if (skip != -0x8000 && skip) {
1054  opcode = bytestream2_get_le16(&decoding_map_ptr);
1055  ipvideo_format_10_passes[pass](s, frame, opcode);
1056  break;
1057  }
1058  if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
1059  return;
1060  skip = bytestream2_get_le16(&skip_map_ptr);
1061  }
1062  skip *= 2;
1063  }
1064  }
1065  }
1066 
1067  bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
1068  skip = bytestream2_get_le16(&skip_map_ptr);
1069  for (y = 0; y < s->avctx->height; y += 8) {
1070  for (x = 0; x < s->avctx->width; x += 8) {
1071  changed_block = 0;
1072  s->pixel_ptr = frame->data[0] + x + y*frame->linesize[0];
1073 
1074  while (skip <= 0) {
1075  if (skip != -0x8000 && skip) {
1076  changed_block = 1;
1077  break;
1078  }
1079  if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
1080  return;
1081  skip = bytestream2_get_le16(&skip_map_ptr);
1082  }
1083 
1084  if (changed_block) {
1085  copy_from(s, s->cur_decode_frame, frame, 0, 0);
1086  } else {
1087  /* Don't try to copy last_frame data on the first frame */
1088  if (s->avctx->frame_num)
1089  copy_from(s, s->last_frame, frame, 0, 0);
1090  }
1091  skip *= 2;
1092  }
1093  }
1094 
1095  FFSWAP(AVFrame*, s->prev_decode_frame, s->cur_decode_frame);
1096 
1097  if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
1098  av_log(s->avctx, AV_LOG_DEBUG,
1099  "decode finished with %d bytes left over\n",
1100  bytestream2_get_bytes_left(&s->stream_ptr));
1101  }
1102 }
1103 
1105 {
1106  int x, y;
1107  unsigned char opcode;
1108  int ret;
1109  GetBitContext gb;
1110 
1111  bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
1112  if (!s->is_16bpp) {
1113  /* this is PAL8, so make the palette available */
1114  memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
1115 
1116  s->stride = frame->linesize[0];
1117  } else {
1118  s->stride = frame->linesize[0] >> 1;
1119  s->mv_ptr = s->stream_ptr;
1120  bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
1121  }
1122  s->line_inc = s->stride - 8;
1123  s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
1124  + (s->avctx->width - 8) * (1 + s->is_16bpp);
1125 
1126  init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
1127  for (y = 0; y < s->avctx->height; y += 8) {
1128  for (x = 0; x < s->avctx->width; x += 8) {
1129  if (get_bits_left(&gb) < 4)
1130  return;
1131  opcode = get_bits(&gb, 4);
1132 
1133  ff_tlog(s->avctx,
1134  " block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
1135  x, y, opcode, bytestream2_tell(&s->stream_ptr));
1136 
1137  if (!s->is_16bpp) {
1138  s->pixel_ptr = frame->data[0] + x
1139  + y*frame->linesize[0];
1140  ret = ipvideo_decode_block[opcode](s, frame);
1141  } else {
1142  s->pixel_ptr = frame->data[0] + x*2
1143  + y*frame->linesize[0];
1144  ret = ipvideo_decode_block16[opcode](s, frame);
1145  }
1146  if (ret != 0) {
1147  av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %"PRId64", @ block (%d, %d)\n",
1148  s->avctx->frame_num, x, y);
1149  return;
1150  }
1151  }
1152  }
1153  if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
1154  av_log(s->avctx, AV_LOG_DEBUG,
1155  "decode finished with %d bytes left over\n",
1156  bytestream2_get_bytes_left(&s->stream_ptr));
1157  }
1158 }
1159 
1161 {
1162  IpvideoContext *s = avctx->priv_data;
1163 
1164  s->avctx = avctx;
1165 
1166  s->is_16bpp = avctx->bits_per_coded_sample == 16;
1167  avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;
1168 
1169  ff_hpeldsp_init(&s->hdsp, avctx->flags);
1170 
1171  s->last_frame = av_frame_alloc();
1172  s->second_last_frame = av_frame_alloc();
1173  s->cur_decode_frame = av_frame_alloc();
1174  s->prev_decode_frame = av_frame_alloc();
1175  if (!s->last_frame || !s->second_last_frame ||
1176  !s->cur_decode_frame || !s->prev_decode_frame) {
1177  return AVERROR(ENOMEM);
1178  }
1179 
1180  s->cur_decode_frame->width = avctx->width;
1181  s->prev_decode_frame->width = avctx->width;
1182  s->cur_decode_frame->height = avctx->height;
1183  s->prev_decode_frame->height = avctx->height;
1184  s->cur_decode_frame->format = avctx->pix_fmt;
1185  s->prev_decode_frame->format = avctx->pix_fmt;
1186 
1187  return 0;
1188 }
1189 
1191  int *got_frame, AVPacket *avpkt)
1192 {
1193  const uint8_t *buf = avpkt->data;
1194  int buf_size = avpkt->size;
1195  IpvideoContext *s = avctx->priv_data;
1196  int ret;
1197  int send_buffer;
1198  int frame_format;
1199  int video_data_size;
1200 
1202  av_frame_unref(s->last_frame);
1203  av_frame_unref(s->second_last_frame);
1204  av_frame_unref(s->cur_decode_frame);
1205  av_frame_unref(s->prev_decode_frame);
1206  }
1207 
1208  if (!s->cur_decode_frame->data[0]) {
1209  ret = ff_get_buffer(avctx, s->cur_decode_frame, 0);
1210  if (ret < 0)
1211  return ret;
1212 
1213  ret = ff_get_buffer(avctx, s->prev_decode_frame, 0);
1214  if (ret < 0) {
1215  av_frame_unref(s->cur_decode_frame);
1216  return ret;
1217  }
1218  }
1219 
1220  if (buf_size < 8)
1221  return AVERROR_INVALIDDATA;
1222 
1223  frame_format = AV_RL8(buf);
1224  send_buffer = AV_RL8(buf + 1);
1225  video_data_size = AV_RL16(buf + 2);
1226  s->decoding_map_size = AV_RL16(buf + 4);
1227  s->skip_map_size = AV_RL16(buf + 6);
1228 
1229  switch (frame_format) {
1230  case 0x06:
1231  if (s->decoding_map_size) {
1232  av_log(avctx, AV_LOG_ERROR, "Decoding map for format 0x06\n");
1233  return AVERROR_INVALIDDATA;
1234  }
1235 
1236  if (s->skip_map_size) {
1237  av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x06\n");
1238  return AVERROR_INVALIDDATA;
1239  }
1240 
1241  if (s->is_16bpp) {
1242  av_log(avctx, AV_LOG_ERROR, "Video format 0x06 does not support 16bpp movies\n");
1243  return AVERROR_INVALIDDATA;
1244  }
1245 
1246  /* Decoding map for 0x06 frame format is at the top of pixeldata */
1247  s->decoding_map_size = ((s->avctx->width / 8) * (s->avctx->height / 8)) * 2;
1248  s->decoding_map = buf + 8 + 14; /* 14 bits of op data */
1249  video_data_size -= s->decoding_map_size + 14;
1250  if (video_data_size <= 0 || s->decoding_map_size == 0)
1251  return AVERROR_INVALIDDATA;
1252 
1253  if (buf_size < 8 + s->decoding_map_size + 14 + video_data_size)
1254  return AVERROR_INVALIDDATA;
1255 
1256  bytestream2_init(&s->stream_ptr, buf + 8 + s->decoding_map_size + 14, video_data_size);
1257 
1258  break;
1259 
1260  case 0x10:
1261  if (! s->decoding_map_size) {
1262  av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x10\n");
1263  return AVERROR_INVALIDDATA;
1264  }
1265 
1266  if (! s->skip_map_size) {
1267  av_log(avctx, AV_LOG_ERROR, "Empty skip map for format 0x10\n");
1268  return AVERROR_INVALIDDATA;
1269  }
1270 
1271  if (s->is_16bpp) {
1272  av_log(avctx, AV_LOG_ERROR, "Video format 0x10 does not support 16bpp movies\n");
1273  return AVERROR_INVALIDDATA;
1274  }
1275 
1276  if (buf_size < 8 + video_data_size + s->decoding_map_size + s->skip_map_size)
1277  return AVERROR_INVALIDDATA;
1278 
1279  bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
1280  s->decoding_map = buf + 8 + video_data_size;
1281  s->skip_map = buf + 8 + video_data_size + s->decoding_map_size;
1282 
1283  break;
1284 
1285  case 0x11:
1286  if (! s->decoding_map_size) {
1287  av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x11\n");
1288  return AVERROR_INVALIDDATA;
1289  }
1290 
1291  if (s->skip_map_size) {
1292  av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x11\n");
1293  return AVERROR_INVALIDDATA;
1294  }
1295 
1296  if (buf_size < 8 + video_data_size + s->decoding_map_size)
1297  return AVERROR_INVALIDDATA;
1298 
1299  bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
1300  s->decoding_map = buf + 8 + video_data_size;
1301 
1302  break;
1303 
1304  default:
1305  av_log(avctx, AV_LOG_ERROR, "Frame type 0x%02X unsupported\n", frame_format);
1306  }
1307 
1308  /* ensure we can't overread the packet */
1309  if (buf_size < 8 + s->decoding_map_size + video_data_size + s->skip_map_size) {
1310  av_log(avctx, AV_LOG_ERROR, "Invalid IP packet size\n");
1311  return AVERROR_INVALIDDATA;
1312  }
1313 
1314  if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
1315  return ret;
1316 
1317  if (!s->is_16bpp) {
1318 #if FF_API_PALETTE_HAS_CHANGED
1320  frame->palette_has_changed =
1321 #endif
1322  ff_copy_palette(s->pal, avpkt, avctx);
1323 #if FF_API_PALETTE_HAS_CHANGED
1325 #endif
1326  }
1327 
1328  switch (frame_format) {
1329  case 0x06:
1331  break;
1332  case 0x10:
1334  break;
1335  case 0x11:
1337  break;
1338  }
1339 
1340  *got_frame = send_buffer;
1341 
1342  /* shuffle frames */
1343  FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
1344  if ((ret = av_frame_replace(s->last_frame, frame)) < 0)
1345  return ret;
1346 
1347  /* report that the buffer was completely consumed */
1348  return buf_size;
1349 }
1350 
1352 {
1353  IpvideoContext *s = avctx->priv_data;
1354 
1355  av_frame_free(&s->last_frame);
1356  av_frame_free(&s->second_last_frame);
1357  av_frame_free(&s->cur_decode_frame);
1358  av_frame_free(&s->prev_decode_frame);
1359 
1360  return 0;
1361 }
1362 
1364  .p.name = "interplayvideo",
1365  CODEC_LONG_NAME("Interplay MVE video"),
1366  .p.type = AVMEDIA_TYPE_VIDEO,
1368  .priv_data_size = sizeof(IpvideoContext),
1370  .close = ipvideo_decode_end,
1372  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
1373  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
1374 };
IpvideoContext::decoding_map
const unsigned char * decoding_map
Definition: interplayvideo.c:63
ipvideo_decode_init
static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
Definition: interplayvideo.c:1160
FF_ENABLE_DEPRECATION_WARNINGS
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:73
IpvideoContext::is_16bpp
int is_16bpp
Definition: interplayvideo.c:68
ipvideo_decode_block_opcode_0xB
static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:466
FF_CODEC_CAP_INIT_CLEANUP
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a fai...
Definition: codec_internal.h:43
ipvideo_decode_block_opcode_0x3
static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:139
IpvideoContext::skip_map_size
int skip_map_size
Definition: interplayvideo.c:66
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:695
AVERROR
#define AVERROR(e): convert a positive POSIX errno value into a negative FFmpeg error code.
ipvideo_format_10_passes
static void(*const ipvideo_format_10_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op)
Definition: interplayvideo.c:1019
GetByteContext
Definition: bytestream.h:33
ipvideo_decode_block_opcode_0x2
static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:115
ipvideo_decode_block_opcode_0xC
static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:480
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:162
ipvideo_decode_block_opcode_0x9
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:330
bytestream2_seek
static av_always_inline int bytestream2_seek(GetByteContext *g, int offset, int whence)
Definition: bytestream.h:212
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:389
AVPacket::data
uint8_t * data
Definition: packet.h:539
IpvideoContext::mv_ptr
GetByteContext mv_ptr
Definition: interplayvideo.c:69
IpvideoContext::cur_decode_frame
AVFrame * cur_decode_frame
Definition: interplayvideo.c:60
FFCodec
Definition: codec_internal.h:127
ipvideo_format_10_secondpass
static void ipvideo_format_10_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:1004
AV_RL8
#define AV_RL8(x)
Definition: intreadwrite.h:394
init_get_bits
static int init_get_bits(GetBitContext *s, const uint8_t *buffer, int bit_size)
Initialize GetBitContext.
Definition: get_bits.h:514
ipvideo_format_06_passes
static void(*const ipvideo_format_06_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op)
Definition: interplayvideo.c:949
ipvideo_decode_block
static int(*const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:896
ipvideo_decode_block_opcode_0x7_16
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:574
bytestream2_skip
static av_always_inline void bytestream2_skip(GetByteContext *g, unsigned int size)
Definition: bytestream.h:168
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:335
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:131
GetBitContext
Definition: get_bits.h:108
BL
#define BL(type, name)
Definition: vf_shear.c:161
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:508
ipvideo_decode_format_10_opcodes
static void ipvideo_decode_format_10_opcodes(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:1023
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:150
IpvideoContext::prev_decode_frame
AVFrame * prev_decode_frame
Definition: interplayvideo.c:61
AV_PKT_DATA_PARAM_CHANGE
@ AV_PKT_DATA_PARAM_CHANGE
An AV_PKT_DATA_PARAM_CHANGE side data packet is laid out as follows:
Definition: packet.h:69
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:209
av_cold
#define av_cold
Definition: attributes.h:90
ipvideo_decode_block_opcode_0x8_16
static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:611
ipvideo_decode_block_opcode_0x4
static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:165
FF_CODEC_DECODE_CB
#define FF_CODEC_DECODE_CB(func)
Definition: codec_internal.h:311
intreadwrite.h
s
#define s(width, name)
Definition: cbs_vp9.c:198
IpvideoContext::skip_map
const unsigned char * skip_map
Definition: interplayvideo.c:65
ipvideo_decode_frame
static int ipvideo_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt)
Definition: interplayvideo.c:1190
AV_GET_BUFFER_FLAG_REF
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:431
op
static int op(uint8_t **dst, const uint8_t *dst_end, GetByteContext *gb, int pixel, int count, int *x, int width, int linesize)
Perform decode operation.
Definition: anm.c:76
B
#define B
Definition: huffyuv.h:42
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:230
decode.h
get_bits.h
AV_RL16
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_WL32 unsigned int_TMPL AV_WL24 unsigned int_TMPL AV_RL16
Definition: bytestream.h:94
IpvideoContext::decoding_map_size
int decoding_map_size
Definition: interplayvideo.c:64
IpvideoContext::upper_motion_limit_offset
int upper_motion_limit_offset
Definition: interplayvideo.c:73
ipvideo_decode_block_opcode_0x6
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:199
ipvideo_decode_block_opcode_0x9_16
static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:687
IpvideoContext::stream_ptr
GetByteContext stream_ptr
Definition: interplayvideo.c:69
CODEC_LONG_NAME
#define CODEC_LONG_NAME(str)
Definition: codec_internal.h:296
ff_hpeldsp_init
av_cold void ff_hpeldsp_init(HpelDSPContext *c, int flags)
Definition: hpeldsp.c:338
ipvideo_decode_block_opcode_0xA
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:400
ipvideo_decode_block_opcode_0x1
static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:110
if
if(ret)
Definition: filter_design.txt:179
ipvideo_format_10_firstpass
static void ipvideo_format_10_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:992
NULL
#define NULL
Definition: coverity.c:32
AV_CODEC_ID_INTERPLAY_VIDEO
@ AV_CODEC_ID_INTERPLAY_VIDEO
Definition: codec_id.h:91
IpvideoContext::stride
int stride
Definition: interplayvideo.c:72
ipvideo_decode_block_opcode_0x6_16
static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:562
AVPALETTE_SIZE
#define AVPALETTE_SIZE
Definition: pixfmt.h:32
ipvideo_decode_format_11_opcodes
static void ipvideo_decode_format_11_opcodes(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:1104
bytestream2_get_buffer
static av_always_inline unsigned int bytestream2_get_buffer(GetByteContext *g, uint8_t *dst, unsigned int size)
Definition: bytestream.h:267
ipvideo_format_06_firstpass
static void ipvideo_format_06_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:918
ipvideo_decode_block_opcode_0xE
static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:524
bytestream2_get_bytes_left
static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g)
Definition: bytestream.h:158
bytestream2_tell
static av_always_inline int bytestream2_tell(GetByteContext *g)
Definition: bytestream.h:192
ipvideo_decode_end
static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
Definition: interplayvideo.c:1351
IpvideoContext::second_last_frame
AVFrame * second_last_frame
Definition: interplayvideo.c:56
IpvideoContext::hdsp
HpelDSPContext hdsp
Definition: interplayvideo.c:55
HpelDSPContext
Half-pel DSP context.
Definition: hpeldsp.h:45
ff_get_buffer
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1697
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:368
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
ipvideo_decode_block_opcode_0x0
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:105
ipvideo_decode_block_opcode_0x8
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:252
AVPacket::size
int size
Definition: packet.h:540
codec_internal.h
P
#define P
dst
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t int int16_t * dst
Definition: dsp.h:83
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
sample
#define sample
Definition: flacdsp_template.c:44
ipvideo_format_06_secondpass
static void ipvideo_format_06_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:934
line
Definition: graph2dot.c:48
ipvideo_decode_block_opcode_0x5
static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:186
ipvideo_decode_block_opcode_0xB_16
static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:820
AVCodecContext::bits_per_coded_sample
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
Definition: avcodec.h:1578
copy_from
static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
Definition: interplayvideo.c:78
av_packet_get_side_data
uint8_t * av_packet_get_side_data(const AVPacket *pkt, enum AVPacketSideDataType type, size_t *size)
Get side information from packet.
Definition: packet.c:252
ipvideo_decode_block_opcode_0xC_16
static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:836
AV_PIX_FMT_RGB555
#define AV_PIX_FMT_RGB555
Definition: pixfmt.h:490
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:610
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:194
IpvideoContext::avctx
AVCodecContext * avctx
Definition: interplayvideo.c:54
AVCodecContext::height
int height
Definition: avcodec.h:624
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:663
ipvideo_decode_block_opcode_0xF
static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:541
avcodec.h
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:84
ret
ret
Definition: filter_design.txt:187
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
frame
This structure describes decoded (raw) audio or video data as stored in an AVFrame.
Definition: filter_design.txt:264
av_frame_replace
int av_frame_replace(AVFrame *dst, const AVFrame *src)
Ensure the destination frame refers to the same data described by the source frame,...
Definition: frame.c:487
AVCodecContext
main external API structure.
Definition: avcodec.h:451
ipvideo_decode_block_opcode_0x7
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:208
ipvideo_decode_format_06_opcodes
static void ipvideo_decode_format_06_opcodes(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:953
IpvideoContext::pal
uint32_t pal[256]
Definition: interplayvideo.c:75
AV_CODEC_CAP_PARAM_CHANGE
#define AV_CODEC_CAP_PARAM_CHANGE
Codec supports changed parameters at any point.
Definition: codec.h:118
FF_DISABLE_DEPRECATION_WARNINGS
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:72
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
ff_interplay_video_decoder
const FFCodec ff_interplay_video_decoder
Definition: interplayvideo.c:1363
ipvideo_decode_block_opcode_0xE_16
static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:877
IpvideoContext::last_frame
AVFrame * last_frame
Definition: interplayvideo.c:57
ff_tlog
#define ff_tlog(ctx,...)
Definition: internal.h:141
AVPacket
This structure stores compressed data.
Definition: packet.h:516
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:478
ipvideo_decode_block16
static int(*const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:907
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:624
bytestream.h
hpeldsp.h
bytestream2_init
static av_always_inline void bytestream2_init(GetByteContext *g, const uint8_t *buf, int buf_size)
Definition: bytestream.h:137
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:482
ipvideo_decode_block_opcode_0xD
static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:499
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
IpvideoContext::line_inc
int line_inc
Definition: interplayvideo.c:71
IpvideoContext::pixel_ptr
unsigned char * pixel_ptr
Definition: interplayvideo.c:70
ff_copy_palette
int ff_copy_palette(void *dst, const AVPacket *src, void *logctx)
Check whether the side-data of src contains a palette of size AVPALETTE_SIZE; if so,...
Definition: decode.c:2236
width
#define width
Definition: dsp.h:85
ipvideo_decode_block_opcode_0xD_16
static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:856
IpvideoContext
Definition: interplayvideo.c:52
skip
static void BS_FUNC() skip(BSCTX *bc, unsigned int n)
Skip n bits in the buffer.
Definition: bitstream_template.h:375
src
#define src
Definition: vp8dsp.c:248
line
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like for each output line the vertical scaler pulls lines from a ring buffer When the ring buffer does not contain the wanted line
Definition: swscale.txt:40
ipvideo_decode_block_opcode_0xA_16
static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:754