FFmpeg
interplayvideo.c
Go to the documentation of this file.
1 /*
2  * Interplay MVE Video Decoder
3  * Copyright (C) 2003 The FFmpeg project
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
25  * For more information about the Interplay MVE format, visit:
26  * http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
27  * This code is written in such a way that the identifiers match up
28  * with the encoding descriptions in the document.
29  *
30  * This decoder presently only supports a PAL8 output colorspace.
31  *
32  * An Interplay video frame consists of 2 parts: The decoding map and
33  * the video data. A demuxer must load these 2 parts together in a single
34  * buffer before sending it through the stream to this decoder.
35  */
36 
37 #include <stdio.h>
38 #include <stdlib.h>
39 #include <string.h>
40 
41 #include "libavutil/intreadwrite.h"
42 
43 #define BITSTREAM_READER_LE
44 #include "avcodec.h"
45 #include "bytestream.h"
46 #include "decode.h"
47 #include "get_bits.h"
48 #include "hpeldsp.h"
49 #include "internal.h"
50 
51 #define PALETTE_COUNT 256
52 
53 typedef struct IpvideoContext {
54 
59 
60  /* For format 0x10 */
63 
64  const unsigned char *decoding_map;
66  const unsigned char *skip_map;
68 
69  int is_16bpp;
71  unsigned char *pixel_ptr;
72  int line_inc;
73  int stride;
75 
76  uint32_t pal[256];
78 
79 static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
80 {
 /* Copy one 8-line block into dst at the current block position, reading
  * from src displaced by (delta_x, delta_y).  Returns 0 on success or a
  * negative AVERROR if the displaced source position is out of bounds or
  * the reference frame has no data. */
81  int width = dst->width;
 /* Recover the (x, y) position of the current block from how far
  * s->pixel_ptr has advanced into the destination plane; x is in pixels,
  * hence the division by bytes-per-pixel (1 for PAL8, 2 for 16bpp). */
82  int current_offset = s->pixel_ptr - dst->data[0];
83  int x = (current_offset % dst->linesize[0]) / (1 + s->is_16bpp);
84  int y = current_offset / dst->linesize[0];
 /* Apply the motion vector with horizontal wrap-around: if delta_x + x
  * runs past either edge of the row, dx is wrapped back by one full width
  * and the overflow/underflow carries +1/-1 into dy. */
85  int dx = delta_x + x - ((delta_x + x >= width) - (delta_x + x < 0)) * width;
86  int dy = delta_y + y + (delta_x + x >= width) - (delta_x + x < 0);
87  int motion_offset = dy * src->linesize[0] + dx * (1 + s->is_16bpp);
88 
 /* Bounds-check the source byte offset before dereferencing.
  * NOTE(review): the log text says ">=" while the test is ">"; still a
  * truthful message whenever it fires, but slightly imprecise. */
89  if (motion_offset < 0) {
90  av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
91  return AVERROR_INVALIDDATA;
92  } else if (motion_offset > s->upper_motion_limit_offset) {
93  av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
94  motion_offset, s->upper_motion_limit_offset);
95  return AVERROR_INVALIDDATA;
96  }
 /* A reference frame without allocated data means the bitstream asked to
  * copy from a frame that was never decoded (corrupt/truncated input). */
97  if (!src->data[0]) {
98  av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
99  return AVERROR(EINVAL);
100  }
 /* hpeldsp performs the 8-row block copy; the [!is_16bpp] index
  * presumably selects the wider copy routine for 16bpp — confirm against
  * hpeldsp's put_pixels_tab layout. */
101  s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
102  dst->linesize[0], 8);
103  return 0;
104 }
105 
107 {
108  return copy_from(s, s->last_frame, frame, 0, 0);
109 }
110 
112 {
113  return copy_from(s, s->second_last_frame, frame, 0, 0);
114 }
115 
117 {
 /* Opcode 0x2: motion-compensated copy from the frame two frames back,
  * using a single packed motion byte. */
118  unsigned char B;
119  int x, y;
120 
121  /* copy block from 2 frames ago using a motion vector; need 1 more byte */
 /* In 16bpp mode the motion bytes live in a separate stream (mv_ptr). */
122  if (!s->is_16bpp) {
123  B = bytestream2_get_byte(&s->stream_ptr);
124  } else {
125  B = bytestream2_get_byte(&s->mv_ptr);
126  }
127 
 /* Unpack the motion byte: B < 56 gives x in [8..14], y in [0..7];
  * otherwise x in [-14..14] and y >= 8 — i.e. the vector always points
  * down and/or right. */
128  if (B < 56) {
129  x = 8 + (B % 7);
130  y = B / 7;
131  } else {
132  x = -14 + ((B - 56) % 29);
133  y = 8 + ((B - 56) / 29);
134  }
135 
136  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
137  return copy_from(s, s->second_last_frame, frame, x, y);
138 }
139 
141 {
 /* Opcode 0x3: copy from an earlier-decoded region of the CURRENT frame;
  * the motion byte is the same packing as opcode 0x2 but negated. */
142  unsigned char B;
143  int x, y;
144 
145  /* copy 8x8 block from current frame from an up/left block */
146 
147  /* need 1 more byte for motion */
 /* 16bpp streams keep motion bytes in a separate buffer (mv_ptr). */
148  if (!s->is_16bpp) {
149  B = bytestream2_get_byte(&s->stream_ptr);
150  } else {
151  B = bytestream2_get_byte(&s->mv_ptr);
152  }
153 
 /* Negated unpacking of the 0x2 scheme, so the vector always points
  * up and/or left, into pixels already decoded this frame. */
154  if (B < 56) {
155  x = -(8 + (B % 7));
156  y = -(B / 7);
157  } else {
158  x = -(-14 + ((B - 56) % 29));
159  y = -( 8 + ((B - 56) / 29));
160  }
161 
162  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
 /* Source and destination are the same frame: a self-referential copy. */
163  return copy_from(s, frame, frame, x, y);
164 }
165 
167 {
 /* Opcode 0x4: copy from the previous frame with a nibble-packed
  * motion vector. */
168  int x, y;
169  unsigned char B, BL, BH;
170 
171  /* copy a block from the previous frame; need 1 more byte */
172  if (!s->is_16bpp) {
173  B = bytestream2_get_byte(&s->stream_ptr);
174  } else {
175  B = bytestream2_get_byte(&s->mv_ptr);
176  }
177 
 /* Low nibble = x, high nibble = y, each biased by -8: range [-8, 7]. */
178  BL = B & 0x0F;
179  BH = (B >> 4) & 0x0F;
180  x = -8 + BL;
181  y = -8 + BH;
182 
183  ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
184  return copy_from(s, s->last_frame, frame, x, y);
185 }
186 
188 {
 /* Opcode 0x5: copy from the previous frame with full-range motion.
  * signed char reinterprets each raw byte as a two's-complement delta,
  * giving [-128, 127] per component. */
189  signed char x, y;
190 
191  /* copy a block from the previous frame using an expanded range;
192  * need 2 more bytes */
193  x = bytestream2_get_byte(&s->stream_ptr);
194  y = bytestream2_get_byte(&s->stream_ptr);
195 
196  ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
197  return copy_from(s, s->last_frame, frame, x, y);
198 }
199 
201 {
 /* Opcode 0x6: never observed in the wild for this frame format; log it
  * loudly but keep decoding — the block is simply left untouched. */
202  /* mystery opcode? skip multiple blocks? */
203  av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");
204 
205  /* report success */
206  return 0;
207 }
208 
210 {
 /* Opcode 0x7: 2-color encoding of the 8x8 block.  The ordering of the
  * two colors selects the sub-mode. */
211  int x, y;
212  unsigned char P[2];
213  unsigned int flags;
214 
215  if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
216  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x7\n");
217  return AVERROR_INVALIDDATA;
218  }
219 
220  /* 2-color encoding */
221  P[0] = bytestream2_get_byte(&s->stream_ptr);
222  P[1] = bytestream2_get_byte(&s->stream_ptr);
223 
224  if (P[0] <= P[1]) {
225 
226  /* need 8 more bytes from the stream */
 /* Per-pixel mode: one flag byte per row.  OR-ing in 0x100 plants a
  * sentinel bit so the inner loop emits exactly 8 pixels and stops when
  * only the sentinel remains (flags == 1) — no separate counter needed. */
227  for (y = 0; y < 8; y++) {
228  flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
229  for (; flags != 1; flags >>= 1)
230  *s->pixel_ptr++ = P[flags & 1];
231  s->pixel_ptr += s->line_inc;
232  }
233 
234  } else {
235 
236  /* need 2 more bytes from the stream */
 /* 2x2 mode: 16 flag bits, one per 2x2 sub-block, each replicated to
  * its four pixels. */
237  flags = bytestream2_get_le16(&s->stream_ptr);
238  for (y = 0; y < 8; y += 2) {
239  for (x = 0; x < 8; x += 2, flags >>= 1) {
240  s->pixel_ptr[x ] =
241  s->pixel_ptr[x + 1 ] =
242  s->pixel_ptr[x + s->stride] =
243  s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
244  }
245  s->pixel_ptr += s->stride * 2;
246  }
247  }
248 
249  /* report success */
250  return 0;
251 }
252 
254 {
255  int x, y;
256  unsigned char P[4];
257  unsigned int flags = 0;
258 
259  if (bytestream2_get_bytes_left(&s->stream_ptr) < 12) {
260  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x8\n");
261  return AVERROR_INVALIDDATA;
262  }
263 
264  /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
265  * either top and bottom or left and right halves */
266  P[0] = bytestream2_get_byte(&s->stream_ptr);
267  P[1] = bytestream2_get_byte(&s->stream_ptr);
268 
269  if (P[0] <= P[1]) {
270  for (y = 0; y < 16; y++) {
271  // new values for each 4x4 block
272  if (!(y & 3)) {
273  if (y) {
274  P[0] = bytestream2_get_byte(&s->stream_ptr);
275  P[1] = bytestream2_get_byte(&s->stream_ptr);
276  }
277  flags = bytestream2_get_le16(&s->stream_ptr);
278  }
279 
280  for (x = 0; x < 4; x++, flags >>= 1)
281  *s->pixel_ptr++ = P[flags & 1];
282  s->pixel_ptr += s->stride - 4;
283  // switch to right half
284  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
285  }
286 
287  } else {
288  flags = bytestream2_get_le32(&s->stream_ptr);
289  P[2] = bytestream2_get_byte(&s->stream_ptr);
290  P[3] = bytestream2_get_byte(&s->stream_ptr);
291 
292  if (P[2] <= P[3]) {
293 
294  /* vertical split; left & right halves are 2-color encoded */
295 
296  for (y = 0; y < 16; y++) {
297  for (x = 0; x < 4; x++, flags >>= 1)
298  *s->pixel_ptr++ = P[flags & 1];
299  s->pixel_ptr += s->stride - 4;
300  // switch to right half
301  if (y == 7) {
302  s->pixel_ptr -= 8 * s->stride - 4;
303  P[0] = P[2];
304  P[1] = P[3];
305  flags = bytestream2_get_le32(&s->stream_ptr);
306  }
307  }
308 
309  } else {
310 
311  /* horizontal split; top & bottom halves are 2-color encoded */
312 
313  for (y = 0; y < 8; y++) {
314  if (y == 4) {
315  P[0] = P[2];
316  P[1] = P[3];
317  flags = bytestream2_get_le32(&s->stream_ptr);
318  }
319 
320  for (x = 0; x < 8; x++, flags >>= 1)
321  *s->pixel_ptr++ = P[flags & 1];
322  s->pixel_ptr += s->line_inc;
323  }
324  }
325  }
326 
327  /* report success */
328  return 0;
329 }
330 
332 {
333  int x, y;
334  unsigned char P[4];
335 
336  if (bytestream2_get_bytes_left(&s->stream_ptr) < 8) {
337  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x9\n");
338  return AVERROR_INVALIDDATA;
339  }
340 
341  /* 4-color encoding */
342  bytestream2_get_buffer(&s->stream_ptr, P, 4);
343 
344  if (P[0] <= P[1]) {
345  if (P[2] <= P[3]) {
346 
347  /* 1 of 4 colors for each pixel, need 16 more bytes */
348  for (y = 0; y < 8; y++) {
349  /* get the next set of 8 2-bit flags */
350  int flags = bytestream2_get_le16(&s->stream_ptr);
351  for (x = 0; x < 8; x++, flags >>= 2)
352  *s->pixel_ptr++ = P[flags & 0x03];
353  s->pixel_ptr += s->line_inc;
354  }
355 
356  } else {
357  uint32_t flags;
358 
359  /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
360  flags = bytestream2_get_le32(&s->stream_ptr);
361 
362  for (y = 0; y < 8; y += 2) {
363  for (x = 0; x < 8; x += 2, flags >>= 2) {
364  s->pixel_ptr[x ] =
365  s->pixel_ptr[x + 1 ] =
366  s->pixel_ptr[x + s->stride] =
367  s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
368  }
369  s->pixel_ptr += s->stride * 2;
370  }
371 
372  }
373  } else {
374  uint64_t flags;
375 
376  /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
377  flags = bytestream2_get_le64(&s->stream_ptr);
378  if (P[2] <= P[3]) {
379  for (y = 0; y < 8; y++) {
380  for (x = 0; x < 8; x += 2, flags >>= 2) {
381  s->pixel_ptr[x ] =
382  s->pixel_ptr[x + 1] = P[flags & 0x03];
383  }
384  s->pixel_ptr += s->stride;
385  }
386  } else {
387  for (y = 0; y < 8; y += 2) {
388  for (x = 0; x < 8; x++, flags >>= 2) {
389  s->pixel_ptr[x ] =
390  s->pixel_ptr[x + s->stride] = P[flags & 0x03];
391  }
392  s->pixel_ptr += s->stride * 2;
393  }
394  }
395  }
396 
397  /* report success */
398  return 0;
399 }
400 
402 {
403  int x, y;
404  unsigned char P[8];
405  int flags = 0;
406 
407  if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
408  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
409  return AVERROR_INVALIDDATA;
410  }
411 
412  bytestream2_get_buffer(&s->stream_ptr, P, 4);
413 
414  /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
415  * either top and bottom or left and right halves */
416  if (P[0] <= P[1]) {
417 
418  /* 4-color encoding for each quadrant; need 32 bytes */
419  for (y = 0; y < 16; y++) {
420  // new values for each 4x4 block
421  if (!(y & 3)) {
422  if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
423  flags = bytestream2_get_le32(&s->stream_ptr);
424  }
425 
426  for (x = 0; x < 4; x++, flags >>= 2)
427  *s->pixel_ptr++ = P[flags & 0x03];
428 
429  s->pixel_ptr += s->stride - 4;
430  // switch to right half
431  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
432  }
433 
434  } else {
435  // vertical split?
436  int vert;
437  uint64_t flags = bytestream2_get_le64(&s->stream_ptr);
438 
439  bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
440  vert = P[4] <= P[5];
441 
442  /* 4-color encoding for either left and right or top and bottom
443  * halves */
444 
445  for (y = 0; y < 16; y++) {
446  for (x = 0; x < 4; x++, flags >>= 2)
447  *s->pixel_ptr++ = P[flags & 0x03];
448 
449  if (vert) {
450  s->pixel_ptr += s->stride - 4;
451  // switch to right half
452  if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
453  } else if (y & 1) s->pixel_ptr += s->line_inc;
454 
455  // load values for second half
456  if (y == 7) {
457  memcpy(P, P + 4, 4);
458  flags = bytestream2_get_le64(&s->stream_ptr);
459  }
460  }
461  }
462 
463  /* report success */
464  return 0;
465 }
466 
468 {
 /* Opcode 0xB: raw 8x8 block — 64 literal bytes copied row by row. */
469  int y;
470 
471  /* 64-color encoding (each pixel in block is a different color) */
472  for (y = 0; y < 8; y++) {
473  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
474  s->pixel_ptr += s->stride;
475  }
476 
477  /* report success */
478  return 0;
479 }
480 
482 {
 /* Opcode 0xC: one literal byte per 2x2 sub-block, replicated to its
  * four pixels — 16 bytes total for the 8x8 block. */
483  int x, y;
484 
485  /* 16-color block encoding: each 2x2 block is a different color */
486  for (y = 0; y < 8; y += 2) {
487  for (x = 0; x < 8; x += 2) {
488  s->pixel_ptr[x ] =
489  s->pixel_ptr[x + 1 ] =
490  s->pixel_ptr[x + s->stride] =
491  s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
492  }
493  s->pixel_ptr += s->stride * 2;
494  }
495 
496  /* report success */
497  return 0;
498 }
499 
501 {
 /* Opcode 0xD: four 4x4 quadrants, each painted with a single color. */
502  int y;
503  unsigned char P[2];
504 
505  if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
506  av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xD\n");
507  return AVERROR_INVALIDDATA;
508  }
509 
510  /* 4-color block encoding: each 4x4 block is a different color */
 /* A fresh color pair is read every 4 rows; within each row P[0] fills
  * the left 4 pixels and P[1] the right 4. */
511  for (y = 0; y < 8; y++) {
512  if (!(y & 3)) {
513  P[0] = bytestream2_get_byte(&s->stream_ptr);
514  P[1] = bytestream2_get_byte(&s->stream_ptr);
515  }
516  memset(s->pixel_ptr, P[0], 4);
517  memset(s->pixel_ptr + 4, P[1], 4);
518  s->pixel_ptr += s->stride;
519  }
520 
521  /* report success */
522  return 0;
523 }
524 
526 {
 /* Opcode 0xE: the whole 8x8 block is one solid color (1 stream byte). */
527  int y;
528  unsigned char pix;
529 
530  /* 1-color encoding: the whole block is 1 solid color */
531  pix = bytestream2_get_byte(&s->stream_ptr);
532 
 /* Fill all 8 rows with the single palette index. */
533  for (y = 0; y < 8; y++) {
534  memset(s->pixel_ptr, pix, 8);
535  s->pixel_ptr += s->stride;
536  }
537 
538  /* report success */
539  return 0;
540 }
541 
543 {
 /* Opcode 0xF: dithered fill — two palette indices arranged as an 8x8
  * checkerboard. */
544  int x, y;
545  unsigned char sample[2];
546 
547  /* dithered encoding */
548  sample[0] = bytestream2_get_byte(&s->stream_ptr);
549  sample[1] = bytestream2_get_byte(&s->stream_ptr);
550 
 /* Alternate the two samples per pixel pair and flip the phase on every
  * row (y & 1), producing the checkerboard pattern. */
551  for (y = 0; y < 8; y++) {
552  for (x = 0; x < 8; x += 2) {
553  *s->pixel_ptr++ = sample[ y & 1 ];
554  *s->pixel_ptr++ = sample[!(y & 1)];
555  }
556  s->pixel_ptr += s->line_inc;
557  }
558 
559  /* report success */
560  return 0;
561 }
562 
564 {
565  signed char x, y;
566 
567  /* copy a block from the second last frame using an expanded range */
568  x = bytestream2_get_byte(&s->stream_ptr);
569  y = bytestream2_get_byte(&s->stream_ptr);
570 
571  ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
572  return copy_from(s, s->second_last_frame, frame, x, y);
573 }
574 
576 {
577  int x, y;
578  uint16_t P[2];
579  unsigned int flags;
580  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
581 
582  /* 2-color encoding */
583  P[0] = bytestream2_get_le16(&s->stream_ptr);
584  P[1] = bytestream2_get_le16(&s->stream_ptr);
585 
586  if (!(P[0] & 0x8000)) {
587 
588  for (y = 0; y < 8; y++) {
589  flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
590  for (; flags != 1; flags >>= 1)
591  *pixel_ptr++ = P[flags & 1];
592  pixel_ptr += s->line_inc;
593  }
594 
595  } else {
596 
597  flags = bytestream2_get_le16(&s->stream_ptr);
598  for (y = 0; y < 8; y += 2) {
599  for (x = 0; x < 8; x += 2, flags >>= 1) {
600  pixel_ptr[x ] =
601  pixel_ptr[x + 1 ] =
602  pixel_ptr[x + s->stride] =
603  pixel_ptr[x + 1 + s->stride] = P[flags & 1];
604  }
605  pixel_ptr += s->stride * 2;
606  }
607  }
608 
609  return 0;
610 }
611 
613 {
614  int x, y;
615  uint16_t P[4];
616  unsigned int flags = 0;
617  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
618 
619  /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
620  * either top and bottom or left and right halves */
621  P[0] = bytestream2_get_le16(&s->stream_ptr);
622  P[1] = bytestream2_get_le16(&s->stream_ptr);
623 
624  if (!(P[0] & 0x8000)) {
625 
626  for (y = 0; y < 16; y++) {
627  // new values for each 4x4 block
628  if (!(y & 3)) {
629  if (y) {
630  P[0] = bytestream2_get_le16(&s->stream_ptr);
631  P[1] = bytestream2_get_le16(&s->stream_ptr);
632  }
633  flags = bytestream2_get_le16(&s->stream_ptr);
634  }
635 
636  for (x = 0; x < 4; x++, flags >>= 1)
637  *pixel_ptr++ = P[flags & 1];
638  pixel_ptr += s->stride - 4;
639  // switch to right half
640  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
641  }
642 
643  } else {
644 
645  flags = bytestream2_get_le32(&s->stream_ptr);
646  P[2] = bytestream2_get_le16(&s->stream_ptr);
647  P[3] = bytestream2_get_le16(&s->stream_ptr);
648 
649  if (!(P[2] & 0x8000)) {
650 
651  /* vertical split; left & right halves are 2-color encoded */
652 
653  for (y = 0; y < 16; y++) {
654  for (x = 0; x < 4; x++, flags >>= 1)
655  *pixel_ptr++ = P[flags & 1];
656  pixel_ptr += s->stride - 4;
657  // switch to right half
658  if (y == 7) {
659  pixel_ptr -= 8 * s->stride - 4;
660  P[0] = P[2];
661  P[1] = P[3];
662  flags = bytestream2_get_le32(&s->stream_ptr);
663  }
664  }
665 
666  } else {
667 
668  /* horizontal split; top & bottom halves are 2-color encoded */
669 
670  for (y = 0; y < 8; y++) {
671  if (y == 4) {
672  P[0] = P[2];
673  P[1] = P[3];
674  flags = bytestream2_get_le32(&s->stream_ptr);
675  }
676 
677  for (x = 0; x < 8; x++, flags >>= 1)
678  *pixel_ptr++ = P[flags & 1];
679  pixel_ptr += s->line_inc;
680  }
681  }
682  }
683 
684  /* report success */
685  return 0;
686 }
687 
689 {
690  int x, y;
691  uint16_t P[4];
692  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
693 
694  /* 4-color encoding */
695  for (x = 0; x < 4; x++)
696  P[x] = bytestream2_get_le16(&s->stream_ptr);
697 
698  if (!(P[0] & 0x8000)) {
699  if (!(P[2] & 0x8000)) {
700 
701  /* 1 of 4 colors for each pixel */
702  for (y = 0; y < 8; y++) {
703  /* get the next set of 8 2-bit flags */
704  int flags = bytestream2_get_le16(&s->stream_ptr);
705  for (x = 0; x < 8; x++, flags >>= 2)
706  *pixel_ptr++ = P[flags & 0x03];
707  pixel_ptr += s->line_inc;
708  }
709 
710  } else {
711  uint32_t flags;
712 
713  /* 1 of 4 colors for each 2x2 block */
714  flags = bytestream2_get_le32(&s->stream_ptr);
715 
716  for (y = 0; y < 8; y += 2) {
717  for (x = 0; x < 8; x += 2, flags >>= 2) {
718  pixel_ptr[x ] =
719  pixel_ptr[x + 1 ] =
720  pixel_ptr[x + s->stride] =
721  pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
722  }
723  pixel_ptr += s->stride * 2;
724  }
725 
726  }
727  } else {
728  uint64_t flags;
729 
730  /* 1 of 4 colors for each 2x1 or 1x2 block */
731  flags = bytestream2_get_le64(&s->stream_ptr);
732  if (!(P[2] & 0x8000)) {
733  for (y = 0; y < 8; y++) {
734  for (x = 0; x < 8; x += 2, flags >>= 2) {
735  pixel_ptr[x ] =
736  pixel_ptr[x + 1] = P[flags & 0x03];
737  }
738  pixel_ptr += s->stride;
739  }
740  } else {
741  for (y = 0; y < 8; y += 2) {
742  for (x = 0; x < 8; x++, flags >>= 2) {
743  pixel_ptr[x ] =
744  pixel_ptr[x + s->stride] = P[flags & 0x03];
745  }
746  pixel_ptr += s->stride * 2;
747  }
748  }
749  }
750 
751  /* report success */
752  return 0;
753 }
754 
756 {
757  int x, y;
758  uint16_t P[8];
759  int flags = 0;
760  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
761 
762  for (x = 0; x < 4; x++)
763  P[x] = bytestream2_get_le16(&s->stream_ptr);
764 
765  /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
766  * either top and bottom or left and right halves */
767  if (!(P[0] & 0x8000)) {
768 
769  /* 4-color encoding for each quadrant */
770  for (y = 0; y < 16; y++) {
771  // new values for each 4x4 block
772  if (!(y & 3)) {
773  if (y)
774  for (x = 0; x < 4; x++)
775  P[x] = bytestream2_get_le16(&s->stream_ptr);
776  flags = bytestream2_get_le32(&s->stream_ptr);
777  }
778 
779  for (x = 0; x < 4; x++, flags >>= 2)
780  *pixel_ptr++ = P[flags & 0x03];
781 
782  pixel_ptr += s->stride - 4;
783  // switch to right half
784  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
785  }
786 
787  } else {
788  // vertical split?
789  int vert;
790  uint64_t flags = bytestream2_get_le64(&s->stream_ptr);
791 
792  for (x = 4; x < 8; x++)
793  P[x] = bytestream2_get_le16(&s->stream_ptr);
794  vert = !(P[4] & 0x8000);
795 
796  /* 4-color encoding for either left and right or top and bottom
797  * halves */
798 
799  for (y = 0; y < 16; y++) {
800  for (x = 0; x < 4; x++, flags >>= 2)
801  *pixel_ptr++ = P[flags & 0x03];
802 
803  if (vert) {
804  pixel_ptr += s->stride - 4;
805  // switch to right half
806  if (y == 7) pixel_ptr -= 8 * s->stride - 4;
807  } else if (y & 1) pixel_ptr += s->line_inc;
808 
809  // load values for second half
810  if (y == 7) {
811  memcpy(P, P + 4, 8);
812  flags = bytestream2_get_le64(&s->stream_ptr);
813  }
814  }
815  }
816 
817  /* report success */
818  return 0;
819 }
820 
822 {
823  int x, y;
824  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
825 
826  /* 64-color encoding (each pixel in block is a different color) */
827  for (y = 0; y < 8; y++) {
828  for (x = 0; x < 8; x++)
829  pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
830  pixel_ptr += s->stride;
831  }
832 
833  /* report success */
834  return 0;
835 }
836 
838 {
839  int x, y;
840  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
841 
842  /* 16-color block encoding: each 2x2 block is a different color */
843  for (y = 0; y < 8; y += 2) {
844  for (x = 0; x < 8; x += 2) {
845  pixel_ptr[x ] =
846  pixel_ptr[x + 1 ] =
847  pixel_ptr[x + s->stride] =
848  pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
849  }
850  pixel_ptr += s->stride * 2;
851  }
852 
853  /* report success */
854  return 0;
855 }
856 
858 {
859  int x, y;
860  uint16_t P[2];
861  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
862 
863  /* 4-color block encoding: each 4x4 block is a different color */
864  for (y = 0; y < 8; y++) {
865  if (!(y & 3)) {
866  P[0] = bytestream2_get_le16(&s->stream_ptr);
867  P[1] = bytestream2_get_le16(&s->stream_ptr);
868  }
869  for (x = 0; x < 8; x++)
870  pixel_ptr[x] = P[x >> 2];
871  pixel_ptr += s->stride;
872  }
873 
874  /* report success */
875  return 0;
876 }
877 
879 {
880  int x, y;
881  uint16_t pix;
882  uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;
883 
884  /* 1-color encoding: the whole block is 1 solid color */
885  pix = bytestream2_get_le16(&s->stream_ptr);
886 
887  for (y = 0; y < 8; y++) {
888  for (x = 0; x < 8; x++)
889  pixel_ptr[x] = pix;
890  pixel_ptr += s->stride;
891  }
892 
893  /* report success */
894  return 0;
895 }
896 
906 };
907 
917 };
918 
920 {
921  int line;
922 
923  if (!opcode) {
924  for (line = 0; line < 8; ++line) {
925  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
926  s->pixel_ptr += s->stride;
927  }
928  } else {
929  /* Don't try to copy second_last_frame data on the first frames */
930  if (s->avctx->frame_number > 2)
931  copy_from(s, s->second_last_frame, frame, 0, 0);
932  }
933 }
934 
936 {
937  int off_x, off_y;
938 
939  if (opcode < 0) {
940  off_x = ((uint16_t)opcode - 0xC000) % frame->width;
941  off_y = ((uint16_t)opcode - 0xC000) / frame->width;
942  copy_from(s, s->last_frame, frame, off_x, off_y);
943  } else if (opcode > 0) {
944  off_x = ((uint16_t)opcode - 0x4000) % frame->width;
945  off_y = ((uint16_t)opcode - 0x4000) / frame->width;
946  copy_from(s, frame, frame, off_x, off_y);
947  }
948 }
949 
950 static void (* const ipvideo_format_06_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
952 };
953 
955 {
956  int pass, x, y;
957  int16_t opcode;
958  GetByteContext decoding_map_ptr;
959 
960  /* this is PAL8, so make the palette available */
961  memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
962  s->stride = frame->linesize[0];
963 
964  s->line_inc = s->stride - 8;
965  s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
966  + (s->avctx->width - 8) * (1 + s->is_16bpp);
967 
968  bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
969 
970  for (pass = 0; pass < 2; ++pass) {
971  bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
972  for (y = 0; y < s->avctx->height; y += 8) {
973  for (x = 0; x < s->avctx->width; x += 8) {
974  opcode = bytestream2_get_le16(&decoding_map_ptr);
975 
976  ff_tlog(s->avctx,
977  " block @ (%3d, %3d): opcode 0x%X, data ptr offset %d\n",
978  x, y, opcode, bytestream2_tell(&s->stream_ptr));
979 
980  s->pixel_ptr = frame->data[0] + x + y * frame->linesize[0];
982  }
983  }
984  }
985 
986  if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
987  av_log(s->avctx, AV_LOG_DEBUG,
988  "decode finished with %d bytes left over\n",
989  bytestream2_get_bytes_left(&s->stream_ptr));
990  }
991 }
992 
994 {
995  int line;
996 
997  if (!opcode) {
998  for (line = 0; line < 8; ++line) {
999  bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
1000  s->pixel_ptr += s->stride;
1001  }
1002  }
1003 }
1004 
1006 {
1007  int off_x, off_y;
1008 
1009  if (opcode < 0) {
1010  off_x = ((uint16_t)opcode - 0xC000) % s->cur_decode_frame->width;
1011  off_y = ((uint16_t)opcode - 0xC000) / s->cur_decode_frame->width;
1012  copy_from(s, s->prev_decode_frame, s->cur_decode_frame, off_x, off_y);
1013  } else if (opcode > 0) {
1014  off_x = ((uint16_t)opcode - 0x4000) % s->cur_decode_frame->width;
1015  off_y = ((uint16_t)opcode - 0x4000) / s->cur_decode_frame->width;
1016  copy_from(s, s->cur_decode_frame, s->cur_decode_frame, off_x, off_y);
1017  }
1018 }
1019 
1020 static void (* const ipvideo_format_10_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
1022 };
1023 
1025 {
1026  int pass, x, y, changed_block;
1027  int16_t opcode, skip;
1028  GetByteContext decoding_map_ptr;
1029  GetByteContext skip_map_ptr;
1030 
1031  bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
1032 
1033  /* this is PAL8, so make the palette available */
1034  memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
1035  s->stride = frame->linesize[0];
1036 
1037  s->line_inc = s->stride - 8;
1038  s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
1039  + (s->avctx->width - 8) * (1 + s->is_16bpp);
1040 
1041  bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
1042  bytestream2_init(&skip_map_ptr, s->skip_map, s->skip_map_size);
1043 
1044  for (pass = 0; pass < 2; ++pass) {
1045  bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
1046  bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
1047  skip = bytestream2_get_le16(&skip_map_ptr);
1048 
1049  for (y = 0; y < s->avctx->height; y += 8) {
1050  for (x = 0; x < s->avctx->width; x += 8) {
1051  s->pixel_ptr = s->cur_decode_frame->data[0] + x + y * s->cur_decode_frame->linesize[0];
1052 
1053  while (skip <= 0) {
1054  if (skip != -0x8000 && skip) {
1055  opcode = bytestream2_get_le16(&decoding_map_ptr);
1056  ipvideo_format_10_passes[pass](s, frame, opcode);
1057  break;
1058  }
1059  if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
1060  return;
1061  skip = bytestream2_get_le16(&skip_map_ptr);
1062  }
1063  skip *= 2;
1064  }
1065  }
1066  }
1067 
1068  bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
1069  skip = bytestream2_get_le16(&skip_map_ptr);
1070  for (y = 0; y < s->avctx->height; y += 8) {
1071  for (x = 0; x < s->avctx->width; x += 8) {
1072  changed_block = 0;
1073  s->pixel_ptr = frame->data[0] + x + y*frame->linesize[0];
1074 
1075  while (skip <= 0) {
1076  if (skip != -0x8000 && skip) {
1077  changed_block = 1;
1078  break;
1079  }
1080  if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
1081  return;
1082  skip = bytestream2_get_le16(&skip_map_ptr);
1083  }
1084 
1085  if (changed_block) {
1086  copy_from(s, s->cur_decode_frame, frame, 0, 0);
1087  } else {
1088  /* Don't try to copy last_frame data on the first frame */
1089  if (s->avctx->frame_number)
1090  copy_from(s, s->last_frame, frame, 0, 0);
1091  }
1092  skip *= 2;
1093  }
1094  }
1095 
1096  FFSWAP(AVFrame*, s->prev_decode_frame, s->cur_decode_frame);
1097 
1098  if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
1099  av_log(s->avctx, AV_LOG_DEBUG,
1100  "decode finished with %d bytes left over\n",
1101  bytestream2_get_bytes_left(&s->stream_ptr));
1102  }
1103 }
1104 
1106 {
1107  int x, y;
1108  unsigned char opcode;
1109  int ret;
1110  GetBitContext gb;
1111 
1112  bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
1113  if (!s->is_16bpp) {
1114  /* this is PAL8, so make the palette available */
1115  memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
1116 
1117  s->stride = frame->linesize[0];
1118  } else {
1119  s->stride = frame->linesize[0] >> 1;
1120  s->mv_ptr = s->stream_ptr;
1121  bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
1122  }
1123  s->line_inc = s->stride - 8;
1124  s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
1125  + (s->avctx->width - 8) * (1 + s->is_16bpp);
1126 
1127  init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
1128  for (y = 0; y < s->avctx->height; y += 8) {
1129  for (x = 0; x < s->avctx->width; x += 8) {
1130  if (get_bits_left(&gb) < 4)
1131  return;
1132  opcode = get_bits(&gb, 4);
1133 
1134  ff_tlog(s->avctx,
1135  " block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
1136  x, y, opcode, bytestream2_tell(&s->stream_ptr));
1137 
1138  if (!s->is_16bpp) {
1139  s->pixel_ptr = frame->data[0] + x
1140  + y*frame->linesize[0];
1141  ret = ipvideo_decode_block[opcode](s, frame);
1142  } else {
1143  s->pixel_ptr = frame->data[0] + x*2
1144  + y*frame->linesize[0];
1145  ret = ipvideo_decode_block16[opcode](s, frame);
1146  }
1147  if (ret != 0) {
1148  av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
1149  s->avctx->frame_number, x, y);
1150  return;
1151  }
1152  }
1153  }
1154  if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
1155  av_log(s->avctx, AV_LOG_DEBUG,
1156  "decode finished with %d bytes left over\n",
1157  bytestream2_get_bytes_left(&s->stream_ptr));
1158  }
1159 }
1160 
1162 {
1163  IpvideoContext *s = avctx->priv_data;
1164 
1165  s->avctx = avctx;
1166 
1167  s->is_16bpp = avctx->bits_per_coded_sample == 16;
1168  avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;
1169 
1170  ff_hpeldsp_init(&s->hdsp, avctx->flags);
1171 
1172  s->last_frame = av_frame_alloc();
1173  s->second_last_frame = av_frame_alloc();
1174  s->cur_decode_frame = av_frame_alloc();
1175  s->prev_decode_frame = av_frame_alloc();
1176  if (!s->last_frame || !s->second_last_frame ||
1177  !s->cur_decode_frame || !s->prev_decode_frame) {
1178  return AVERROR(ENOMEM);
1179  }
1180 
1181  s->cur_decode_frame->width = avctx->width;
1182  s->prev_decode_frame->width = avctx->width;
1183  s->cur_decode_frame->height = avctx->height;
1184  s->prev_decode_frame->height = avctx->height;
1185  s->cur_decode_frame->format = avctx->pix_fmt;
1186  s->prev_decode_frame->format = avctx->pix_fmt;
1187 
1188  return 0;
1189 }
1190 
1192  void *data, int *got_frame,
1193  AVPacket *avpkt)
1194 {
1195  const uint8_t *buf = avpkt->data;
1196  int buf_size = avpkt->size;
1197  IpvideoContext *s = avctx->priv_data;
1198  AVFrame *frame = data;
1199  int ret;
1200  int send_buffer;
1201  int frame_format;
1202  int video_data_size;
1203 
1205  av_frame_unref(s->last_frame);
1206  av_frame_unref(s->second_last_frame);
1207  av_frame_unref(s->cur_decode_frame);
1208  av_frame_unref(s->prev_decode_frame);
1209  }
1210 
1211  if (!s->cur_decode_frame->data[0]) {
1212  ret = ff_get_buffer(avctx, s->cur_decode_frame, 0);
1213  if (ret < 0)
1214  return ret;
1215 
1216  ret = ff_get_buffer(avctx, s->prev_decode_frame, 0);
1217  if (ret < 0) {
1218  av_frame_unref(s->cur_decode_frame);
1219  return ret;
1220  }
1221  }
1222 
1223  if (buf_size < 8)
1224  return AVERROR_INVALIDDATA;
1225 
1226  frame_format = AV_RL8(buf);
1227  send_buffer = AV_RL8(buf + 1);
1228  video_data_size = AV_RL16(buf + 2);
1229  s->decoding_map_size = AV_RL16(buf + 4);
1230  s->skip_map_size = AV_RL16(buf + 6);
1231 
1232  switch (frame_format) {
1233  case 0x06:
1234  if (s->decoding_map_size) {
1235  av_log(avctx, AV_LOG_ERROR, "Decoding map for format 0x06\n");
1236  return AVERROR_INVALIDDATA;
1237  }
1238 
1239  if (s->skip_map_size) {
1240  av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x06\n");
1241  return AVERROR_INVALIDDATA;
1242  }
1243 
1244  if (s->is_16bpp) {
1245  av_log(avctx, AV_LOG_ERROR, "Video format 0x06 does not support 16bpp movies\n");
1246  return AVERROR_INVALIDDATA;
1247  }
1248 
1249  /* Decoding map for 0x06 frame format is at the top of pixeldata */
1250  s->decoding_map_size = ((s->avctx->width / 8) * (s->avctx->height / 8)) * 2;
1251  s->decoding_map = buf + 8 + 14; /* 14 bits of op data */
1252  video_data_size -= s->decoding_map_size + 14;
1253  if (video_data_size <= 0 || s->decoding_map_size == 0)
1254  return AVERROR_INVALIDDATA;
1255 
1256  if (buf_size < 8 + s->decoding_map_size + 14 + video_data_size)
1257  return AVERROR_INVALIDDATA;
1258 
1259  bytestream2_init(&s->stream_ptr, buf + 8 + s->decoding_map_size + 14, video_data_size);
1260 
1261  break;
1262 
1263  case 0x10:
1264  if (! s->decoding_map_size) {
1265  av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x10\n");
1266  return AVERROR_INVALIDDATA;
1267  }
1268 
1269  if (! s->skip_map_size) {
1270  av_log(avctx, AV_LOG_ERROR, "Empty skip map for format 0x10\n");
1271  return AVERROR_INVALIDDATA;
1272  }
1273 
1274  if (s->is_16bpp) {
1275  av_log(avctx, AV_LOG_ERROR, "Video format 0x10 does not support 16bpp movies\n");
1276  return AVERROR_INVALIDDATA;
1277  }
1278 
1279  if (buf_size < 8 + video_data_size + s->decoding_map_size + s->skip_map_size)
1280  return AVERROR_INVALIDDATA;
1281 
1282  bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
1283  s->decoding_map = buf + 8 + video_data_size;
1284  s->skip_map = buf + 8 + video_data_size + s->decoding_map_size;
1285 
1286  break;
1287 
1288  case 0x11:
1289  if (! s->decoding_map_size) {
1290  av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x11\n");
1291  return AVERROR_INVALIDDATA;
1292  }
1293 
1294  if (s->skip_map_size) {
1295  av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x11\n");
1296  return AVERROR_INVALIDDATA;
1297  }
1298 
1299  if (buf_size < 8 + video_data_size + s->decoding_map_size)
1300  return AVERROR_INVALIDDATA;
1301 
1302  bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
1303  s->decoding_map = buf + 8 + video_data_size;
1304 
1305  break;
1306 
1307  default:
1308  av_log(avctx, AV_LOG_ERROR, "Frame type 0x%02X unsupported\n", frame_format);
1309  }
1310 
1311  /* ensure we can't overread the packet */
1312  if (buf_size < 8 + s->decoding_map_size + video_data_size + s->skip_map_size) {
1313  av_log(avctx, AV_LOG_ERROR, "Invalid IP packet size\n");
1314  return AVERROR_INVALIDDATA;
1315  }
1316 
1317  if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
1318  return ret;
1319 
1320  if (!s->is_16bpp) {
1321  frame->palette_has_changed = ff_copy_palette(s->pal, avpkt, avctx);
1322  }
1323 
1324  switch (frame_format) {
1325  case 0x06:
1327  break;
1328  case 0x10:
1330  break;
1331  case 0x11:
1333  break;
1334  }
1335 
1336  *got_frame = send_buffer;
1337 
1338  /* shuffle frames */
1339  av_frame_unref(s->second_last_frame);
1340  FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
1341  if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
1342  return ret;
1343 
1344  /* report that the buffer was completely consumed */
1345  return buf_size;
1346 }
1347 
1349 {
1350  IpvideoContext *s = avctx->priv_data;
1351 
1352  av_frame_free(&s->last_frame);
1353  av_frame_free(&s->second_last_frame);
1354  av_frame_free(&s->cur_decode_frame);
1355  av_frame_free(&s->prev_decode_frame);
1356 
1357  return 0;
1358 }
1359 
1361  .name = "interplayvideo",
1362  .long_name = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
1363  .type = AVMEDIA_TYPE_VIDEO,
1365  .priv_data_size = sizeof(IpvideoContext),
1367  .close = ipvideo_decode_end,
1369  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
1371 };
IpvideoContext::decoding_map
const unsigned char * decoding_map
Definition: interplayvideo.c:64
ipvideo_decode_init
static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
Definition: interplayvideo.c:1161
AVCodec
AVCodec.
Definition: codec.h:202
FF_CODEC_CAP_INIT_THREADSAFE
#define FF_CODEC_CAP_INIT_THREADSAFE
The codec does not modify any global variables in the init function, allowing to call the init functi...
Definition: internal.h:42
IpvideoContext::is_16bpp
int is_16bpp
Definition: interplayvideo.c:69
ipvideo_decode_block_opcode_0xB
static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:467
ipvideo_decode_block_opcode_0x3
static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:140
IpvideoContext::skip_map_size
int skip_map_size
Definition: interplayvideo.c:67
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:850
AVERROR
Libavutil macro: converts a positive POSIX errno value into a negative FFmpeg error code. (Original index entry was garbled by extraction.)
ipvideo_format_10_passes
static void(*const ipvideo_format_10_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op)
Definition: interplayvideo.c:1020
GetByteContext
Definition: bytestream.h:33
AV_PKT_DATA_PARAM_CHANGE
@ AV_PKT_DATA_PARAM_CHANGE
An AV_PKT_DATA_PARAM_CHANGE side data packet is laid out as follows:
Definition: packet.h:72
ipvideo_decode_block_opcode_0x2
static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:116
ipvideo_decode_block_opcode_0xC
static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:481
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:109
ipvideo_decode_block_opcode_0x9
static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:331
bytestream2_seek
static av_always_inline int bytestream2_seek(GetByteContext *g, int offset, int whence)
Definition: bytestream.h:212
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:317
AVFrame::width
int width
Definition: frame.h:389
internal.h
AVPacket::data
uint8_t * data
Definition: packet.h:373
data
const char data[16]
Definition: mxf.c:143
IpvideoContext::mv_ptr
GetByteContext mv_ptr
Definition: interplayvideo.c:70
IpvideoContext::cur_decode_frame
AVFrame * cur_decode_frame
Definition: interplayvideo.c:61
ipvideo_format_10_secondpass
static void ipvideo_format_10_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:1005
AV_RL8
#define AV_RL8(x)
Definition: intreadwrite.h:398
init_get_bits
static int init_get_bits(GetBitContext *s, const uint8_t *buffer, int bit_size)
Initialize GetBitContext.
Definition: get_bits.h:660
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:338
init
static int init
Definition: av_tx.c:47
ipvideo_format_06_passes
static void(*const ipvideo_format_06_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op)
Definition: interplayvideo.c:950
ipvideo_decode_block
static int(*const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:897
ipvideo_decode_block_opcode_0x7_16
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:575
bytestream2_skip
static av_always_inline void bytestream2_skip(GetByteContext *g, unsigned int size)
Definition: bytestream.h:168
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:380
GetBitContext
Definition: get_bits.h:62
BL
#define BL(type, name)
Definition: vf_shear.c:161
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:463
ipvideo_decode_format_10_opcodes
static void ipvideo_decode_format_10_opcodes(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:1024
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:97
IpvideoContext::prev_decode_frame
AVFrame * prev_decode_frame
Definition: interplayvideo.c:62
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
av_cold
#define av_cold
Definition: attributes.h:90
ipvideo_decode_block_opcode_0x8_16
static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:612
ipvideo_decode_block_opcode_0x4
static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:166
decode
static void decode(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame, FILE *outfile)
Definition: decode_audio.c:71
width
#define width
intreadwrite.h
s
#define s(width, name)
Definition: cbs_vp9.c:257
IpvideoContext::skip_map
const unsigned char * skip_map
Definition: interplayvideo.c:66
AV_GET_BUFFER_FLAG_REF
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:361
op
static int op(uint8_t **dst, const uint8_t *dst_end, GetByteContext *gb, int pixel, int count, int *x, int width, int linesize)
Perform decode operation.
Definition: anm.c:75
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
decode.h
get_bits.h
AV_RL16
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_WL32 unsigned int_TMPL AV_WL24 unsigned int_TMPL AV_RL16
Definition: bytestream.h:94
IpvideoContext::decoding_map_size
int decoding_map_size
Definition: interplayvideo.c:65
IpvideoContext::upper_motion_limit_offset
int upper_motion_limit_offset
Definition: interplayvideo.c:74
ipvideo_decode_block_opcode_0x6
static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:200
ipvideo_decode_block_opcode_0x9_16
static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:688
IpvideoContext::stream_ptr
GetByteContext stream_ptr
Definition: interplayvideo.c:70
pass
#define pass
Definition: fft_template.c:601
ff_hpeldsp_init
av_cold void ff_hpeldsp_init(HpelDSPContext *c, int flags)
Definition: hpeldsp.c:338
ipvideo_decode_block_opcode_0xA
static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:401
ipvideo_decode_block_opcode_0x1
static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:111
if
if(ret)
Definition: filter_design.txt:179
ipvideo_format_10_firstpass
static void ipvideo_format_10_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:993
NULL
#define NULL
Definition: coverity.c:32
AV_CODEC_ID_INTERPLAY_VIDEO
@ AV_CODEC_ID_INTERPLAY_VIDEO
Definition: codec_id.h:89
ipvideo_decode_frame
static int ipvideo_decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
Definition: interplayvideo.c:1191
IpvideoContext::stride
int stride
Definition: interplayvideo.c:73
ff_interplay_video_decoder
const AVCodec ff_interplay_video_decoder
Definition: interplayvideo.c:1360
ipvideo_decode_block_opcode_0x6_16
static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:563
AVPALETTE_SIZE
#define AVPALETTE_SIZE
Definition: pixfmt.h:32
src
#define src
Definition: vp8dsp.c:255
ipvideo_decode_format_11_opcodes
static void ipvideo_decode_format_11_opcodes(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:1105
bytestream2_get_buffer
static av_always_inline unsigned int bytestream2_get_buffer(GetByteContext *g, uint8_t *dst, unsigned int size)
Definition: bytestream.h:267
ipvideo_format_06_firstpass
static void ipvideo_format_06_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:919
ipvideo_decode_block_opcode_0xE
static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:525
bytestream2_get_bytes_left
static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g)
Definition: bytestream.h:158
bytestream2_tell
static av_always_inline int bytestream2_tell(GetByteContext *g)
Definition: bytestream.h:192
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
ipvideo_decode_end
static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
Definition: interplayvideo.c:1348
IpvideoContext::second_last_frame
AVFrame * second_last_frame
Definition: interplayvideo.c:57
IpvideoContext::hdsp
HpelDSPContext hdsp
Definition: interplayvideo.c:56
HpelDSPContext
Half-pel DSP context.
Definition: hpeldsp.h:45
ff_get_buffer
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1652
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
ipvideo_decode_block_opcode_0x0
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:106
ipvideo_decode_block_opcode_0x8
static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:253
AVPacket::size
int size
Definition: packet.h:374
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:117
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:325
P
#define P
sample
#define sample
Definition: flacdsp_template.c:44
ipvideo_format_06_secondpass
static void ipvideo_format_06_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
Definition: interplayvideo.c:935
line
Definition: graph2dot.c:48
ipvideo_decode_block_opcode_0x5
static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:187
ipvideo_decode_block_opcode_0xB_16
static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:821
AVCodecContext::bits_per_coded_sample
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
Definition: avcodec.h:1418
copy_from
static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
Definition: interplayvideo.c:79
av_packet_get_side_data
uint8_t * av_packet_get_side_data(const AVPacket *pkt, enum AVPacketSideDataType type, size_t *size)
Get side information from packet.
Definition: avpacket.c:253
FF_CODEC_CAP_INIT_CLEANUP
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a fai...
Definition: internal.h:50
ipvideo_decode_block_opcode_0xC_16
static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:837
AV_PIX_FMT_RGB555
#define AV_PIX_FMT_RGB555
Definition: pixfmt.h:392
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:435
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:209
IpvideoContext::avctx
AVCodecContext * avctx
Definition: interplayvideo.c:55
AVCodecContext::height
int height
Definition: avcodec.h:556
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:593
ipvideo_decode_block_opcode_0xF
static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:542
avcodec.h
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
ret
ret
Definition: filter_design.txt:187
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
frame
In this decoder, a "frame" is one decoded video picture held in an AVFrame. (Original index entry was garbled by extraction.)
Definition: filter_design.txt:264
B
#define B
Definition: huffyuvdsp.h:32
AVCodecContext
main external API structure.
Definition: avcodec.h:383
ipvideo_decode_block_opcode_0x7
static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:209
ipvideo_decode_format_06_opcodes
static void ipvideo_decode_format_06_opcodes(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:954
IpvideoContext::pal
uint32_t pal[256]
Definition: interplayvideo.c:76
AV_CODEC_CAP_PARAM_CHANGE
#define AV_CODEC_CAP_PARAM_CHANGE
Codec supports changed parameters at any point.
Definition: codec.h:121
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
ipvideo_decode_block_opcode_0xE_16
static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:878
IpvideoContext::last_frame
AVFrame * last_frame
Definition: interplayvideo.c:58
ff_tlog
#define ff_tlog(ctx,...)
Definition: internal.h:205
AVPacket
This structure stores compressed data.
Definition: packet.h:350
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:410
ipvideo_decode_block16
static int(*const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:908
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:556
bytestream.h
hpeldsp.h
bytestream2_init
static av_always_inline void bytestream2_init(GetByteContext *g, const uint8_t *buf, int buf_size)
Definition: bytestream.h:137
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:561
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:362
ipvideo_decode_block_opcode_0xD
static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:500
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
IpvideoContext::line_inc
int line_inc
Definition: interplayvideo.c:72
IpvideoContext::pixel_ptr
unsigned char * pixel_ptr
Definition: interplayvideo.c:71
ff_copy_palette
int ff_copy_palette(void *dst, const AVPacket *src, void *logctx)
Check whether the side-data of src contains a palette of size AVPALETTE_SIZE; if so,...
Definition: decode.c:1854
ipvideo_decode_block_opcode_0xD_16
static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:857
IpvideoContext
Definition: interplayvideo.c:53
int
int
Definition: ffmpeg_filter.c:153
line
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like for each output line the vertical scaler pulls lines from a ring buffer When the ring buffer does not contain the wanted line
Definition: swscale.txt:40
ipvideo_decode_block_opcode_0xA_16
static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
Definition: interplayvideo.c:755