argo.c

/*
 * Argonaut Games Video decoder
 * Copyright (c) 2020 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavutil/imgutils.h"
#include "libavutil/internal.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/mem.h"

#include "avcodec.h"
#include "bytestream.h"
#include "internal.h"

typedef struct ArgoContext {
    GetByteContext gb;

    int bpp;
    int key;
    int mv0[128][2];
    int mv1[16][2];
    uint32_t pal[256];
    AVFrame *frame;
} ArgoContext;

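/* Parse a PAL8 chunk: a 16-bit start index and count followed by count
 * packed RGB24 triplets, written into the decoder palette with the
 * alpha byte forced to 0xFF. */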
static int decode_pal8(AVCodecContext *avctx, uint32_t *pal)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    int start, count;

    start = bytestream2_get_le16(gb);
    count = bytestream2_get_le16(gb);

    if (start + count > 256)
        return AVERROR_INVALIDDATA;

    if (bytestream2_get_bytes_left(gb) < 3 * count)
        return AVERROR_INVALIDDATA;

    for (int i = 0; i < count; i++)
        pal[start + i] = (0xFFU << 24) | bytestream2_get_be24u(gb);

    return 0;
}

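/* AVCF: full (key) frame for the PAL8 path. The chunk begins with a
 * 1024-byte table of 256 four-pixel cells; each 2x2 block of the frame
 * is then coded as a single byte indexing into that table. */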
static int decode_avcf(AVCodecContext *avctx, AVFrame *frame)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    const int l = frame->linesize[0];
    const uint8_t *map = gb->buffer;
    uint8_t *dst = frame->data[0];

    if (bytestream2_get_bytes_left(gb) < 1024 + (frame->width / 2) * (frame->height / 2))
        return AVERROR_INVALIDDATA;

    bytestream2_skipu(gb, 1024);
    for (int y = 0; y < frame->height; y += 2) {
        for (int x = 0; x < frame->width; x += 2) {
            int index = bytestream2_get_byteu(gb);
            const uint8_t *block = map + index * 4;

            dst[x+0]   = block[0];
            dst[x+1]   = block[1];
            dst[x+l]   = block[2];
            dst[x+l+1] = block[3];
        }

        dst += frame->linesize[0] * 2;
    }

    return 0;
}

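/* ALCD: conditional-update variant of AVCF. After the 1024-byte cell
 * table comes a bitmap with one bit per 2x2 block; only blocks whose
 * bit is set read a cell index and are overwritten, the rest keep the
 * previous frame's pixels. */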
static int decode_alcd(AVCodecContext *avctx, AVFrame *frame)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetByteContext sb;
    const int l = frame->linesize[0];
    const uint8_t *map = gb->buffer;
    uint8_t *dst = frame->data[0];
    uint8_t codes = 0;
    int count = 0;

    if (bytestream2_get_bytes_left(gb) < 1024 + (((frame->width / 2) * (frame->height / 2) + 7) >> 3))
        return AVERROR_INVALIDDATA;

    bytestream2_skipu(gb, 1024);
    sb = *gb;
    bytestream2_skipu(gb, ((frame->width / 2) * (frame->height / 2) + 7) >> 3);

    for (int y = 0; y < frame->height; y += 2) {
        for (int x = 0; x < frame->width; x += 2) {
            const uint8_t *block;
            int index;

            if (count == 0) {
                codes = bytestream2_get_byteu(&sb);
                count = 8;
            }

            if (codes & 0x80) {
                index = bytestream2_get_byte(gb);
                block = map + index * 4;

                dst[x+0]   = block[0];
                dst[x+1]   = block[1];
                dst[x+l]   = block[2];
                dst[x+l+1] = block[3];
            }

            codes <<= 1;
            count--;
        }

        dst += frame->linesize[0] * 2;
    }

    return 0;
}

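/* MAD1, 8-bit path: the chunk is a sequence of typed records, terminated
 * by type 0xFF. Type 8 fills 8x8 blocks with a constant, type 7 copies
 * blocks from elsewhere in the current frame via coarse block positions
 * plus a fine offset, type 6 is a raw plane, type 5 fills 2x2 blocks,
 * type 2 is a 2-bit-per-pixel predictor (skip / copy left / copy above /
 * literal byte), and type 3 is not implemented yet. */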
static int decode_mad1(AVCodecContext *avctx, AVFrame *frame)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    const int w = frame->width;
    const int h = frame->height;
    const int l = frame->linesize[0];

    while (bytestream2_get_bytes_left(gb) > 0) {
        int size, type, pos, dy;
        uint8_t *dst;

        type = bytestream2_get_byte(gb);
        if (type == 0xFF)
            break;

        switch (type) {
        case 8:
            dst = frame->data[0];
            for (int y = 0; y < h; y += 8) {
                for (int x = 0; x < w; x += 8) {
                    int fill = bytestream2_get_byte(gb);
                    uint8_t *ddst = dst + x;

                    for (int by = 0; by < 8; by++) {
                        memset(ddst, fill, 8);
                        ddst += l;
                    }
                }

                dst += 8 * l;
            }
            break;
        case 7:
            while (bytestream2_get_bytes_left(gb) > 0) {
                int bsize = bytestream2_get_byte(gb);
                uint8_t *src;
                int count;

                if (!bsize)
                    break;

                count = bytestream2_get_be16(gb);
                while (count > 0) {
                    int mvx, mvy, a, b, c, mx, my;
                    int bsize_w, bsize_h;

                    bsize_w = bsize_h = bsize;
                    if (bytestream2_get_bytes_left(gb) < 4)
                        return AVERROR_INVALIDDATA;
                    mvx = bytestream2_get_byte(gb) * bsize;
                    mvy = bytestream2_get_byte(gb) * bsize;
                    a = bytestream2_get_byte(gb);
                    b = bytestream2_get_byte(gb);
                    c = ((a & 0x3F) << 8) + b;
                    mx = mvx + (c & 0x7F) - 64;
                    my = mvy + (c >> 7) - 64;

                    if (mvy < 0 || mvy >= h)
                        return AVERROR_INVALIDDATA;

                    if (mvx < 0 || mvx >= w)
                        return AVERROR_INVALIDDATA;

                    if (my < 0 || my >= h)
                        return AVERROR_INVALIDDATA;

                    if (mx < 0 || mx >= w)
                        return AVERROR_INVALIDDATA;

                    dst = frame->data[0] + mvx + l * mvy;
                    src = frame->data[0] + mx + l * my;

                    bsize_w = FFMIN3(bsize_w, w - mvx, w - mx);
                    bsize_h = FFMIN3(bsize_h, h - mvy, h - my);

                    if (mvy >= my && (mvy != my || mvx >= mx)) {
                        src += (bsize_h - 1) * l;
                        dst += (bsize_h - 1) * l;
                        for (int by = 0; by < bsize_h; by++) {
                            memmove(dst, src, bsize_w);
                            src -= l;
                            dst -= l;
                        }
                    } else {
                        for (int by = 0; by < bsize_h; by++) {
                            memmove(dst, src, bsize_w);
                            src += l;
                            dst += l;
                        }
                    }

                    count--;
                }
            }
            break;
        case 6:
            dst = frame->data[0];
            if (bytestream2_get_bytes_left(gb) < w * h)
                return AVERROR_INVALIDDATA;
            for (int y = 0; y < h; y++) {
                bytestream2_get_bufferu(gb, dst, w);
                dst += l;
            }
            break;
        case 5:
            dst = frame->data[0];
            for (int y = 0; y < h; y += 2) {
                for (int x = 0; x < w; x += 2) {
                    int fill = bytestream2_get_byte(gb);
                    uint8_t *ddst = dst + x;

                    fill = (fill << 8) | fill;
                    for (int by = 0; by < 2; by++) {
                        AV_WN16(ddst, fill);

                        ddst += l;
                    }
                }

                dst += 2 * l;
            }
            break;
        case 3:
            size = bytestream2_get_le16(gb);
            if (size > 0) {
                int x = bytestream2_get_byte(gb) * 4;
                int y = bytestream2_get_byte(gb) * 4;
                int count = bytestream2_get_byte(gb);
                int fill = bytestream2_get_byte(gb);

                av_log(avctx, AV_LOG_DEBUG, "%d %d %d %d\n", x, y, count, fill);
                for (int i = 0; i < count; i++)
                    ;
                return AVERROR_PATCHWELCOME;
            }
            break;
        case 2:
            dst = frame->data[0];
            pos = 0;
            dy  = 0;
            while (bytestream2_get_bytes_left(gb) > 0) {
                int count = bytestream2_get_byteu(gb);
                int skip = count & 0x3F;

                count = count >> 6;
                if (skip == 0x3F) {
                    pos += 0x3E;
                    while (pos >= w) {
                        pos -= w;
                        dst += l;
                        dy++;
                        if (dy >= h)
                            return 0;
                    }
                } else {
                    pos += skip;
                    while (pos >= w) {
                        pos -= w;
                        dst += l;
                        dy++;
                        if (dy >= h)
                            return 0;
                    }
                    while (count >= 0) {
                        int bits = bytestream2_get_byte(gb);

                        for (int i = 0; i < 4; i++) {
                            switch (bits & 3) {
                            case 0:
                                break;
                            case 1:
                                if (dy < 1 && !pos)
                                    return AVERROR_INVALIDDATA;
                                else
                                    dst[pos] = pos ? dst[pos - 1] : dst[-l + w - 1];
                                break;
                            case 2:
                                if (dy < 1)
                                    return AVERROR_INVALIDDATA;
                                dst[pos] = dst[pos - l];
                                break;
                            case 3:
                                dst[pos] = bytestream2_get_byte(gb);
                                break;
                            }

                            pos++;
                            if (pos >= w) {
                                pos -= w;
                                dst += l;
                                dy++;
                                if (dy >= h)
                                    return 0;
                            }
                            bits >>= 2;
                        }
                        count--;
                    }
                }
            }
            break;
        default:
            return AVERROR_INVALIDDATA;
        }
    }

    return 0;
}

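/* MAD1, 24-bit (BGR0) path: same record structure as the 8-bit variant
 * but operating on 32-bit pixels. Type 8 fills 12x12 blocks, type 7 does
 * intra-frame block copies, and type 12 codes 4x4 tiles with per-pixel
 * predictors that can also reference nearby pixels through the mv0/mv1
 * offset tables or read a literal pixel value. */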
static int decode_mad1_24(AVCodecContext *avctx, AVFrame *frame)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    const int w = frame->width;
    const int h = frame->height;
    const int l = frame->linesize[0] / 4;

    while (bytestream2_get_bytes_left(gb) > 0) {
        int osize, type, pos, dy, di, bcode, value, v14;
        const uint8_t *bits;
        uint32_t *dst;

        type = bytestream2_get_byte(gb);
        if (type == 0xFF)
            return 0;

        switch (type) {
        case 8:
            dst = (uint32_t *)frame->data[0];
            for (int y = 0; y + 12 <= h; y += 12) {
                for (int x = 0; x + 12 <= w; x += 12) {
                    int fill = bytestream2_get_be24(gb);
                    uint32_t *dstp = dst + x;

                    for (int by = 0; by < 12; by++) {
                        for (int bx = 0; bx < 12; bx++)
                            dstp[bx] = fill;

                        dstp += l;
                    }
                }

                dst += 12 * l;
            }
            break;
        case 7:
            while (bytestream2_get_bytes_left(gb) > 0) {
                int bsize = bytestream2_get_byte(gb);
                uint32_t *src;
                int count;

                if (!bsize)
                    break;

                count = bytestream2_get_be16(gb);
                while (count > 0) {
                    int mvx, mvy, a, b, c, mx, my;
                    int bsize_w, bsize_h;

                    bsize_w = bsize_h = bsize;
                    if (bytestream2_get_bytes_left(gb) < 4)
                        return AVERROR_INVALIDDATA;
                    mvx = bytestream2_get_byte(gb) * bsize;
                    mvy = bytestream2_get_byte(gb) * bsize;
                    a = bytestream2_get_byte(gb);
                    b = bytestream2_get_byte(gb);
                    c = ((a & 0x3F) << 8) + b;
                    mx = mvx + (c & 0x7F) - 64;
                    my = mvy + (c >> 7) - 64;

                    if (mvy < 0 || mvy >= h)
                        return AVERROR_INVALIDDATA;

                    if (mvx < 0 || mvx >= w)
                        return AVERROR_INVALIDDATA;

                    if (my < 0 || my >= h)
                        return AVERROR_INVALIDDATA;

                    if (mx < 0 || mx >= w)
                        return AVERROR_INVALIDDATA;

                    dst = (uint32_t *)frame->data[0] + mvx + l * mvy;
                    src = (uint32_t *)frame->data[0] + mx + l * my;

                    bsize_w = FFMIN3(bsize_w, w - mvx, w - mx);
                    bsize_h = FFMIN3(bsize_h, h - mvy, h - my);

                    if (mvy >= my && (mvy != my || mvx >= mx)) {
                        src += (bsize_h - 1) * l;
                        dst += (bsize_h - 1) * l;
                        for (int by = 0; by < bsize_h; by++) {
                            memmove(dst, src, bsize_w * 4);
                            src -= l;
                            dst -= l;
                        }
                    } else {
                        for (int by = 0; by < bsize_h; by++) {
                            memmove(dst, src, bsize_w * 4);
                            src += l;
                            dst += l;
                        }
                    }

                    count--;
                }
            }
            break;
        case 12:
            osize = ((h + 3) / 4) * ((w + 3) / 4) + 7;
            bits = gb->buffer;
            di   = 0;
            bcode = v14 = 0;
            if (bytestream2_get_bytes_left(gb) < osize >> 3)
                return AVERROR_INVALIDDATA;
            bytestream2_skip(gb, osize >> 3);
            for (int x = 0; x < w; x += 4) {
                for (int y = 0; y < h; y += 4) {
                    int astate = 0;

                    if (bits[di >> 3] & (1 << (di & 7))) {
                        int codes = bytestream2_get_byte(gb);

                        for (int count = 0; count < 4; count++) {
                            uint32_t *src = (uint32_t *)frame->data[0];
                            size_t src_size = l * (h - 1) + (w - 1);
                            int nv, v, code = codes & 3;

                            pos = x;
                            dy  = y + count;
                            dst = (uint32_t *)frame->data[0] + pos + dy * l;
                            if (code & 1)
                                bcode = bytestream2_get_byte(gb);
                            if (code == 3) {
                                for (int j = 0; j < 4; j++) {
                                    switch (bcode & 3) {
                                    case 0:
                                        break;
                                    case 1:
                                        if (dy < 1 && !pos)
                                            return AVERROR_INVALIDDATA;
                                        dst[0] = dst[-1];
                                        break;
                                    case 2:
                                        if (dy < 1)
                                            return AVERROR_INVALIDDATA;
                                        dst[0] = dst[-l];
                                        break;
                                    case 3:
                                        if (astate) {
                                            nv = value >> 4;
                                        } else {
                                            value = bytestream2_get_byte(gb);
                                            nv = value & 0xF;
                                        }
                                        astate ^= 1;
                                        dst[0] = src[av_clip(l * (dy + s->mv1[nv][1]) + pos +
                                                             s->mv1[nv][0], 0, src_size)];
                                        break;
                                    }

                                    bcode >>= 2;
                                    dst++;
                                    pos++;
                                }
                            } else if (code) {
                                if (code == 1)
                                    v14 = bcode;
                                else
                                    bcode = v14;
                                for (int j = 0; j < 4; j++) {
                                    switch (bcode & 3) {
                                    case 0:
                                        break;
                                    case 1:
                                        if (dy < 1 && !pos)
                                            return AVERROR_INVALIDDATA;
                                        dst[0] = dst[-1];
                                        break;
                                    case 2:
                                        if (dy < 1)
                                            return AVERROR_INVALIDDATA;
                                        dst[0] = dst[-l];
                                        break;
                                    case 3:
                                        v = bytestream2_get_byte(gb);
                                        if (v < 128) {
                                            dst[0] = src[av_clip(l * (dy + s->mv0[v][1]) + pos +
                                                                 s->mv0[v][0], 0, src_size)];
                                        } else {
                                            dst[0] = ((v & 0x7F) << 17) | bytestream2_get_be16(gb);
                                        }
                                        break;
                                    }

                                    bcode >>= 2;
                                    dst++;
                                    pos++;
                                }
                            }

                            codes >>= 2;
                        }
                    }

                    di++;
                }
            }
            break;
        default:
            return AVERROR_INVALIDDATA;
        }
    }

    return AVERROR_INVALIDDATA;
}

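/* RLEF/RLED: byte-oriented run-length coding. Each pair is (count, value);
 * a zero count skips value pixels (leaving the previous frame's content),
 * otherwise value is repeated count times. */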
static int decode_rle(AVCodecContext *avctx, AVFrame *frame)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    const int w = frame->width;
    const int h = frame->height;
    const int l = frame->linesize[0];
    uint8_t *dst = frame->data[0];
    int pos = 0, y = 0;

    while (bytestream2_get_bytes_left(gb) > 0) {
        int count = bytestream2_get_byte(gb);
        int pixel = bytestream2_get_byte(gb);

        if (!count) {
            pos += pixel;
            while (pos >= w) {
                pos -= w;
                y++;
                if (y >= h)
                    return 0;
            }
        } else {
            while (count > 0) {
                dst[pos + y * l] = pixel;
                count--;
                pos++;
                if (pos >= w) {
                    pos = 0;
                    y++;
                    if (y >= h)
                        return 0;
                }
            }
        }
    }

    return 0;
}

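/* Each packet carries one chunk tagged with a FourCC: PAL8 updates the
 * palette (and clears the frame), MAD1 selects the 8-bit or 24-bit block
 * decoder based on the pixel format, and AVCF/ALCD/RLEF/RLED are the
 * palettized full/delta block and RLE coders. AVCF and RLEF chunks are
 * flagged as key frames, ALCD and RLED as inter frames. */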
static int decode_frame(AVCodecContext *avctx, void *data,
                        int *got_frame, AVPacket *avpkt)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    AVFrame *frame = s->frame;
    uint32_t chunk;
    int ret;

    if (avpkt->size < 4)
        return AVERROR_INVALIDDATA;

    bytestream2_init(gb, avpkt->data, avpkt->size);

    if ((ret = ff_reget_buffer(avctx, frame, 0)) < 0)
        return ret;

    chunk = bytestream2_get_be32(gb);
    switch (chunk) {
    case MKBETAG('P', 'A', 'L', '8'):
        for (int y = 0; y < frame->height; y++)
            memset(frame->data[0] + y * frame->linesize[0], 0, frame->width * s->bpp);
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
            memset(frame->data[1], 0, AVPALETTE_SIZE);
        return decode_pal8(avctx, s->pal);
    case MKBETAG('M', 'A', 'D', '1'):
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
            ret = decode_mad1(avctx, frame);
        else
            ret = decode_mad1_24(avctx, frame);
        break;
    case MKBETAG('A', 'V', 'C', 'F'):
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
            s->key = 1;
            ret = decode_avcf(avctx, frame);
            break;
        }
    case MKBETAG('A', 'L', 'C', 'D'):
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
            s->key = 0;
            ret = decode_alcd(avctx, frame);
            break;
        }
    case MKBETAG('R', 'L', 'E', 'F'):
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
            s->key = 1;
            ret = decode_rle(avctx, frame);
            break;
        }
    case MKBETAG('R', 'L', 'E', 'D'):
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
            s->key = 0;
            ret = decode_rle(avctx, frame);
            break;
        }
    default:
        av_log(avctx, AV_LOG_DEBUG, "unknown chunk 0x%X\n", chunk);
        break;
    }

    if (ret < 0)
        return ret;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
        memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);

    if ((ret = av_frame_ref(data, s->frame)) < 0)
        return ret;

    frame->pict_type = s->key ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;
    frame->key_frame = s->key;
    *got_frame = 1;

    return avpkt->size;
}

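/* Pick the output pixel format from the container's bit depth and
 * precompute the mv0/mv1 offset tables used by the 24-bit MAD1 decoder. */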
static av_cold int decode_init(AVCodecContext *avctx)
{
    ArgoContext *s = avctx->priv_data;

    switch (avctx->bits_per_raw_sample) {
    case 8:  s->bpp = 1;
             avctx->pix_fmt = AV_PIX_FMT_PAL8; break;
    case 24: s->bpp = 4;
             avctx->pix_fmt = AV_PIX_FMT_BGR0; break;
    default: avpriv_request_sample(avctx, "depth == %u", avctx->bits_per_raw_sample);
             return AVERROR_PATCHWELCOME;
    }

    if (avctx->width % 2 || avctx->height % 2) {
        avpriv_request_sample(avctx, "Odd dimensions\n");
        return AVERROR_PATCHWELCOME;
    }

    s->frame = av_frame_alloc();
    if (!s->frame)
        return AVERROR(ENOMEM);

    for (int n = 0, i = -4; i < 4; i++) {
        for (int j = -14; j < 2; j++) {
            s->mv0[n][0] = j;
            s->mv0[n++][1] = i;
        }
    }

    for (int n = 0, i = -5; i <= 1; i += 2) {
        int j = -5;

        while (j <= 1) {
            s->mv1[n][0] = j;
            s->mv1[n++][1] = i;
            j += 2;
        }
    }

    return 0;
}

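/* Called on flush (e.g. after a seek): drop the reference frame so stale
 * pixels are not reused by inter-coded chunks. */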
static void decode_flush(AVCodecContext *avctx)
{
    ArgoContext *s = avctx->priv_data;

    av_frame_unref(s->frame);
}

static av_cold int decode_close(AVCodecContext *avctx)
{
    ArgoContext *s = avctx->priv_data;

    av_frame_free(&s->frame);

    return 0;
}

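/* Decoder registration; FF_CODEC_CAP_INIT_CLEANUP lets decode_close run
 * even if decode_init fails partway through. */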
AVCodec ff_argo_decoder = {
    .name           = "argo",
    .long_name      = NULL_IF_CONFIG_SMALL("Argonaut Games Video"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_ARGO,
    .priv_data_size = sizeof(ArgoContext),
    .init           = decode_init,
    .decode         = decode_frame,
    .flush          = decode_flush,
    .close          = decode_close,
    .capabilities   = AV_CODEC_CAP_DR1,
    .caps_internal  = FF_CODEC_CAP_INIT_CLEANUP,
};