FFmpeg
argo.c
/*
 * Argonaut Games Video decoder
 * Copyright (c) 2020 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>

#include "libavutil/internal.h"
#include "libavutil/intreadwrite.h"

#include "avcodec.h"
#include "bytestream.h"
#include "codec_internal.h"
#include "decode.h"

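/* Persistent decoder state: bytestream reader for the current packet, bytes
 * per pixel (1 for PAL8, 4 for BGR0), keyframe flag, the two motion-vector
 * tables used by the 24-bit MAD1 path, the current palette and the reference
 * frame that inter-coded chunks update in place. */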
typedef struct ArgoContext {
    GetByteContext gb;

    int bpp;
    int key;
    int mv0[128][2];
    int mv1[16][2];
    uint32_t pal[256];
    AVFrame *frame;
} ArgoContext;

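/* PAL8 chunk: a little-endian start index and entry count followed by one
 * 3-byte colour per entry; entries are stored with an opaque alpha byte. */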
static int decode_pal8(AVCodecContext *avctx, uint32_t *pal)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    int start, count;

    start = bytestream2_get_le16(gb);
    count = bytestream2_get_le16(gb);

    if (start + count > 256)
        return AVERROR_INVALIDDATA;

    if (bytestream2_get_bytes_left(gb) < 3 * count)
        return AVERROR_INVALIDDATA;

    for (int i = 0; i < count; i++)
        pal[start + i] = (0xFFU << 24) | bytestream2_get_be24u(gb);

    return 0;
}

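/* AVCF: intra-coded frame for the palettized path. A 1024-byte table of 256
 * 2x2 pixel patterns is followed by one table index per 2x2 block of the
 * frame. */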
static int decode_avcf(AVCodecContext *avctx, AVFrame *frame)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    const int l = frame->linesize[0];
    const uint8_t *map = gb->buffer;
    uint8_t *dst = frame->data[0];

    if (bytestream2_get_bytes_left(gb) < 1024 + (frame->width / 2) * (frame->height / 2))
        return AVERROR_INVALIDDATA;

    bytestream2_skipu(gb, 1024);
    for (int y = 0; y < frame->height; y += 2) {
        for (int x = 0; x < frame->width; x += 2) {
            int index = bytestream2_get_byteu(gb);
            const uint8_t *block = map + index * 4;

            dst[x+0]   = block[0];
            dst[x+1]   = block[1];
            dst[x+l]   = block[2];
            dst[x+l+1] = block[3];
        }

        dst += frame->linesize[0] * 2;
    }

    return 0;
}

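/* ALCD: conditional-update variant of AVCF. After the 1024-byte pattern
 * table comes a bitmap with one bit per 2x2 block (MSB first); only blocks
 * whose bit is set read a new pattern index, the rest keep their previous
 * pixels. */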
static int decode_alcd(AVCodecContext *avctx, AVFrame *frame)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetByteContext sb;
    const int l = frame->linesize[0];
    const uint8_t *map = gb->buffer;
    uint8_t *dst = frame->data[0];
    uint8_t codes = 0;
    int count = 0;

    if (bytestream2_get_bytes_left(gb) < 1024 + (((frame->width / 2) * (frame->height / 2) + 7) >> 3))
        return AVERROR_INVALIDDATA;

    bytestream2_skipu(gb, 1024);
    sb = *gb;
    bytestream2_skipu(gb, ((frame->width / 2) * (frame->height / 2) + 7) >> 3);

    for (int y = 0; y < frame->height; y += 2) {
        for (int x = 0; x < frame->width; x += 2) {
            const uint8_t *block;
            int index;

            if (count == 0) {
                codes = bytestream2_get_byteu(&sb);
                count = 8;
            }

            if (codes & 0x80) {
                index = bytestream2_get_byte(gb);
                block = map + index * 4;

                dst[x+0]   = block[0];
                dst[x+1]   = block[1];
                dst[x+l]   = block[2];
                dst[x+l+1] = block[3];
            }

            codes <<= 1;
            count--;
        }

        dst += frame->linesize[0] * 2;
    }

    return 0;
}

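/* MAD1, palettized path: the payload is a sequence of typed sub-chunks,
 * each updating the frame in a different way, terminated by type 0xFF. */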
static int decode_mad1(AVCodecContext *avctx, AVFrame *frame)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    const int w = frame->width;
    const int h = frame->height;
    const int l = frame->linesize[0];

    while (bytestream2_get_bytes_left(gb) > 0) {
        int size, type, pos, dy;
        uint8_t *dst;

        type = bytestream2_get_byte(gb);
        if (type == 0xFF)
            break;

        switch (type) {
        case 8:
            dst = frame->data[0];
            for (int y = 0; y < h; y += 8) {
                for (int x = 0; x < w; x += 8) {
                    int fill = bytestream2_get_byte(gb);
                    uint8_t *ddst = dst + x;

                    for (int by = 0; by < 8; by++) {
                        memset(ddst, fill, 8);
                        ddst += l;
                    }
                }

                dst += 8 * l;
            }
            break;
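        /* Type 7: motion copies within the current frame. Runs of blocks
         * share one block size; each block stores its destination position
         * plus a source offset in the range [-64, 63] on each axis. Row order
         * is flipped when the destination follows the source so that
         * overlapping regions copy correctly. */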
        case 7:
            while (bytestream2_get_bytes_left(gb) > 0) {
                int bsize = bytestream2_get_byte(gb);
                uint8_t *src;
                int count;

                if (!bsize)
                    break;

                count = bytestream2_get_be16(gb);
                while (count > 0) {
                    int mvx, mvy, a, b, c, mx, my;
                    int bsize_w, bsize_h;

                    bsize_w = bsize_h = bsize;
                    if (bytestream2_get_bytes_left(gb) < 4)
                        return AVERROR_INVALIDDATA;
                    mvx = bytestream2_get_byte(gb) * bsize;
                    mvy = bytestream2_get_byte(gb) * bsize;
                    a = bytestream2_get_byte(gb);
                    b = bytestream2_get_byte(gb);
                    c = ((a & 0x3F) << 8) + b;
                    mx = mvx + (c & 0x7F) - 64;
                    my = mvy + (c >> 7) - 64;

                    if (mvy < 0 || mvy >= h)
                        return AVERROR_INVALIDDATA;

                    if (mvx < 0 || mvx >= w)
                        return AVERROR_INVALIDDATA;

                    if (my < 0 || my >= h)
                        return AVERROR_INVALIDDATA;

                    if (mx < 0 || mx >= w)
                        return AVERROR_INVALIDDATA;

                    dst = frame->data[0] + mvx + l * mvy;
                    src = frame->data[0] + mx + l * my;

                    bsize_w = FFMIN3(bsize_w, w - mvx, w - mx);
                    bsize_h = FFMIN3(bsize_h, h - mvy, h - my);

                    if (mvy >= my && (mvy != my || mvx >= mx)) {
                        src += (bsize_h - 1) * l;
                        dst += (bsize_h - 1) * l;
                        for (int by = 0; by < bsize_h; by++) {
                            memmove(dst, src, bsize_w);
                            src -= l;
                            dst -= l;
                        }
                    } else {
                        for (int by = 0; by < bsize_h; by++) {
                            memmove(dst, src, bsize_w);
                            src += l;
                            dst += l;
                        }
                    }

                    count--;
                }
            }
            break;
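        /* Type 6: raw frame, one full row of pixels per line. Type 5: one
         * fill byte per 2x2 block. Type 3: parsed but not implemented. */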
        case 6:
            dst = frame->data[0];
            if (bytestream2_get_bytes_left(gb) < w * h)
                return AVERROR_INVALIDDATA;
            for (int y = 0; y < h; y++) {
                bytestream2_get_bufferu(gb, dst, w);
                dst += l;
            }
            break;
        case 5:
            dst = frame->data[0];
            for (int y = 0; y < h; y += 2) {
                for (int x = 0; x < w; x += 2) {
                    int fill = bytestream2_get_byte(gb);
                    uint8_t *ddst = dst + x;

                    fill = (fill << 8) | fill;
                    for (int by = 0; by < 2; by++) {
                        AV_WN16(ddst, fill);

                        ddst += l;
                    }
                }

                dst += 2 * l;
            }
            break;
        case 3:
            size = bytestream2_get_le16(gb);
            if (size > 0) {
                int x = bytestream2_get_byte(gb) * 4;
                int y = bytestream2_get_byte(gb) * 4;
                int count = bytestream2_get_byte(gb);
                int fill = bytestream2_get_byte(gb);

                av_log(avctx, AV_LOG_DEBUG, "%d %d %d %d\n", x, y, count, fill);
                for (int i = 0; i < count; i++)
                    ;
                return AVERROR_PATCHWELCOME;
            }
            break;
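        /* Type 2: per-pixel prediction. Control bytes advance the output
         * position, then groups of 2-bit opcodes choose per pixel between
         * keeping the old value, repeating the pixel to the left, copying the
         * pixel above, or reading a literal value from the bytestream. */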
        case 2:
            dst = frame->data[0];
            pos = 0;
            dy = 0;
            while (bytestream2_get_bytes_left(gb) > 0) {
                int count = bytestream2_get_byteu(gb);
                int skip = count & 0x3F;

                count = count >> 6;
                if (skip == 0x3F) {
                    pos += 0x3E;
                    while (pos >= w) {
                        pos -= w;
                        dst += l;
                        dy++;
                        if (dy >= h)
                            return 0;
                    }
                } else {
                    pos += skip;
                    while (pos >= w) {
                        pos -= w;
                        dst += l;
                        dy++;
                        if (dy >= h)
                            return 0;
                    }
                    while (count >= 0) {
                        int bits = bytestream2_get_byte(gb);

                        for (int i = 0; i < 4; i++) {
                            switch (bits & 3) {
                            case 0:
                                break;
                            case 1:
                                if (dy < 1 && !pos)
                                    return AVERROR_INVALIDDATA;
                                else
                                    dst[pos] = pos ? dst[pos - 1] : dst[-l + w - 1];
                                break;
                            case 2:
                                if (dy < 1)
                                    return AVERROR_INVALIDDATA;
                                dst[pos] = dst[pos - l];
                                break;
                            case 3:
                                dst[pos] = bytestream2_get_byte(gb);
                                break;
                            }

                            pos++;
                            if (pos >= w) {
                                pos -= w;
                                dst += l;
                                dy++;
                                if (dy >= h)
                                    return 0;
                            }
                            bits >>= 2;
                        }
                        count--;
                    }
                }
            }
            break;
        default:
            return AVERROR_INVALIDDATA;
        }
    }

    return 0;
}

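/* MAD1, 24-bit (BGR0) path: same typed sub-chunk layout as the palettized
 * decoder, but on 32-bit pixels; solid fills use 12x12 blocks and type 12
 * carries the predicted blocks. */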
static int decode_mad1_24(AVCodecContext *avctx, AVFrame *frame)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    const int w = frame->width;
    const int h = frame->height;
    const int l = frame->linesize[0] / 4;

    while (bytestream2_get_bytes_left(gb) > 0) {
        int osize, type, pos, dy, di, bcode, value, v14;
        const uint8_t *bits;
        uint32_t *dst;

        type = bytestream2_get_byte(gb);
        if (type == 0xFF)
            return 0;

        switch (type) {
        case 8:
            dst = (uint32_t *)frame->data[0];
            for (int y = 0; y + 12 <= h; y += 12) {
                for (int x = 0; x + 12 <= w; x += 12) {
                    int fill = bytestream2_get_be24(gb);
                    uint32_t *dstp = dst + x;

                    for (int by = 0; by < 12; by++) {
                        for (int bx = 0; bx < 12; bx++)
                            dstp[bx] = fill;

                        dstp += l;
                    }
                }

                dst += 12 * l;
            }
            break;
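        /* Type 7: same motion-copy scheme as the palettized path, applied to
         * 32-bit pixels. */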
        case 7:
            while (bytestream2_get_bytes_left(gb) > 0) {
                int bsize = bytestream2_get_byte(gb);
                uint32_t *src;
                int count;

                if (!bsize)
                    break;

                count = bytestream2_get_be16(gb);
                while (count > 0) {
                    int mvx, mvy, a, b, c, mx, my;
                    int bsize_w, bsize_h;

                    bsize_w = bsize_h = bsize;
                    if (bytestream2_get_bytes_left(gb) < 4)
                        return AVERROR_INVALIDDATA;
                    mvx = bytestream2_get_byte(gb) * bsize;
                    mvy = bytestream2_get_byte(gb) * bsize;
                    a = bytestream2_get_byte(gb);
                    b = bytestream2_get_byte(gb);
                    c = ((a & 0x3F) << 8) + b;
                    mx = mvx + (c & 0x7F) - 64;
                    my = mvy + (c >> 7) - 64;

                    if (mvy < 0 || mvy >= h)
                        return AVERROR_INVALIDDATA;

                    if (mvx < 0 || mvx >= w)
                        return AVERROR_INVALIDDATA;

                    if (my < 0 || my >= h)
                        return AVERROR_INVALIDDATA;

                    if (mx < 0 || mx >= w)
                        return AVERROR_INVALIDDATA;

                    dst = (uint32_t *)frame->data[0] + mvx + l * mvy;
                    src = (uint32_t *)frame->data[0] + mx + l * my;

                    bsize_w = FFMIN3(bsize_w, w - mvx, w - mx);
                    bsize_h = FFMIN3(bsize_h, h - mvy, h - my);

                    if (mvy >= my && (mvy != my || mvx >= mx)) {
                        src += (bsize_h - 1) * l;
                        dst += (bsize_h - 1) * l;
                        for (int by = 0; by < bsize_h; by++) {
                            memmove(dst, src, bsize_w * 4);
                            src -= l;
                            dst -= l;
                        }
                    } else {
                        for (int by = 0; by < bsize_h; by++) {
                            memmove(dst, src, bsize_w * 4);
                            src += l;
                            dst += l;
                        }
                    }

                    count--;
                }
            }
            break;
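        /* Type 12: predicted 4x4 blocks. A bitmap with one bit per 4x4 block
         * (scanned column by column) marks the blocks that carry data. Each
         * marked block reads a code byte holding a 2-bit mode per row; rows
         * are then built from 2-bit opcodes that keep the old pixel, copy
         * from the left or from above, fetch a pixel through the mv0/mv1
         * offset tables, or insert a literal colour value. */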
        case 12:
            osize = ((h + 3) / 4) * ((w + 3) / 4) + 7;
            bits = gb->buffer;
            di = 0;
            bcode = v14 = 0;
            if (bytestream2_get_bytes_left(gb) < osize >> 3)
                return AVERROR_INVALIDDATA;
            bytestream2_skip(gb, osize >> 3);
            for (int x = 0; x < w; x += 4) {
                for (int y = 0; y < h; y += 4) {
                    int astate = 0;

                    if (bits[di >> 3] & (1 << (di & 7))) {
                        int codes = bytestream2_get_byte(gb);

                        for (int count = 0; count < 4; count++) {
                            uint32_t *src = (uint32_t *)frame->data[0];
                            size_t src_size = l * (h - 1) + (w - 1);
                            int nv, v, code = codes & 3;

                            pos = x;
                            dy = y + count;
                            dst = (uint32_t *)frame->data[0] + pos + dy * l;
                            if (code & 1)
                                bcode = bytestream2_get_byte(gb);
                            if (code == 3) {
                                for (int j = 0; j < 4; j++) {
                                    switch (bcode & 3) {
                                    case 0:
                                        break;
                                    case 1:
                                        if (dy < 1 && !pos)
                                            return AVERROR_INVALIDDATA;
                                        dst[0] = dst[-1];
                                        break;
                                    case 2:
                                        if (dy < 1)
                                            return AVERROR_INVALIDDATA;
                                        dst[0] = dst[-l];
                                        break;
                                    case 3:
                                        if (astate) {
                                            nv = value >> 4;
                                        } else {
                                            value = bytestream2_get_byte(gb);
                                            nv = value & 0xF;
                                        }
                                        astate ^= 1;
                                        dst[0] = src[av_clip(l * (dy + s->mv1[nv][1]) + pos +
                                                             s->mv1[nv][0], 0, src_size)];
                                        break;
                                    }

                                    bcode >>= 2;
                                    dst++;
                                    pos++;
                                }
                            } else if (code) {
                                if (code == 1)
                                    v14 = bcode;
                                else
                                    bcode = v14;
                                for (int j = 0; j < 4; j++) {
                                    switch (bcode & 3) {
                                    case 0:
                                        break;
                                    case 1:
                                        if (dy < 1 && !pos)
                                            return AVERROR_INVALIDDATA;
                                        dst[0] = dst[-1];
                                        break;
                                    case 2:
                                        if (dy < 1)
                                            return AVERROR_INVALIDDATA;
                                        dst[0] = dst[-l];
                                        break;
                                    case 3:
                                        v = bytestream2_get_byte(gb);
                                        if (v < 128) {
                                            dst[0] = src[av_clip(l * (dy + s->mv0[v][1]) + pos +
                                                                 s->mv0[v][0], 0, src_size)];
                                        } else {
                                            dst[0] = ((v & 0x7F) << 17) | bytestream2_get_be16(gb);
                                        }
                                        break;
                                    }

                                    bcode >>= 2;
                                    dst++;
                                    pos++;
                                }
                            }

                            codes >>= 2;
                        }
                    }

                    di++;
                }
            }
            break;
        default:
            return AVERROR_INVALIDDATA;
        }
    }

    return AVERROR_INVALIDDATA;
}

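/* RLEF/RLED: byte-oriented run-length coding. A zero run length skips the
 * given number of pixels (keeping their previous contents); otherwise the
 * run is filled with the given pixel value. */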
static int decode_rle(AVCodecContext *avctx, AVFrame *frame)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    const int w = frame->width;
    const int h = frame->height;
    const int l = frame->linesize[0];
    uint8_t *dst = frame->data[0];
    int pos = 0, y = 0;

    while (bytestream2_get_bytes_left(gb) > 0) {
        int count = bytestream2_get_byte(gb);
        int pixel = bytestream2_get_byte(gb);

        if (!count) {
            pos += pixel;
            while (pos >= w) {
                pos -= w;
                y++;
                if (y >= h)
                    return 0;
            }
        } else {
            while (count > 0) {
                dst[pos + y * l] = pixel;
                count--;
                pos++;
                if (pos >= w) {
                    pos = 0;
                    y++;
                    if (y >= h)
                        return 0;
                }
            }
        }
    }

    return 0;
}

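/* Each packet begins with a four-character chunk tag selecting the coding
 * method. PAL8 packets update the palette (and clear the frame) without
 * producing a picture; all other chunk types update the shared reference
 * frame, which is then returned to the caller. */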
static int decode_frame(AVCodecContext *avctx, AVFrame *rframe,
                        int *got_frame, AVPacket *avpkt)
{
    ArgoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    AVFrame *frame = s->frame;
    uint32_t chunk;
    int ret;

    if (avpkt->size < 4)
        return AVERROR_INVALIDDATA;

    bytestream2_init(gb, avpkt->data, avpkt->size);

    if ((ret = ff_reget_buffer(avctx, frame, 0)) < 0)
        return ret;

    chunk = bytestream2_get_be32(gb);
    switch (chunk) {
    case MKBETAG('P', 'A', 'L', '8'):
        for (int y = 0; y < frame->height; y++)
            memset(frame->data[0] + y * frame->linesize[0], 0, frame->width * s->bpp);
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
            memset(frame->data[1], 0, AVPALETTE_SIZE);
        return decode_pal8(avctx, s->pal);
    case MKBETAG('M', 'A', 'D', '1'):
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
            ret = decode_mad1(avctx, frame);
        else
            ret = decode_mad1_24(avctx, frame);
        break;
    case MKBETAG('A', 'V', 'C', 'F'):
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
            s->key = 1;
            ret = decode_avcf(avctx, frame);
            break;
        }
    case MKBETAG('A', 'L', 'C', 'D'):
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
            s->key = 0;
            ret = decode_alcd(avctx, frame);
            break;
        }
    case MKBETAG('R', 'L', 'E', 'F'):
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
            s->key = 1;
            ret = decode_rle(avctx, frame);
            break;
        }
    case MKBETAG('R', 'L', 'E', 'D'):
        if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
            s->key = 0;
            ret = decode_rle(avctx, frame);
            break;
        }
    default:
        av_log(avctx, AV_LOG_DEBUG, "unknown chunk 0x%X\n", chunk);
        break;
    }

    if (ret < 0)
        return ret;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
        memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);

    if ((ret = av_frame_ref(rframe, s->frame)) < 0)
        return ret;

    frame->pict_type = s->key ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;
    if (s->key)
        frame->flags |= AV_FRAME_FLAG_KEY;
    else
        frame->flags &= ~AV_FRAME_FLAG_KEY;
    *got_frame = 1;

    return avpkt->size;
}

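/* Pick the output pixel format from the coded bit depth, reject odd frame
 * sizes, allocate the reference frame and build the motion-vector tables
 * used by the 24-bit MAD1 decoder. */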
static av_cold int decode_init(AVCodecContext *avctx)
{
    ArgoContext *s = avctx->priv_data;

    switch (avctx->bits_per_coded_sample) {
    case 8:  s->bpp = 1;
             avctx->pix_fmt = AV_PIX_FMT_PAL8; break;
    case 24: s->bpp = 4;
             avctx->pix_fmt = AV_PIX_FMT_BGR0; break;
    default: avpriv_request_sample(s, "depth == %u", avctx->bits_per_coded_sample);
             return AVERROR_PATCHWELCOME;
    }

    if (avctx->width % 2 || avctx->height % 2) {
        avpriv_request_sample(s, "Odd dimensions\n");
        return AVERROR_PATCHWELCOME;
    }

    s->frame = av_frame_alloc();
    if (!s->frame)
        return AVERROR(ENOMEM);

    for (int n = 0, i = -4; i < 4; i++) {
        for (int j = -14; j < 2; j++) {
            s->mv0[n][0] = j;
            s->mv0[n++][1] = i;
        }
    }

    for (int n = 0, i = -5; i <= 1; i += 2) {
        int j = -5;

        while (j <= 1) {
            s->mv1[n][0] = j;
            s->mv1[n++][1] = i;
            j += 2;
        }
    }

    return 0;
}

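/* Called when the decoder is flushed (e.g. on seeking): drop the contents
 * of the internal reference frame. */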
static void decode_flush(AVCodecContext *avctx)
{
    ArgoContext *s = avctx->priv_data;

    av_frame_unref(s->frame);
}

static av_cold int decode_close(AVCodecContext *avctx)
{
    ArgoContext *s = avctx->priv_data;

    av_frame_free(&s->frame);

    return 0;
}

737  .p.name = "argo",
738  CODEC_LONG_NAME("Argonaut Games Video"),
739  .p.type = AVMEDIA_TYPE_VIDEO,
740  .p.id = AV_CODEC_ID_ARGO,
741  .priv_data_size = sizeof(ArgoContext),
742  .init = decode_init,
744  .flush = decode_flush,
745  .close = decode_close,
746  .p.capabilities = AV_CODEC_CAP_DR1,
747  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
748 };