FFmpeg
ffv1dec.c
1 /*
2  * FFV1 decoder
3  *
4  * Copyright (c) 2003-2013 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * FF Video Codec 1 (a lossless codec) decoder
26  */
27 
28 #include "libavutil/avassert.h"
29 #include "libavutil/crc.h"
30 #include "libavutil/opt.h"
31 #include "libavutil/imgutils.h"
32 #include "libavutil/pixdesc.h"
33 #include "avcodec.h"
34 #include "codec_internal.h"
35 #include "get_bits.h"
36 #include "rangecoder.h"
37 #include "golomb.h"
38 #include "mathops.h"
39 #include "ffv1.h"
40 #include "thread.h"
41 #include "threadframe.h"
42 
43 static inline av_flatten int get_symbol_inline(RangeCoder *c, uint8_t *state,
44  int is_signed)
45 {
46  if (get_rac(c, state + 0))
47  return 0;
48  else {
49  int i, e;
50  unsigned a;
51  e = 0;
52  while (get_rac(c, state + 1 + FFMIN(e, 9))) { // 1..10
53  e++;
54  if (e > 31)
55  return AVERROR_INVALIDDATA;
56  }
57 
58  a = 1;
59  for (i = e - 1; i >= 0; i--)
60  a += a + get_rac(c, state + 22 + FFMIN(i, 9)); // 22..31
61 
62  e = -(is_signed && get_rac(c, state + 11 + FFMIN(e, 10))); // 11..21
63  return (a ^ e) - e;
64  }
65 }
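/*
 * Editor's note (comment added for clarity, not in the upstream file):
 * get_symbol_inline() reads one range-coded integer as used in FFV1
 * headers: a "value is zero" bit (state 0), a unary exponent e
 * (states 1..10), e mantissa bits MSB-first (states 22..31) and, when
 * is_signed, a sign bit (states 11..21). Worked example: the bit
 * sequence 0, 1 1 0, 0 1, 0 gives e = 2, a = 0b101 = 5, sign = 0,
 * i.e. +5; a final sign bit of 1 would yield (5 ^ -1) - (-1) = -5.
 */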
66 
67 static av_noinline int get_symbol(RangeCoder *c, uint8_t *state, int is_signed)
68 {
69  return get_symbol_inline(c, state, is_signed);
70 }
71 
72 static inline int get_vlc_symbol(GetBitContext *gb, VlcState *const state,
73  int bits)
74 {
75  int k, i, v, ret;
76 
77  i = state->count;
78  k = 0;
79  while (i < state->error_sum) { // FIXME: optimize
80  k++;
81  i += i;
82  }
83 
84  v = get_sr_golomb(gb, k, 12, bits);
85  ff_dlog(NULL, "v:%d bias:%d error:%d drift:%d count:%d k:%d",
86  v, state->bias, state->error_sum, state->drift, state->count, k);
87 
88  v ^= ((2 * state->drift + state->count) >> 31);
89 
90  ret = fold(v + state->bias, bits);
91 
92  update_vlc_state(state, v);
93 
94  return ret;
95 }
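/*
 * Editor's note (added comment): the k loop in get_vlc_symbol() picks the
 * Rice parameter as the smallest k with state->count << k >= state->error_sum,
 * i.e. roughly log2(error_sum / count), the usual adaptive Golomb-Rice rule.
 * Example: count = 3, error_sum = 20 -> 3, 6, 12 < 20 <= 24, so k = 3.
 * The "v ^= ..." line flips the mapping of v when 2*drift + count is
 * negative, and fold() wraps the biased value back into the bits-wide
 * sample range.
 */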
96 
97 static int is_input_end(FFV1Context *s)
98 {
99  if (s->ac != AC_GOLOMB_RICE) {
100  RangeCoder *const c = &s->c;
101  if (c->overread > MAX_OVERREAD)
102  return AVERROR_INVALIDDATA;
103  } else {
104  if (get_bits_left(&s->gb) < 1)
105  return AVERROR_INVALIDDATA;
106  }
107  return 0;
108 }
109 
110 #define TYPE int16_t
111 #define RENAME(name) name
112 #include "ffv1dec_template.c"
113 #undef TYPE
114 #undef RENAME
115 
116 #define TYPE int32_t
117 #define RENAME(name) name ## 32
118 #include "ffv1dec_template.c"
119 
120 static int decode_plane(FFV1Context *s, uint8_t *src,
121  int w, int h, int stride, int plane_index,
122  int pixel_stride)
123 {
124  int x, y;
125  int16_t *sample[2];
126  sample[0] = s->sample_buffer + 3;
127  sample[1] = s->sample_buffer + w + 6 + 3;
128 
129  s->run_index = 0;
130 
131  memset(s->sample_buffer, 0, 2 * (w + 6) * sizeof(*s->sample_buffer));
132 
133  for (y = 0; y < h; y++) {
134  int16_t *temp = sample[0]; // FIXME: try a normal buffer
135 
136  sample[0] = sample[1];
137  sample[1] = temp;
138 
139  sample[1][-1] = sample[0][0];
140  sample[0][w] = sample[0][w - 1];
141 
142  if (s->avctx->bits_per_raw_sample <= 8) {
143  int ret = decode_line(s, w, sample, plane_index, 8);
144  if (ret < 0)
145  return ret;
146  for (x = 0; x < w; x++)
147  src[x*pixel_stride + stride * y] = sample[1][x];
148  } else {
149  int ret = decode_line(s, w, sample, plane_index, s->avctx->bits_per_raw_sample);
150  if (ret < 0)
151  return ret;
152  if (s->packed_at_lsb) {
153  for (x = 0; x < w; x++) {
154  ((uint16_t*)(src + stride*y))[x*pixel_stride] = sample[1][x];
155  }
156  } else {
157  for (x = 0; x < w; x++) {
158  ((uint16_t*)(src + stride*y))[x*pixel_stride] = sample[1][x] << (16 - s->avctx->bits_per_raw_sample) | ((uint16_t **)sample)[1][x] >> (2 * s->avctx->bits_per_raw_sample - 16);
159  }
160  }
161  }
162  }
163  return 0;
164 }
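/*
 * Editor's note (added comment): decode_plane() keeps only two int16_t rows
 * alive: sample[0] is the previously decoded line and sample[1] the line
 * being decoded; they are swapped instead of copied on every iteration.
 * Each row has 3 samples of padding on either side (buffer stride w + 6,
 * base offset +3) so the context predictor in decode_line() can read
 * neighbours just outside [0, w) without per-pixel bounds checks.
 */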
165 
166 static int decode_slice_header(const FFV1Context *f, FFV1Context *fs)
167 {
168  RangeCoder *c = &fs->c;
169  uint8_t state[CONTEXT_SIZE];
170  unsigned ps, i, context_count;
171  memset(state, 128, sizeof(state));
172 
173  av_assert0(f->version > 2);
174 
175  fs->slice_x = get_symbol(c, state, 0) * f->width ;
176  fs->slice_y = get_symbol(c, state, 0) * f->height;
177  fs->slice_width = (get_symbol(c, state, 0) + 1) * f->width + fs->slice_x;
178  fs->slice_height = (get_symbol(c, state, 0) + 1) * f->height + fs->slice_y;
179 
180  fs->slice_x /= f->num_h_slices;
181  fs->slice_y /= f->num_v_slices;
182  fs->slice_width = fs->slice_width /f->num_h_slices - fs->slice_x;
183  fs->slice_height = fs->slice_height/f->num_v_slices - fs->slice_y;
184  if ((unsigned)fs->slice_width > f->width || (unsigned)fs->slice_height > f->height)
185  return -1;
186  if ( (unsigned)fs->slice_x + (uint64_t)fs->slice_width > f->width
187  || (unsigned)fs->slice_y + (uint64_t)fs->slice_height > f->height)
188  return -1;
189 
190  if (fs->ac == AC_GOLOMB_RICE && fs->slice_width >= (1<<23))
191  return AVERROR_INVALIDDATA;
192 
193  for (i = 0; i < f->plane_count; i++) {
194  PlaneContext * const p = &fs->plane[i];
195  int idx = get_symbol(c, state, 0);
196  if (idx >= (unsigned)f->quant_table_count) {
197  av_log(f->avctx, AV_LOG_ERROR, "quant_table_index out of range\n");
198  return -1;
199  }
200  p->quant_table_index = idx;
201  memcpy(p->quant_table, f->quant_tables[idx], sizeof(p->quant_table));
202  context_count = f->context_count[idx];
203 
204  if (p->context_count < context_count) {
205  av_freep(&p->state);
206  av_freep(&p->vlc_state);
207  }
208  p->context_count = context_count;
209  }
210 
211  ps = get_symbol(c, state, 0);
212  if (ps == 1) {
213  f->cur->interlaced_frame = 1;
214  f->cur->top_field_first = 1;
215  } else if (ps == 2) {
216  f->cur->interlaced_frame = 1;
217  f->cur->top_field_first = 0;
218  } else if (ps == 3) {
219  f->cur->interlaced_frame = 0;
220  }
221  f->cur->sample_aspect_ratio.num = get_symbol(c, state, 0);
222  f->cur->sample_aspect_ratio.den = get_symbol(c, state, 0);
223 
224  if (av_image_check_sar(f->width, f->height,
225  f->cur->sample_aspect_ratio) < 0) {
226  av_log(f->avctx, AV_LOG_WARNING, "ignoring invalid SAR: %u/%u\n",
227  f->cur->sample_aspect_ratio.num,
228  f->cur->sample_aspect_ratio.den);
229  f->cur->sample_aspect_ratio = (AVRational){ 0, 1 };
230  }
231 
232  if (fs->version > 3) {
233  fs->slice_reset_contexts = get_rac(c, state);
234  fs->slice_coding_mode = get_symbol(c, state, 0);
235  if (fs->slice_coding_mode != 1) {
236  fs->slice_rct_by_coef = get_symbol(c, state, 0);
237  fs->slice_rct_ry_coef = get_symbol(c, state, 0);
238  if ((uint64_t)fs->slice_rct_by_coef + (uint64_t)fs->slice_rct_ry_coef > 4) {
239  av_log(f->avctx, AV_LOG_ERROR, "slice_rct_y_coef out of range\n");
240  return AVERROR_INVALIDDATA;
241  }
242  }
243  }
244 
245  return 0;
246 }
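/*
 * Editor's note (added comment): slice geometry is coded on the slice grid,
 * not in pixels. The four symbols read above are the slice column, row,
 * width-1 and height-1 in grid units; multiplying by the frame size first
 * and dividing by num_h_slices/num_v_slices afterwards converts them to
 * pixel coordinates. Example: width = 1920, num_h_slices = 3, coded column
 * 1 and coded width-1 = 0 give slice_x = 1*1920/3 = 640 and
 * slice_width = 2*1920/3 - 640 = 640.
 */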
247 
248 static int decode_slice(AVCodecContext *c, void *arg)
249 {
250  FFV1Context *fs = *(void **)arg;
251  FFV1Context *f = fs->avctx->priv_data;
252  int width, height, x, y, ret;
253  const int ps = av_pix_fmt_desc_get(c->pix_fmt)->comp[0].step;
254  AVFrame * const p = f->cur;
255  int i, si;
256 
257  for( si=0; fs != f->slice_context[si]; si ++)
258  ;
259 
260  if(f->fsrc && !p->key_frame)
261  ff_thread_await_progress(&f->last_picture, si, 0);
262 
263  if(f->fsrc && !p->key_frame) {
264  FFV1Context *fssrc = f->fsrc->slice_context[si];
265  FFV1Context *fsdst = f->slice_context[si];
266  av_assert1(fsdst->plane_count == fssrc->plane_count);
267  av_assert1(fsdst == fs);
268 
269  if (!p->key_frame)
270  fsdst->slice_damaged |= fssrc->slice_damaged;
271 
272  for (i = 0; i < f->plane_count; i++) {
273  PlaneContext *psrc = &fssrc->plane[i];
274  PlaneContext *pdst = &fsdst->plane[i];
275 
276  av_free(pdst->state);
277  av_free(pdst->vlc_state);
278  memcpy(pdst, psrc, sizeof(*pdst));
279  pdst->state = NULL;
280  pdst->vlc_state = NULL;
281 
282  if (fssrc->ac) {
283  pdst->state = av_malloc_array(CONTEXT_SIZE, psrc->context_count);
284  memcpy(pdst->state, psrc->state, CONTEXT_SIZE * psrc->context_count);
285  } else {
286  pdst->vlc_state = av_malloc_array(sizeof(*pdst->vlc_state), psrc->context_count);
287  memcpy(pdst->vlc_state, psrc->vlc_state, sizeof(*pdst->vlc_state) * psrc->context_count);
288  }
289  }
290  }
291 
292  fs->slice_rct_by_coef = 1;
293  fs->slice_rct_ry_coef = 1;
294 
295  if (f->version > 2) {
296  if (ff_ffv1_init_slice_state(f, fs) < 0)
297  return AVERROR(ENOMEM);
298  if (decode_slice_header(f, fs) < 0) {
299  fs->slice_x = fs->slice_y = fs->slice_height = fs->slice_width = 0;
300  fs->slice_damaged = 1;
301  return AVERROR_INVALIDDATA;
302  }
303  }
304  if ((ret = ff_ffv1_init_slice_state(f, fs)) < 0)
305  return ret;
306  if (f->cur->key_frame || fs->slice_reset_contexts) {
307  ff_ffv1_clear_slice_state(f, fs);
308  } else if (fs->slice_damaged) {
309  return AVERROR_INVALIDDATA;
310  }
311 
312  width = fs->slice_width;
313  height = fs->slice_height;
314  x = fs->slice_x;
315  y = fs->slice_y;
316 
317  if (fs->ac == AC_GOLOMB_RICE) {
318  if (f->version == 3 && f->micro_version > 1 || f->version > 3)
319  get_rac(&fs->c, (uint8_t[]) { 129 });
320  fs->ac_byte_count = f->version > 2 || (!x && !y) ? fs->c.bytestream - fs->c.bytestream_start - 1 : 0;
321  init_get_bits(&fs->gb,
322  fs->c.bytestream_start + fs->ac_byte_count,
323  (fs->c.bytestream_end - fs->c.bytestream_start - fs->ac_byte_count) * 8);
324  }
325 
326  av_assert1(width && height);
327  if (f->colorspace == 0 && (f->chroma_planes || !fs->transparency)) {
328  const int chroma_width = AV_CEIL_RSHIFT(width, f->chroma_h_shift);
329  const int chroma_height = AV_CEIL_RSHIFT(height, f->chroma_v_shift);
330  const int cx = x >> f->chroma_h_shift;
331  const int cy = y >> f->chroma_v_shift;
332  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0], width, height, p->linesize[0], 0, 1);
333 
334  if (f->chroma_planes) {
335  decode_plane(fs, p->data[1] + ps*cx+cy*p->linesize[1], chroma_width, chroma_height, p->linesize[1], 1, 1);
336  decode_plane(fs, p->data[2] + ps*cx+cy*p->linesize[2], chroma_width, chroma_height, p->linesize[2], 1, 1);
337  }
338  if (fs->transparency)
339  decode_plane(fs, p->data[3] + ps*x + y*p->linesize[3], width, height, p->linesize[3], (f->version >= 4 && !f->chroma_planes) ? 1 : 2, 1);
340  } else if (f->colorspace == 0) {
341  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0] , width, height, p->linesize[0], 0, 2);
342  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0] + 1, width, height, p->linesize[0], 1, 2);
343  } else if (f->use32bit) {
344  uint8_t *planes[4] = { p->data[0] + ps * x + y * p->linesize[0],
345  p->data[1] + ps * x + y * p->linesize[1],
346  p->data[2] + ps * x + y * p->linesize[2],
347  p->data[3] + ps * x + y * p->linesize[3] };
348  decode_rgb_frame32(fs, planes, width, height, p->linesize);
349  } else {
350  uint8_t *planes[4] = { p->data[0] + ps * x + y * p->linesize[0],
351  p->data[1] + ps * x + y * p->linesize[1],
352  p->data[2] + ps * x + y * p->linesize[2],
353  p->data[3] + ps * x + y * p->linesize[3] };
354  decode_rgb_frame(fs, planes, width, height, p->linesize);
355  }
356  if (fs->ac != AC_GOLOMB_RICE && f->version > 2) {
357  int v;
358  get_rac(&fs->c, (uint8_t[]) { 129 });
359  v = fs->c.bytestream_end - fs->c.bytestream - 2 - 5*f->ec;
360  if (v) {
361  av_log(f->avctx, AV_LOG_ERROR, "bytestream end mismatching by %d\n", v);
362  fs->slice_damaged = 1;
363  }
364  }
365 
366  ff_thread_report_progress(&f->picture, si, 0);
367 
368  return 0;
369 }
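/*
 * Editor's note (added comment): the "2 + 5*f->ec" / "3 + 5*!!f->ec" offsets
 * refer to the per-slice footer of FFV1 version 3 and later: a 3-byte slice
 * size plus, when error correction (ec) is enabled, 1 byte of error status
 * and a 4-byte slice CRC, i.e. 5 extra bytes per slice.
 */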
370 
371 static int read_quant_table(RangeCoder *c, int16_t *quant_table, int scale)
372 {
373  int v;
374  int i = 0;
375  uint8_t state[CONTEXT_SIZE];
376 
377  memset(state, 128, sizeof(state));
378 
379  for (v = 0; i < 128; v++) {
380  unsigned len = get_symbol(c, state, 0) + 1U;
381 
382  if (len > 128 - i || !len)
383  return AVERROR_INVALIDDATA;
384 
385  while (len--) {
386  quant_table[i] = scale * v;
387  i++;
388  }
389  }
390 
391  for (i = 1; i < 128; i++)
392  quant_table[256 - i] = -quant_table[i];
393  quant_table[128] = -quant_table[127];
394 
395  return 2 * v - 1;
396 }
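/*
 * Editor's note (added comment): the quantization table is transmitted
 * run-length coded over its first 128 entries: each symbol is a run length
 * len-1, and the run assigns the current level v (times scale) to the next
 * len entries. The upper half is then mirrored so that
 * quant_table[256 - i] == -quant_table[i], and the return value 2*v - 1 is
 * the number of distinct levels including the negative ones.
 */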
397 
398 static int read_quant_tables(RangeCoder *c,
399  int16_t quant_table[MAX_CONTEXT_INPUTS][256])
400 {
401  int i;
402  int context_count = 1;
403 
404  for (i = 0; i < 5; i++) {
405  int ret = read_quant_table(c, quant_table[i], context_count);
406  if (ret < 0)
407  return ret;
408  context_count *= ret;
409  if (context_count > 32768U) {
410  return AVERROR_INVALIDDATA;
411  }
412  }
413  return (context_count + 1) / 2;
414 }
415 
416 static int read_extra_header(FFV1Context *f)
417 {
418  RangeCoder *const c = &f->c;
419  uint8_t state[CONTEXT_SIZE];
420  int i, j, k, ret;
421  uint8_t state2[32][CONTEXT_SIZE];
422  unsigned crc = 0;
423 
424  memset(state2, 128, sizeof(state2));
425  memset(state, 128, sizeof(state));
426 
427  ff_init_range_decoder(c, f->avctx->extradata, f->avctx->extradata_size);
428  ff_build_rac_states(c, 0.05 * (1LL << 32), 256 - 8);
429 
430  f->version = get_symbol(c, state, 0);
431  if (f->version < 2) {
432  av_log(f->avctx, AV_LOG_ERROR, "Invalid version in global header\n");
433  return AVERROR_INVALIDDATA;
434  }
435  if (f->version > 2) {
436  c->bytestream_end -= 4;
437  f->micro_version = get_symbol(c, state, 0);
438  if (f->micro_version < 0)
439  return AVERROR_INVALIDDATA;
440  }
441  f->ac = get_symbol(c, state, 0);
442 
443  if (f->ac == AC_RANGE_CUSTOM_TAB) {
444  for (i = 1; i < 256; i++)
445  f->state_transition[i] = get_symbol(c, state, 1) + c->one_state[i];
446  }
447 
448  f->colorspace = get_symbol(c, state, 0); //YUV cs type
449  f->avctx->bits_per_raw_sample = get_symbol(c, state, 0);
450  f->chroma_planes = get_rac(c, state);
451  f->chroma_h_shift = get_symbol(c, state, 0);
452  f->chroma_v_shift = get_symbol(c, state, 0);
453  f->transparency = get_rac(c, state);
454  f->plane_count = 1 + (f->chroma_planes || f->version<4) + f->transparency;
455  f->num_h_slices = 1 + get_symbol(c, state, 0);
456  f->num_v_slices = 1 + get_symbol(c, state, 0);
457 
458  if (f->chroma_h_shift > 4U || f->chroma_v_shift > 4U) {
459  av_log(f->avctx, AV_LOG_ERROR, "chroma shift parameters %d %d are invalid\n",
460  f->chroma_h_shift, f->chroma_v_shift);
461  return AVERROR_INVALIDDATA;
462  }
463 
464  if (f->num_h_slices > (unsigned)f->width || !f->num_h_slices ||
465  f->num_v_slices > (unsigned)f->height || !f->num_v_slices
466  ) {
467  av_log(f->avctx, AV_LOG_ERROR, "slice count invalid\n");
468  return AVERROR_INVALIDDATA;
469  }
470 
471  f->quant_table_count = get_symbol(c, state, 0);
472  if (f->quant_table_count > (unsigned)MAX_QUANT_TABLES || !f->quant_table_count) {
473  av_log(f->avctx, AV_LOG_ERROR, "quant table count %d is invalid\n", f->quant_table_count);
474  f->quant_table_count = 0;
475  return AVERROR_INVALIDDATA;
476  }
477 
478  for (i = 0; i < f->quant_table_count; i++) {
479  f->context_count[i] = read_quant_tables(c, f->quant_tables[i]);
480  if (f->context_count[i] < 0) {
481  av_log(f->avctx, AV_LOG_ERROR, "read_quant_table error\n");
482  return AVERROR_INVALIDDATA;
483  }
484  }
485  if ((ret = ff_ffv1_allocate_initial_states(f)) < 0)
486  return ret;
487 
488  for (i = 0; i < f->quant_table_count; i++)
489  if (get_rac(c, state)) {
490  for (j = 0; j < f->context_count[i]; j++)
491  for (k = 0; k < CONTEXT_SIZE; k++) {
492  int pred = j ? f->initial_states[i][j - 1][k] : 128;
493  f->initial_states[i][j][k] =
494  (pred + get_symbol(c, state2[k], 1)) & 0xFF;
495  }
496  }
497 
498  if (f->version > 2) {
499  f->ec = get_symbol(c, state, 0);
500  if (f->micro_version > 2)
501  f->intra = get_symbol(c, state, 0);
502  }
503 
504  if (f->version > 2) {
505  unsigned v;
506  v = av_crc(av_crc_get_table(AV_CRC_32_IEEE), 0,
507  f->avctx->extradata, f->avctx->extradata_size);
508  if (v || f->avctx->extradata_size < 4) {
509  av_log(f->avctx, AV_LOG_ERROR, "CRC mismatch %X!\n", v);
510  return AVERROR_INVALIDDATA;
511  }
512  crc = AV_RB32(f->avctx->extradata + f->avctx->extradata_size - 4);
513  }
514 
515  if (f->avctx->debug & FF_DEBUG_PICT_INFO)
516  av_log(f->avctx, AV_LOG_DEBUG,
517  "global: ver:%d.%d, coder:%d, colorspace: %d bpr:%d chroma:%d(%d:%d), alpha:%d slices:%dx%d qtabs:%d ec:%d intra:%d CRC:0x%08X\n",
518  f->version, f->micro_version,
519  f->ac,
520  f->colorspace,
521  f->avctx->bits_per_raw_sample,
522  f->chroma_planes, f->chroma_h_shift, f->chroma_v_shift,
523  f->transparency,
524  f->num_h_slices, f->num_v_slices,
525  f->quant_table_count,
526  f->ec,
527  f->intra,
528  crc
529  );
530  return 0;
531 }
532 
533 static int read_header(FFV1Context *f)
534 {
535  uint8_t state[CONTEXT_SIZE];
536  int i, j, context_count = -1; //-1 to avoid warning
537  RangeCoder *const c = &f->slice_context[0]->c;
538 
539  memset(state, 128, sizeof(state));
540 
541  if (f->version < 2) {
542  int chroma_planes, chroma_h_shift, chroma_v_shift, transparency, colorspace, bits_per_raw_sample;
543  unsigned v= get_symbol(c, state, 0);
544  if (v >= 2) {
545  av_log(f->avctx, AV_LOG_ERROR, "invalid version %d in ver01 header\n", v);
546  return AVERROR_INVALIDDATA;
547  }
548  f->version = v;
549  f->ac = get_symbol(c, state, 0);
550 
551  if (f->ac == AC_RANGE_CUSTOM_TAB) {
552  for (i = 1; i < 256; i++) {
553  int st = get_symbol(c, state, 1) + c->one_state[i];
554  if (st < 1 || st > 255) {
555  av_log(f->avctx, AV_LOG_ERROR, "invalid state transition %d\n", st);
556  return AVERROR_INVALIDDATA;
557  }
558  f->state_transition[i] = st;
559  }
560  }
561 
562  colorspace = get_symbol(c, state, 0); //YUV cs type
563  bits_per_raw_sample = f->version > 0 ? get_symbol(c, state, 0) : f->avctx->bits_per_raw_sample;
564  chroma_planes = get_rac(c, state);
565  chroma_h_shift = get_symbol(c, state, 0);
566  chroma_v_shift = get_symbol(c, state, 0);
567  transparency = get_rac(c, state);
568  if (colorspace == 0 && f->avctx->skip_alpha)
569  transparency = 0;
570 
571  if (f->plane_count) {
572  if (colorspace != f->colorspace ||
573  bits_per_raw_sample != f->avctx->bits_per_raw_sample ||
574  chroma_planes != f->chroma_planes ||
575  chroma_h_shift != f->chroma_h_shift ||
576  chroma_v_shift != f->chroma_v_shift ||
577  transparency != f->transparency) {
578  av_log(f->avctx, AV_LOG_ERROR, "Invalid change of global parameters\n");
579  return AVERROR_INVALIDDATA;
580  }
581  }
582 
583  if (chroma_h_shift > 4U || chroma_v_shift > 4U) {
584  av_log(f->avctx, AV_LOG_ERROR, "chroma shift parameters %d %d are invalid\n",
585  chroma_h_shift, chroma_v_shift);
586  return AVERROR_INVALIDDATA;
587  }
588 
589  f->colorspace = colorspace;
590  f->avctx->bits_per_raw_sample = bits_per_raw_sample;
591  f->chroma_planes = chroma_planes;
592  f->chroma_h_shift = chroma_h_shift;
593  f->chroma_v_shift = chroma_v_shift;
594  f->transparency = transparency;
595 
596  f->plane_count = 2 + f->transparency;
597  }
598 
599  if (f->colorspace == 0) {
600  if (!f->transparency && !f->chroma_planes) {
601  if (f->avctx->bits_per_raw_sample <= 8)
602  f->avctx->pix_fmt = AV_PIX_FMT_GRAY8;
603  else if (f->avctx->bits_per_raw_sample == 9) {
604  f->packed_at_lsb = 1;
605  f->avctx->pix_fmt = AV_PIX_FMT_GRAY9;
606  } else if (f->avctx->bits_per_raw_sample == 10) {
607  f->packed_at_lsb = 1;
608  f->avctx->pix_fmt = AV_PIX_FMT_GRAY10;
609  } else if (f->avctx->bits_per_raw_sample == 12) {
610  f->packed_at_lsb = 1;
611  f->avctx->pix_fmt = AV_PIX_FMT_GRAY12;
612  } else if (f->avctx->bits_per_raw_sample == 16) {
613  f->packed_at_lsb = 1;
614  f->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
615  } else if (f->avctx->bits_per_raw_sample < 16) {
616  f->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
617  } else
618  return AVERROR(ENOSYS);
619  } else if (f->transparency && !f->chroma_planes) {
620  if (f->avctx->bits_per_raw_sample <= 8)
621  f->avctx->pix_fmt = AV_PIX_FMT_YA8;
622  else
623  return AVERROR(ENOSYS);
624  } else if (f->avctx->bits_per_raw_sample<=8 && !f->transparency) {
625  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
626  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P; break;
627  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P; break;
628  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P; break;
629  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P; break;
630  case 0x20: f->avctx->pix_fmt = AV_PIX_FMT_YUV411P; break;
631  case 0x22: f->avctx->pix_fmt = AV_PIX_FMT_YUV410P; break;
632  }
633  } else if (f->avctx->bits_per_raw_sample <= 8 && f->transparency) {
634  switch(16*f->chroma_h_shift + f->chroma_v_shift) {
635  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P; break;
636  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P; break;
637  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P; break;
638  }
639  } else if (f->avctx->bits_per_raw_sample == 9 && !f->transparency) {
640  f->packed_at_lsb = 1;
641  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
642  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P9; break;
643  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P9; break;
644  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P9; break;
645  }
646  } else if (f->avctx->bits_per_raw_sample == 9 && f->transparency) {
647  f->packed_at_lsb = 1;
648  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
649  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P9; break;
650  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P9; break;
651  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P9; break;
652  }
653  } else if (f->avctx->bits_per_raw_sample == 10 && !f->transparency) {
654  f->packed_at_lsb = 1;
655  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
656  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P10; break;
657  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P10; break;
658  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P10; break;
659  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P10; break;
660  }
661  } else if (f->avctx->bits_per_raw_sample == 10 && f->transparency) {
662  f->packed_at_lsb = 1;
663  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
664  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P10; break;
665  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P10; break;
666  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P10; break;
667  }
668  } else if (f->avctx->bits_per_raw_sample == 12 && !f->transparency) {
669  f->packed_at_lsb = 1;
670  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
671  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P12; break;
672  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P12; break;
673  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P12; break;
674  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P12; break;
675  }
676  } else if (f->avctx->bits_per_raw_sample == 14 && !f->transparency) {
677  f->packed_at_lsb = 1;
678  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
679  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P14; break;
680  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P14; break;
681  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P14; break;
682  }
683  } else if (f->avctx->bits_per_raw_sample == 16 && !f->transparency){
684  f->packed_at_lsb = 1;
685  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
686  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P16; break;
687  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P16; break;
688  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P16; break;
689  }
690  } else if (f->avctx->bits_per_raw_sample == 16 && f->transparency){
691  f->packed_at_lsb = 1;
692  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
693  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P16; break;
694  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P16; break;
695  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P16; break;
696  }
697  }
698  } else if (f->colorspace == 1) {
699  if (f->chroma_h_shift || f->chroma_v_shift) {
700  av_log(f->avctx, AV_LOG_ERROR,
701  "chroma subsampling not supported in this colorspace\n");
702  return AVERROR(ENOSYS);
703  }
704  if ( f->avctx->bits_per_raw_sample <= 8 && !f->transparency)
705  f->avctx->pix_fmt = AV_PIX_FMT_0RGB32;
706  else if (f->avctx->bits_per_raw_sample <= 8 && f->transparency)
707  f->avctx->pix_fmt = AV_PIX_FMT_RGB32;
708  else if (f->avctx->bits_per_raw_sample == 9 && !f->transparency)
709  f->avctx->pix_fmt = AV_PIX_FMT_GBRP9;
710  else if (f->avctx->bits_per_raw_sample == 10 && !f->transparency)
711  f->avctx->pix_fmt = AV_PIX_FMT_GBRP10;
712  else if (f->avctx->bits_per_raw_sample == 10 && f->transparency)
713  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
714  else if (f->avctx->bits_per_raw_sample == 12 && !f->transparency)
715  f->avctx->pix_fmt = AV_PIX_FMT_GBRP12;
716  else if (f->avctx->bits_per_raw_sample == 12 && f->transparency)
717  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP12;
718  else if (f->avctx->bits_per_raw_sample == 14 && !f->transparency)
719  f->avctx->pix_fmt = AV_PIX_FMT_GBRP14;
720  else if (f->avctx->bits_per_raw_sample == 16 && !f->transparency) {
721  f->avctx->pix_fmt = AV_PIX_FMT_GBRP16;
722  f->use32bit = 1;
723  }
724  else if (f->avctx->bits_per_raw_sample == 16 && f->transparency) {
725  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP16;
726  f->use32bit = 1;
727  }
728  } else {
729  av_log(f->avctx, AV_LOG_ERROR, "colorspace not supported\n");
730  return AVERROR(ENOSYS);
731  }
732  if (f->avctx->pix_fmt == AV_PIX_FMT_NONE) {
733  av_log(f->avctx, AV_LOG_ERROR, "format not supported\n");
734  return AVERROR(ENOSYS);
735  }
736 
737  ff_dlog(f->avctx, "%d %d %d\n",
738  f->chroma_h_shift, f->chroma_v_shift, f->avctx->pix_fmt);
739  if (f->version < 2) {
740  context_count = read_quant_tables(c, f->quant_table);
741  if (context_count < 0) {
742  av_log(f->avctx, AV_LOG_ERROR, "read_quant_table error\n");
743  return AVERROR_INVALIDDATA;
744  }
745  f->slice_count = f->max_slice_count;
746  } else if (f->version < 3) {
747  f->slice_count = get_symbol(c, state, 0);
748  } else {
749  const uint8_t *p = c->bytestream_end;
750  for (f->slice_count = 0;
751  f->slice_count < MAX_SLICES && 3 + 5*!!f->ec < p - c->bytestream_start;
752  f->slice_count++) {
753  int trailer = 3 + 5*!!f->ec;
754  int size = AV_RB24(p-trailer);
755  if (size + trailer > p - c->bytestream_start)
756  break;
757  p -= size + trailer;
758  }
759  }
760  if (f->slice_count > (unsigned)MAX_SLICES || f->slice_count <= 0 || f->slice_count > f->max_slice_count) {
761  av_log(f->avctx, AV_LOG_ERROR, "slice count %d is invalid (max=%d)\n", f->slice_count, f->max_slice_count);
762  return AVERROR_INVALIDDATA;
763  }
764 
765  for (j = 0; j < f->slice_count; j++) {
766  FFV1Context *fs = f->slice_context[j];
767  fs->ac = f->ac;
768  fs->packed_at_lsb = f->packed_at_lsb;
769 
770  fs->slice_damaged = 0;
771 
772  if (f->version == 2) {
773  fs->slice_x = get_symbol(c, state, 0) * f->width ;
774  fs->slice_y = get_symbol(c, state, 0) * f->height;
775  fs->slice_width = (get_symbol(c, state, 0) + 1) * f->width + fs->slice_x;
776  fs->slice_height = (get_symbol(c, state, 0) + 1) * f->height + fs->slice_y;
777 
778  fs->slice_x /= f->num_h_slices;
779  fs->slice_y /= f->num_v_slices;
780  fs->slice_width = fs->slice_width / f->num_h_slices - fs->slice_x;
781  fs->slice_height = fs->slice_height / f->num_v_slices - fs->slice_y;
782  if ((unsigned)fs->slice_width > f->width ||
783  (unsigned)fs->slice_height > f->height)
784  return AVERROR_INVALIDDATA;
785  if ( (unsigned)fs->slice_x + (uint64_t)fs->slice_width > f->width
786  || (unsigned)fs->slice_y + (uint64_t)fs->slice_height > f->height)
787  return AVERROR_INVALIDDATA;
788  }
789 
790  for (i = 0; i < f->plane_count; i++) {
791  PlaneContext *const p = &fs->plane[i];
792 
793  if (f->version == 2) {
794  int idx = get_symbol(c, state, 0);
795  if (idx >= (unsigned)f->quant_table_count) {
796  av_log(f->avctx, AV_LOG_ERROR,
797  "quant_table_index out of range\n");
798  return AVERROR_INVALIDDATA;
799  }
800  p->quant_table_index = idx;
801  memcpy(p->quant_table, f->quant_tables[idx],
802  sizeof(p->quant_table));
803  context_count = f->context_count[idx];
804  } else {
805  memcpy(p->quant_table, f->quant_table, sizeof(p->quant_table));
806  }
807 
808  if (f->version <= 2) {
809  av_assert0(context_count >= 0);
810  if (p->context_count < context_count) {
811  av_freep(&p->state);
812  av_freep(&p->vlc_state);
813  }
814  p->context_count = context_count;
815  }
816  }
817  }
818  return 0;
819 }
820 
821 static av_cold int decode_init(AVCodecContext *avctx)
822 {
823  FFV1Context *f = avctx->priv_data;
824  int ret;
825 
826  if ((ret = ff_ffv1_common_init(avctx)) < 0)
827  return ret;
828 
829  f->picture.f = av_frame_alloc();
830  f->last_picture.f = av_frame_alloc();
831  if (!f->picture.f || !f->last_picture.f)
832  return AVERROR(ENOMEM);
833 
834  if (avctx->extradata_size > 0 && (ret = read_extra_header(f)) < 0)
835  return ret;
836 
837  if ((ret = ff_ffv1_init_slice_contexts(f)) < 0)
838  return ret;
839 
840  return 0;
841 }
842 
843 static int decode_frame(AVCodecContext *avctx, AVFrame *rframe,
844  int *got_frame, AVPacket *avpkt)
845 {
846  uint8_t *buf = avpkt->data;
847  int buf_size = avpkt->size;
848  FFV1Context *f = avctx->priv_data;
849  RangeCoder *const c = &f->slice_context[0]->c;
850  int i, ret;
851  uint8_t keystate = 128;
852  uint8_t *buf_p;
853  AVFrame *p;
854 
855  if (f->last_picture.f)
856  ff_thread_release_ext_buffer(avctx, &f->last_picture);
857  FFSWAP(ThreadFrame, f->picture, f->last_picture);
858 
859  f->cur = p = f->picture.f;
860 
861  if (f->version < 3 && avctx->field_order > AV_FIELD_PROGRESSIVE) {
862  /* we have interlaced material flagged in container */
863  p->interlaced_frame = 1;
864  if (avctx->field_order == AV_FIELD_TT || avctx->field_order == AV_FIELD_TB)
865  p->top_field_first = 1;
866  }
867 
868  f->avctx = avctx;
869  ff_init_range_decoder(c, buf, buf_size);
870  ff_build_rac_states(c, 0.05 * (1LL << 32), 256 - 8);
871 
872  p->pict_type = AV_PICTURE_TYPE_I; //FIXME I vs. P
873  if (get_rac(c, &keystate)) {
874  p->key_frame = 1;
875  f->key_frame_ok = 0;
876  if ((ret = read_header(f)) < 0)
877  return ret;
878  f->key_frame_ok = 1;
879  } else {
880  if (!f->key_frame_ok) {
881  av_log(avctx, AV_LOG_ERROR,
882  "Cannot decode non-keyframe without valid keyframe\n");
883  return AVERROR_INVALIDDATA;
884  }
885  p->key_frame = 0;
886  }
887 
888  if (f->ac != AC_GOLOMB_RICE) {
889  if (buf_size < avctx->width * avctx->height / (128*8))
890  return AVERROR_INVALIDDATA;
891  } else {
892  int w = avctx->width;
893  int s = 1 + w / (1<<23);
894 
895  w /= s;
896 
897  for (i = 0; w > (1<<ff_log2_run[i]); i++)
898  w -= ff_log2_run[i];
899  if (buf_size < (avctx->height + i + 6) / 8 * s)
900  return AVERROR_INVALIDDATA;
901  }
902 
903  ret = ff_thread_get_ext_buffer(avctx, &f->picture, AV_GET_BUFFER_FLAG_REF);
904  if (ret < 0)
905  return ret;
906 
907  if (avctx->debug & FF_DEBUG_PICT_INFO)
908  av_log(avctx, AV_LOG_DEBUG, "ver:%d keyframe:%d coder:%d ec:%d slices:%d bps:%d\n",
909  f->version, p->key_frame, f->ac, f->ec, f->slice_count, f->avctx->bits_per_raw_sample);
910 
911  ff_thread_finish_setup(avctx);
912 
913  buf_p = buf + buf_size;
914  for (i = f->slice_count - 1; i >= 0; i--) {
915  FFV1Context *fs = f->slice_context[i];
916  int trailer = 3 + 5*!!f->ec;
917  int v;
918 
919  if (i || f->version > 2) {
920  if (trailer > buf_p - buf) v = INT_MAX;
921  else v = AV_RB24(buf_p-trailer) + trailer;
922  } else v = buf_p - c->bytestream_start;
923  if (buf_p - c->bytestream_start < v) {
924  av_log(avctx, AV_LOG_ERROR, "Slice pointer chain broken\n");
925  ff_thread_report_progress(&f->picture, INT_MAX, 0);
926  return AVERROR_INVALIDDATA;
927  }
928  buf_p -= v;
929 
930  if (f->ec) {
931  unsigned crc = av_crc(av_crc_get_table(AV_CRC_32_IEEE), 0, buf_p, v);
932  if (crc) {
933  int64_t ts = avpkt->pts != AV_NOPTS_VALUE ? avpkt->pts : avpkt->dts;
934  av_log(f->avctx, AV_LOG_ERROR, "slice CRC mismatch %X!", crc);
935  if (ts != AV_NOPTS_VALUE && avctx->pkt_timebase.num) {
936  av_log(f->avctx, AV_LOG_ERROR, "at %f seconds\n", ts*av_q2d(avctx->pkt_timebase));
937  } else if (ts != AV_NOPTS_VALUE) {
938  av_log(f->avctx, AV_LOG_ERROR, "at %"PRId64"\n", ts);
939  } else {
940  av_log(f->avctx, AV_LOG_ERROR, "\n");
941  }
942  fs->slice_damaged = 1;
943  }
944  if (avctx->debug & FF_DEBUG_PICT_INFO) {
945  av_log(avctx, AV_LOG_DEBUG, "slice %d, CRC: 0x%08"PRIX32"\n", i, AV_RB32(buf_p + v - 4));
946  }
947  }
948 
949  if (i) {
950  ff_init_range_decoder(&fs->c, buf_p, v);
951  } else
952  fs->c.bytestream_end = buf_p + v;
953 
954  fs->avctx = avctx;
955  }
956 
957  avctx->execute(avctx,
958  decode_slice,
959  &f->slice_context[0],
960  NULL,
961  f->slice_count,
962  sizeof(void*));
963 
964  for (i = f->slice_count - 1; i >= 0; i--) {
965  FFV1Context *fs = f->slice_context[i];
966  int j;
967  if (fs->slice_damaged && f->last_picture.f->data[0]) {
968  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
969  const uint8_t *src[4];
970  uint8_t *dst[4];
971  ff_thread_await_progress(&f->last_picture, INT_MAX, 0);
972  for (j = 0; j < desc->nb_components; j++) {
973  int pixshift = desc->comp[j].depth > 8;
974  int sh = (j == 1 || j == 2) ? f->chroma_h_shift : 0;
975  int sv = (j == 1 || j == 2) ? f->chroma_v_shift : 0;
976  dst[j] = p->data[j] + p->linesize[j] *
977  (fs->slice_y >> sv) + ((fs->slice_x >> sh) << pixshift);
978  src[j] = f->last_picture.f->data[j] + f->last_picture.f->linesize[j] *
979  (fs->slice_y >> sv) + ((fs->slice_x >> sh) << pixshift);
980 
981  }
982  if (desc->flags & AV_PIX_FMT_FLAG_PAL) {
983  dst[1] = p->data[1];
984  src[1] = f->last_picture.f->data[1];
985  }
986  av_image_copy(dst, p->linesize, src,
987  f->last_picture.f->linesize,
988  avctx->pix_fmt,
989  fs->slice_width,
990  fs->slice_height);
991  }
992  }
993  ff_thread_report_progress(&f->picture, INT_MAX, 0);
994 
995  if (f->last_picture.f)
996  ff_thread_release_ext_buffer(avctx, &f->last_picture);
997  if ((ret = av_frame_ref(rframe, f->picture.f)) < 0)
998  return ret;
999 
1000  *got_frame = 1;
1001 
1002  return buf_size;
1003 }
1004 
1005 static void copy_fields(FFV1Context *fsdst, const FFV1Context *fssrc,
1006  const FFV1Context *fsrc)
1007 {
1008  fsdst->version = fsrc->version;
1009  fsdst->micro_version = fsrc->micro_version;
1010  fsdst->chroma_planes = fsrc->chroma_planes;
1011  fsdst->chroma_h_shift = fsrc->chroma_h_shift;
1012  fsdst->chroma_v_shift = fsrc->chroma_v_shift;
1013  fsdst->transparency = fsrc->transparency;
1014  fsdst->plane_count = fsrc->plane_count;
1015  fsdst->ac = fsrc->ac;
1016  fsdst->colorspace = fsrc->colorspace;
1017 
1018  fsdst->ec = fsrc->ec;
1019  fsdst->intra = fsrc->intra;
1020  fsdst->slice_damaged = fssrc->slice_damaged;
1021  fsdst->key_frame_ok = fsrc->key_frame_ok;
1022 
1023  fsdst->packed_at_lsb = fsrc->packed_at_lsb;
1024  fsdst->slice_count = fsrc->slice_count;
1025  if (fsrc->version<3){
1026  fsdst->slice_x = fssrc->slice_x;
1027  fsdst->slice_y = fssrc->slice_y;
1028  fsdst->slice_width = fssrc->slice_width;
1029  fsdst->slice_height = fssrc->slice_height;
1030  }
1031 }
1032 
1033 #if HAVE_THREADS
1034 static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
1035 {
1036  FFV1Context *fsrc = src->priv_data;
1037  FFV1Context *fdst = dst->priv_data;
1038  int i, ret;
1039 
1040  if (dst == src)
1041  return 0;
1042 
1043  {
1044  ThreadFrame picture = fdst->picture, last_picture = fdst->last_picture;
1045  uint8_t (*initial_states[MAX_QUANT_TABLES])[32];
1046  struct FFV1Context *slice_context[MAX_SLICES];
1047  memcpy(initial_states, fdst->initial_states, sizeof(fdst->initial_states));
1048  memcpy(slice_context, fdst->slice_context , sizeof(fdst->slice_context));
1049 
1050  memcpy(fdst, fsrc, sizeof(*fdst));
1051  memcpy(fdst->initial_states, initial_states, sizeof(fdst->initial_states));
1052  memcpy(fdst->slice_context, slice_context , sizeof(fdst->slice_context));
1053  fdst->picture = picture;
1054  fdst->last_picture = last_picture;
1055  for (i = 0; i<fdst->num_h_slices * fdst->num_v_slices; i++) {
1056  FFV1Context *fssrc = fsrc->slice_context[i];
1057  FFV1Context *fsdst = fdst->slice_context[i];
1058  copy_fields(fsdst, fssrc, fsrc);
1059  }
1060  av_assert0(!fdst->plane[0].state);
1061  av_assert0(!fdst->sample_buffer);
1062  }
1063 
1064  av_assert1(fdst->max_slice_count == fsrc->max_slice_count);
1065 
1066 
1067  ff_thread_release_ext_buffer(dst, &fdst->picture);
1068  if (fsrc->picture.f->data[0]) {
1069  if ((ret = ff_thread_ref_frame(&fdst->picture, &fsrc->picture)) < 0)
1070  return ret;
1071  }
1072 
1073  fdst->fsrc = fsrc;
1074 
1075  return 0;
1076 }
1077 #endif
1078 
1079 static av_cold int ffv1_decode_close(AVCodecContext *avctx)
1080 {
1081  FFV1Context *const s = avctx->priv_data;
1082 
1083  if (s->picture.f) {
1084  ff_thread_release_ext_buffer(avctx, &s->picture);
1085  av_frame_free(&s->picture.f);
1086  }
1087 
1088  if (s->last_picture.f) {
1089  ff_thread_release_ext_buffer(avctx, &s->last_picture);
1090  av_frame_free(&s->last_picture.f);
1091  }
1092  return ff_ffv1_close(avctx);
1093 }
1094 
1095 const FFCodec ff_ffv1_decoder = {
1096  .p.name = "ffv1",
1097  CODEC_LONG_NAME("FFmpeg video codec #1"),
1098  .p.type = AVMEDIA_TYPE_VIDEO,
1099  .p.id = AV_CODEC_ID_FFV1,
1100  .priv_data_size = sizeof(FFV1Context),
1101  .init = decode_init,
1102  .close = ffv1_decode_close,
1103  FF_CODEC_DECODE_CB(decode_frame),
1104  UPDATE_THREAD_CONTEXT(update_thread_context),
1105  .p.capabilities = AV_CODEC_CAP_DR1 |
1106  AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_SLICE_THREADS,
1107  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP |
1108  FF_CODEC_CAP_ALLOCATE_PROGRESS,
1109 };
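For reference, a minimal hypothetical sketch (not part of ffv1dec.c) of how this decoder is reached through the public libavcodec API. The helper name decode_one_ffv1_packet and the reduced error handling are illustrative only; for FFV1 version 2 and later the stream extradata carrying the global header must be copied into the context, typically with avcodec_parameters_to_context(), before avcodec_open2().

#include <libavcodec/avcodec.h>

/* Illustrative helper, not part of FFmpeg. */
static int decode_one_ffv1_packet(const AVPacket *pkt, AVFrame *out)
{
    const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_FFV1);
    AVCodecContext *ctx;
    int ret;

    if (!codec)
        return AVERROR_DECODER_NOT_FOUND;
    ctx = avcodec_alloc_context3(codec);
    if (!ctx)
        return AVERROR(ENOMEM);

    ret = avcodec_open2(ctx, codec, NULL);      /* runs decode_init() above    */
    if (ret >= 0)
        ret = avcodec_send_packet(ctx, pkt);    /* ends up in decode_frame()   */
    if (ret >= 0)
        ret = avcodec_receive_frame(ctx, out);  /* decoded planes in out->data */

    avcodec_free_context(&ctx);
    return ret;
}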