ffv1dec.c
1 /*
2  * FFV1 decoder
3  *
4  * Copyright (c) 2003-2013 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * FF Video Codec 1 (a lossless codec) decoder
26  */
27 
28 #include "libavutil/avassert.h"
29 #include "libavutil/crc.h"
30 #include "libavutil/opt.h"
31 #include "libavutil/imgutils.h"
32 #include "libavutil/pixdesc.h"
33 #include "libavutil/timer.h"
34 #include "avcodec.h"
35 #include "internal.h"
36 #include "get_bits.h"
37 #include "rangecoder.h"
38 #include "golomb.h"
39 #include "mathops.h"
40 #include "ffv1.h"
41 
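/*
 * Range-coded symbol reader: state[0] codes "value is zero", states 1..10
 * code the exponent as a unary run, 11..21 carry the sign and 22..31 the
 * mantissa bits, matching the inline comments below.
 */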
42 static av_flatten int get_symbol_inline(RangeCoder *c, uint8_t *state,
43  int is_signed)
44 {
45  if (get_rac(c, state + 0))
46  return 0;
47  else {
48  int i, e;
49  unsigned a;
50  e = 0;
51  while (get_rac(c, state + 1 + FFMIN(e, 9))) { // 1..10
52  e++;
53  if (e > 31)
54  return AVERROR_INVALIDDATA;
55  }
56 
57  a = 1;
58  for (i = e - 1; i >= 0; i--)
59  a += a + get_rac(c, state + 22 + FFMIN(i, 9)); // 22..31
60 
61  e = -(is_signed && get_rac(c, state + 11 + FFMIN(e, 10))); // 11..21
62  return (a ^ e) - e;
63  }
64 }
65 
66 static av_noinline int get_symbol(RangeCoder *c, uint8_t *state, int is_signed)
67 {
68  return get_symbol_inline(c, state, is_signed);
69 }
70 
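/*
 * Golomb-Rice counterpart of get_symbol(): the Rice parameter k is derived
 * from the running count/error_sum statistics, the decoded residual is
 * bias-corrected and folded back into the valid sample range, and the
 * adaptive VLC state is updated afterwards.
 */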
71 static inline int get_vlc_symbol(GetBitContext *gb, VlcState *const state,
72  int bits)
73 {
74  int k, i, v, ret;
75 
76  i = state->count;
77  k = 0;
78  while (i < state->error_sum) { // FIXME: optimize
79  k++;
80  i += i;
81  }
82 
83  v = get_sr_golomb(gb, k, 12, bits);
84  ff_dlog(NULL, "v:%d bias:%d error:%d drift:%d count:%d k:%d",
85  v, state->bias, state->error_sum, state->drift, state->count, k);
86 
87  v ^= ((2 * state->drift + state->count) >> 31);
88 
89  ret = fold(v + state->bias, bits);
90 
91  update_vlc_state(state, v);
92 
93  return ret;
94 }
95 
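/*
 * Report whether the slice input is exhausted: too much overread in
 * range-coder mode, or no bits left in Golomb-Rice mode.
 */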
96 static int is_input_end(FFV1Context *s)
97 {
98  if (s->ac != AC_GOLOMB_RICE) {
99  RangeCoder *const c = &s->c;
100  if (c->overread > MAX_OVERREAD)
101  return AVERROR_INVALIDDATA;
102  } else {
103  if (get_bits_left(&s->gb) < 1)
104  return AVERROR_INVALIDDATA;
105  }
106  return 0;
107 }
108 
109 #define TYPE int16_t
110 #define RENAME(name) name
111 #include "ffv1dec_template.c"
112 #undef TYPE
113 #undef RENAME
114 
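/*
 * The decode templates are instantiated twice: above with int16_t samples
 * and plain names, below with int32_t samples and a "32" suffix (e.g.
 * decode_rgb_frame32()), used when more than 16 bits of intermediate
 * precision are needed (see f->use32bit).
 */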
115 #define TYPE int32_t
116 #define RENAME(name) name ## 32
117 #include "ffv1dec_template.c"
118 
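/*
 * Decode one plane (or one interleaved component when pixel_stride is 2).
 * Two padded rows of int16_t samples are kept and swapped every line so
 * that the previous row provides the prediction context; decoded samples
 * are then stored as 8- or 16-bit pixels depending on bits_per_raw_sample
 * and packed_at_lsb.
 */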
119 static int decode_plane(FFV1Context *s, uint8_t *src,
120  int w, int h, int stride, int plane_index,
121  int pixel_stride)
122 {
123  int x, y;
124  int16_t *sample[2];
125  sample[0] = s->sample_buffer + 3;
126  sample[1] = s->sample_buffer + w + 6 + 3;
127 
128  s->run_index = 0;
129 
130  memset(s->sample_buffer, 0, 2 * (w + 6) * sizeof(*s->sample_buffer));
131 
132  for (y = 0; y < h; y++) {
133  int16_t *temp = sample[0]; // FIXME: try a normal buffer
134 
135  sample[0] = sample[1];
136  sample[1] = temp;
137 
138  sample[1][-1] = sample[0][0];
139  sample[0][w] = sample[0][w - 1];
140 
141 // { START_TIMER
142  if (s->avctx->bits_per_raw_sample <= 8) {
143  int ret = decode_line(s, w, sample, plane_index, 8);
144  if (ret < 0)
145  return ret;
146  for (x = 0; x < w; x++)
147  src[x*pixel_stride + stride * y] = sample[1][x];
148  } else {
149  int ret = decode_line(s, w, sample, plane_index, s->avctx->bits_per_raw_sample);
150  if (ret < 0)
151  return ret;
152  if (s->packed_at_lsb) {
153  for (x = 0; x < w; x++) {
154  ((uint16_t*)(src + stride*y))[x*pixel_stride] = sample[1][x];
155  }
156  } else {
157  for (x = 0; x < w; x++) {
158  ((uint16_t*)(src + stride*y))[x*pixel_stride] = sample[1][x] << (16 - s->avctx->bits_per_raw_sample) | ((uint16_t **)sample)[1][x] >> (2 * s->avctx->bits_per_raw_sample - 16);
159  }
160  }
161  }
162 // STOP_TIMER("decode-line") }
163  }
164  return 0;
165 }
166 
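/*
 * Parse the per-slice header (version > 2): slice position and size on the
 * slice grid, per-plane quant table index, picture structure, sample aspect
 * ratio and, for version > 3, the context-reset flag and RCT coefficients.
 */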
167 static int decode_slice_header(FFV1Context *f, FFV1Context *fs)
168 {
169  RangeCoder *c = &fs->c;
170  uint8_t state[CONTEXT_SIZE];
171  unsigned ps, i, context_count;
172  memset(state, 128, sizeof(state));
173 
174  av_assert0(f->version > 2);
175 
176  fs->slice_x = get_symbol(c, state, 0) * f->width ;
177  fs->slice_y = get_symbol(c, state, 0) * f->height;
178  fs->slice_width = (get_symbol(c, state, 0) + 1) * f->width + fs->slice_x;
179  fs->slice_height = (get_symbol(c, state, 0) + 1) * f->height + fs->slice_y;
180 
181  fs->slice_x /= f->num_h_slices;
182  fs->slice_y /= f->num_v_slices;
183  fs->slice_width = fs->slice_width /f->num_h_slices - fs->slice_x;
184  fs->slice_height = fs->slice_height/f->num_v_slices - fs->slice_y;
185  if ((unsigned)fs->slice_width > f->width || (unsigned)fs->slice_height > f->height)
186  return -1;
187  if ( (unsigned)fs->slice_x + (uint64_t)fs->slice_width > f->width
188  || (unsigned)fs->slice_y + (uint64_t)fs->slice_height > f->height)
189  return -1;
190 
191  for (i = 0; i < f->plane_count; i++) {
192  PlaneContext * const p = &fs->plane[i];
193  int idx = get_symbol(c, state, 0);
194  if (idx >= (unsigned)f->quant_table_count) {
195  av_log(f->avctx, AV_LOG_ERROR, "quant_table_index out of range\n");
196  return -1;
197  }
198  p->quant_table_index = idx;
199  memcpy(p->quant_table, f->quant_tables[idx], sizeof(p->quant_table));
200  context_count = f->context_count[idx];
201 
202  if (p->context_count < context_count) {
203  av_freep(&p->state);
204  av_freep(&p->vlc_state);
205  }
206  p->context_count = context_count;
207  }
208 
209  ps = get_symbol(c, state, 0);
210  if (ps == 1) {
211  f->cur->interlaced_frame = 1;
212  f->cur->top_field_first = 1;
213  } else if (ps == 2) {
214  f->cur->interlaced_frame = 1;
215  f->cur->top_field_first = 0;
216  } else if (ps == 3) {
217  f->cur->interlaced_frame = 0;
218  }
219  f->cur->sample_aspect_ratio.num = get_symbol(c, state, 0);
220  f->cur->sample_aspect_ratio.den = get_symbol(c, state, 0);
221 
222  if (av_image_check_sar(f->width, f->height,
223  f->cur->sample_aspect_ratio) < 0) {
224  av_log(f->avctx, AV_LOG_WARNING, "ignoring invalid SAR: %u/%u\n",
225  f->cur->sample_aspect_ratio.num,
226  f->cur->sample_aspect_ratio.den);
227  f->cur->sample_aspect_ratio = (AVRational){ 0, 1 };
228  }
229 
230  if (fs->version > 3) {
231  fs->slice_reset_contexts = get_rac(c, state);
232  fs->slice_coding_mode = get_symbol(c, state, 0);
233  if (fs->slice_coding_mode != 1) {
234  fs->slice_rct_by_coef = get_symbol(c, state, 0);
235  fs->slice_rct_ry_coef = get_symbol(c, state, 0);
236  if ((uint64_t)fs->slice_rct_by_coef + (uint64_t)fs->slice_rct_ry_coef > 4) {
237  av_log(f->avctx, AV_LOG_ERROR, "slice_rct_y_coef out of range\n");
238  return AVERROR_INVALIDDATA;
239  }
240  }
241  }
242 
243  return 0;
244 }
245 
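/*
 * Per-slice worker run through avctx->execute(): inherit contexts from the
 * corresponding slice of the previous frame for inter frames, (re)initialize
 * the slice state, parse the slice header for version > 2, switch to the
 * bit reader for Golomb-Rice slices, decode the planes according to the
 * colorspace, and finally sanity-check the consumed slice length.
 */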
246 static int decode_slice(AVCodecContext *c, void *arg)
247 {
248  FFV1Context *fs = *(void **)arg;
249  FFV1Context *f = fs->avctx->priv_data;
250  int width, height, x, y, ret;
251  const int ps = av_pix_fmt_desc_get(c->pix_fmt)->comp[0].step;
252  AVFrame * const p = f->cur;
253  int i, si;
254 
255  for( si=0; fs != f->slice_context[si]; si ++)
256  ;
257 
258  if(f->fsrc && !p->key_frame)
259  ff_thread_await_progress(&f->last_picture, si, 0);
260 
261  if(f->fsrc && !p->key_frame) {
262  FFV1Context *fssrc = f->fsrc->slice_context[si];
263  FFV1Context *fsdst = f->slice_context[si];
264  av_assert1(fsdst->plane_count == fssrc->plane_count);
265  av_assert1(fsdst == fs);
266 
267  if (!p->key_frame)
268  fsdst->slice_damaged |= fssrc->slice_damaged;
269 
270  for (i = 0; i < f->plane_count; i++) {
271  PlaneContext *psrc = &fssrc->plane[i];
272  PlaneContext *pdst = &fsdst->plane[i];
273 
274  av_free(pdst->state);
275  av_free(pdst->vlc_state);
276  memcpy(pdst, psrc, sizeof(*pdst));
277  pdst->state = NULL;
278  pdst->vlc_state = NULL;
279 
280  if (fssrc->ac) {
281  pdst->state = av_malloc_array(CONTEXT_SIZE, psrc->context_count);
282  memcpy(pdst->state, psrc->state, CONTEXT_SIZE * psrc->context_count);
283  } else {
284  pdst->vlc_state = av_malloc_array(sizeof(*pdst->vlc_state), psrc->context_count);
285  memcpy(pdst->vlc_state, psrc->vlc_state, sizeof(*pdst->vlc_state) * psrc->context_count);
286  }
287  }
288  }
289 
290  fs->slice_rct_by_coef = 1;
291  fs->slice_rct_ry_coef = 1;
292 
293  if (f->version > 2) {
294  if (ff_ffv1_init_slice_state(f, fs) < 0)
295  return AVERROR(ENOMEM);
296  if (decode_slice_header(f, fs) < 0) {
297  fs->slice_x = fs->slice_y = fs->slice_height = fs->slice_width = 0;
298  fs->slice_damaged = 1;
299  return AVERROR_INVALIDDATA;
300  }
301  }
302  if ((ret = ff_ffv1_init_slice_state(f, fs)) < 0)
303  return ret;
304  if (f->cur->key_frame || fs->slice_reset_contexts)
305  ff_ffv1_clear_slice_state(f, fs);
306 
307  width = fs->slice_width;
308  height = fs->slice_height;
309  x = fs->slice_x;
310  y = fs->slice_y;
311 
312  if (fs->ac == AC_GOLOMB_RICE) {
313  if (f->version == 3 && f->micro_version > 1 || f->version > 3)
314  get_rac(&fs->c, (uint8_t[]) { 129 });
315  fs->ac_byte_count = f->version > 2 || (!x && !y) ? fs->c.bytestream - fs->c.bytestream_start - 1 : 0;
316  init_get_bits(&fs->gb,
317  fs->c.bytestream_start + fs->ac_byte_count,
318  (fs->c.bytestream_end - fs->c.bytestream_start - fs->ac_byte_count) * 8);
319  }
320 
321  av_assert1(width && height);
322  if (f->colorspace == 0 && (f->chroma_planes || !fs->transparency)) {
323  const int chroma_width = AV_CEIL_RSHIFT(width, f->chroma_h_shift);
324  const int chroma_height = AV_CEIL_RSHIFT(height, f->chroma_v_shift);
325  const int cx = x >> f->chroma_h_shift;
326  const int cy = y >> f->chroma_v_shift;
327  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0], width, height, p->linesize[0], 0, 1);
328 
329  if (f->chroma_planes) {
330  decode_plane(fs, p->data[1] + ps*cx+cy*p->linesize[1], chroma_width, chroma_height, p->linesize[1], 1, 1);
331  decode_plane(fs, p->data[2] + ps*cx+cy*p->linesize[2], chroma_width, chroma_height, p->linesize[2], 1, 1);
332  }
333  if (fs->transparency)
334  decode_plane(fs, p->data[3] + ps*x + y*p->linesize[3], width, height, p->linesize[3], (f->version >= 4 && !f->chroma_planes) ? 1 : 2, 1);
335  } else if (f->colorspace == 0) {
336  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0] , width, height, p->linesize[0], 0, 2);
337  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0] + 1, width, height, p->linesize[0], 1, 2);
338  } else if (f->use32bit) {
339  uint8_t *planes[4] = { p->data[0] + ps * x + y * p->linesize[0],
340  p->data[1] + ps * x + y * p->linesize[1],
341  p->data[2] + ps * x + y * p->linesize[2],
342  p->data[3] + ps * x + y * p->linesize[3] };
343  decode_rgb_frame32(fs, planes, width, height, p->linesize);
344  } else {
345  uint8_t *planes[4] = { p->data[0] + ps * x + y * p->linesize[0],
346  p->data[1] + ps * x + y * p->linesize[1],
347  p->data[2] + ps * x + y * p->linesize[2],
348  p->data[3] + ps * x + y * p->linesize[3] };
349  decode_rgb_frame(fs, planes, width, height, p->linesize);
350  }
351  if (fs->ac != AC_GOLOMB_RICE && f->version > 2) {
352  int v;
353  get_rac(&fs->c, (uint8_t[]) { 129 });
354  v = fs->c.bytestream_end - fs->c.bytestream - 2 - 5*f->ec;
355  if (v) {
356  av_log(f->avctx, AV_LOG_ERROR, "bytestream end mismatching by %d\n", v);
357  fs->slice_damaged = 1;
358  }
359  }
360 
361  emms_c();
362 
363  ff_thread_report_progress(&f->picture, si, 0);
364 
365  return 0;
366 }
367 
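/*
 * Read one quantization table: runs of equal quantized values are coded as
 * (length - 1), and entries 129..255 are the mirrored negation of 127..1.
 * The return value, 2*v - 1, is the number of distinct values and thus the
 * factor this table contributes to the total context count.
 */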
368 static int read_quant_table(RangeCoder *c, int16_t *quant_table, int scale)
369 {
370  int v;
371  int i = 0;
372  uint8_t state[CONTEXT_SIZE];
373 
374  memset(state, 128, sizeof(state));
375 
376  for (v = 0; i < 128; v++) {
377  unsigned len = get_symbol(c, state, 0) + 1U;
378 
379  if (len > 128 - i || !len)
380  return AVERROR_INVALIDDATA;
381 
382  while (len--) {
383  quant_table[i] = scale * v;
384  i++;
385  }
386  }
387 
388  for (i = 1; i < 128; i++)
389  quant_table[256 - i] = -quant_table[i];
390  quant_table[128] = -quant_table[127];
391 
392  return 2 * v - 1;
393 }
394 
395 static int read_quant_tables(RangeCoder *c,
396  int16_t quant_table[MAX_CONTEXT_INPUTS][256])
397 {
398  int i;
399  int context_count = 1;
400 
401  for (i = 0; i < 5; i++) {
402  int ret = read_quant_table(c, quant_table[i], context_count);
403  if (ret < 0)
404  return ret;
405  context_count *= ret;
406  if (context_count > 32768U) {
407  return AVERROR_INVALIDDATA;
408  }
409  }
410  return (context_count + 1) / 2;
411 }
412 
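/*
 * Parse the version >= 2 global header stored in extradata: version and
 * micro_version, coder type (with optional custom state transition table),
 * colorspace, bits per sample, chroma subsampling, alpha, slice grid,
 * quantization tables with optional initial context states, the error
 * detection flag and, for version > 2, a trailing CRC over the extradata.
 */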
413 static int read_extra_header(FFV1Context *f)
414 {
415  RangeCoder *const c = &f->c;
416  uint8_t state[CONTEXT_SIZE];
417  int i, j, k, ret;
418  uint8_t state2[32][CONTEXT_SIZE];
419  unsigned crc = 0;
420 
421  memset(state2, 128, sizeof(state2));
422  memset(state, 128, sizeof(state));
423 
424  ff_init_range_decoder(c, f->avctx->extradata, f->avctx->extradata_size);
425  ff_build_rac_states(c, 0.05 * (1LL << 32), 256 - 8);
426 
427  f->version = get_symbol(c, state, 0);
428  if (f->version < 2) {
429  av_log(f->avctx, AV_LOG_ERROR, "Invalid version in global header\n");
430  return AVERROR_INVALIDDATA;
431  }
432  if (f->version > 2) {
433  c->bytestream_end -= 4;
434  f->micro_version = get_symbol(c, state, 0);
435  if (f->micro_version < 0)
436  return AVERROR_INVALIDDATA;
437  }
438  f->ac = get_symbol(c, state, 0);
439 
440  if (f->ac == AC_RANGE_CUSTOM_TAB) {
441  for (i = 1; i < 256; i++)
442  f->state_transition[i] = get_symbol(c, state, 1) + c->one_state[i];
443  }
444 
445  f->colorspace = get_symbol(c, state, 0); //YUV cs type
446  f->avctx->bits_per_raw_sample = get_symbol(c, state, 0);
447  f->chroma_planes = get_rac(c, state);
448  f->chroma_h_shift = get_symbol(c, state, 0);
449  f->chroma_v_shift = get_symbol(c, state, 0);
450  f->transparency = get_rac(c, state);
451  f->plane_count = 1 + (f->chroma_planes || f->version<4) + f->transparency;
452  f->num_h_slices = 1 + get_symbol(c, state, 0);
453  f->num_v_slices = 1 + get_symbol(c, state, 0);
454 
455  if (f->chroma_h_shift > 4U || f->chroma_v_shift > 4U) {
456  av_log(f->avctx, AV_LOG_ERROR, "chroma shift parameters %d %d are invalid\n",
457  f->chroma_h_shift, f->chroma_v_shift);
458  return AVERROR_INVALIDDATA;
459  }
460 
461  if (f->num_h_slices > (unsigned)f->width || !f->num_h_slices ||
462  f->num_v_slices > (unsigned)f->height || !f->num_v_slices
463  ) {
464  av_log(f->avctx, AV_LOG_ERROR, "slice count invalid\n");
465  return AVERROR_INVALIDDATA;
466  }
467 
468  f->quant_table_count = get_symbol(c, state, 0);
469  if (f->quant_table_count > (unsigned)MAX_QUANT_TABLES || !f->quant_table_count) {
470  av_log(f->avctx, AV_LOG_ERROR, "quant table count %d is invalid\n", f->quant_table_count);
471  f->quant_table_count = 0;
472  return AVERROR_INVALIDDATA;
473  }
474 
475  for (i = 0; i < f->quant_table_count; i++) {
476  f->context_count[i] = read_quant_tables(c, f->quant_tables[i]);
477  if (f->context_count[i] < 0) {
478  av_log(f->avctx, AV_LOG_ERROR, "read_quant_table error\n");
479  return AVERROR_INVALIDDATA;
480  }
481  }
482  if ((ret = ff_ffv1_allocate_initial_states(f)) < 0)
483  return ret;
484 
485  for (i = 0; i < f->quant_table_count; i++)
486  if (get_rac(c, state)) {
487  for (j = 0; j < f->context_count[i]; j++)
488  for (k = 0; k < CONTEXT_SIZE; k++) {
489  int pred = j ? f->initial_states[i][j - 1][k] : 128;
490  f->initial_states[i][j][k] =
491  (pred + get_symbol(c, state2[k], 1)) & 0xFF;
492  }
493  }
494 
495  if (f->version > 2) {
496  f->ec = get_symbol(c, state, 0);
497  if (f->micro_version > 2)
498  f->intra = get_symbol(c, state, 0);
499  }
500 
501  if (f->version > 2) {
502  unsigned v;
503  v = av_crc(av_crc_get_table(AV_CRC_32_IEEE), 0,
504  f->avctx->extradata, f->avctx->extradata_size);
505  if (v || f->avctx->extradata_size < 4) {
506  av_log(f->avctx, AV_LOG_ERROR, "CRC mismatch %X!\n", v);
507  return AVERROR_INVALIDDATA;
508  }
509  crc = AV_RB32(f->avctx->extradata + f->avctx->extradata_size - 4);
510  }
511 
512  if (f->avctx->debug & FF_DEBUG_PICT_INFO)
513  av_log(f->avctx, AV_LOG_DEBUG,
514  "global: ver:%d.%d, coder:%d, colorspace: %d bpr:%d chroma:%d(%d:%d), alpha:%d slices:%dx%d qtabs:%d ec:%d intra:%d CRC:0x%08X\n",
515  f->version, f->micro_version,
516  f->ac,
517  f->colorspace,
518  f->avctx->bits_per_raw_sample,
519  f->chroma_planes, f->chroma_h_shift, f->chroma_v_shift,
520  f->transparency,
521  f->num_h_slices, f->num_v_slices,
522  f->quant_table_count,
523  f->ec,
524  f->intra,
525  crc
526  );
527  return 0;
528 }
529 
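/*
 * Parse the per-keyframe header. For version < 2 all global parameters are
 * repeated here; for newer versions they come from the extradata and only
 * the slice layout is rederived: version 2 codes an explicit slice count,
 * while version >= 3 walks the slice footers (3-byte size plus optional
 * 5-byte CRC trailer) backwards from the end of the packet.
 */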
530 static int read_header(FFV1Context *f)
531 {
532  uint8_t state[CONTEXT_SIZE];
533  int i, j, context_count = -1; //-1 to avoid warning
534  RangeCoder *const c = &f->slice_context[0]->c;
535 
536  memset(state, 128, sizeof(state));
537 
538  if (f->version < 2) {
539  int chroma_planes, chroma_h_shift, chroma_v_shift, transparency, colorspace, bits_per_raw_sample;
540  unsigned v= get_symbol(c, state, 0);
541  if (v >= 2) {
542  av_log(f->avctx, AV_LOG_ERROR, "invalid version %d in ver01 header\n", v);
543  return AVERROR_INVALIDDATA;
544  }
545  f->version = v;
546  f->ac = get_symbol(c, state, 0);
547 
548  if (f->ac == AC_RANGE_CUSTOM_TAB) {
549  for (i = 1; i < 256; i++) {
550  int st = get_symbol(c, state, 1) + c->one_state[i];
551  if (st < 1 || st > 255) {
552  av_log(f->avctx, AV_LOG_ERROR, "invalid state transition %d\n", st);
553  return AVERROR_INVALIDDATA;
554  }
555  f->state_transition[i] = st;
556  }
557  }
558 
559  colorspace = get_symbol(c, state, 0); //YUV cs type
560  bits_per_raw_sample = f->version > 0 ? get_symbol(c, state, 0) : f->avctx->bits_per_raw_sample;
561  chroma_planes = get_rac(c, state);
562  chroma_h_shift = get_symbol(c, state, 0);
563  chroma_v_shift = get_symbol(c, state, 0);
564  transparency = get_rac(c, state);
565  if (colorspace == 0 && f->avctx->skip_alpha)
566  transparency = 0;
567 
568  if (f->plane_count) {
569  if (colorspace != f->colorspace ||
570  bits_per_raw_sample != f->avctx->bits_per_raw_sample ||
571  chroma_planes != f->chroma_planes ||
572  chroma_h_shift != f->chroma_h_shift ||
573  chroma_v_shift != f->chroma_v_shift ||
574  transparency != f->transparency) {
575  av_log(f->avctx, AV_LOG_ERROR, "Invalid change of global parameters\n");
576  return AVERROR_INVALIDDATA;
577  }
578  }
579 
580  if (chroma_h_shift > 4U || chroma_v_shift > 4U) {
581  av_log(f->avctx, AV_LOG_ERROR, "chroma shift parameters %d %d are invalid\n",
582  chroma_h_shift, chroma_v_shift);
583  return AVERROR_INVALIDDATA;
584  }
585 
586  f->colorspace = colorspace;
587  f->avctx->bits_per_raw_sample = bits_per_raw_sample;
588  f->chroma_planes = chroma_planes;
589  f->chroma_h_shift = chroma_h_shift;
590  f->chroma_v_shift = chroma_v_shift;
591  f->transparency = transparency;
592 
593  f->plane_count = 2 + f->transparency;
594  }
595 
596  if (f->colorspace == 0) {
597  if (!f->transparency && !f->chroma_planes) {
598  if (f->avctx->bits_per_raw_sample <= 8)
599  f->avctx->pix_fmt = AV_PIX_FMT_GRAY8;
600  else if (f->avctx->bits_per_raw_sample == 9) {
601  f->packed_at_lsb = 1;
602  f->avctx->pix_fmt = AV_PIX_FMT_GRAY9;
603  } else if (f->avctx->bits_per_raw_sample == 10) {
604  f->packed_at_lsb = 1;
605  f->avctx->pix_fmt = AV_PIX_FMT_GRAY10;
606  } else if (f->avctx->bits_per_raw_sample == 12) {
607  f->packed_at_lsb = 1;
608  f->avctx->pix_fmt = AV_PIX_FMT_GRAY12;
609  } else if (f->avctx->bits_per_raw_sample == 16) {
610  f->packed_at_lsb = 1;
611  f->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
612  } else if (f->avctx->bits_per_raw_sample < 16) {
613  f->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
614  } else
615  return AVERROR(ENOSYS);
616  } else if (f->transparency && !f->chroma_planes) {
617  if (f->avctx->bits_per_raw_sample <= 8)
618  f->avctx->pix_fmt = AV_PIX_FMT_YA8;
619  else
620  return AVERROR(ENOSYS);
621  } else if (f->avctx->bits_per_raw_sample<=8 && !f->transparency) {
622  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
623  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P; break;
624  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P; break;
625  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P; break;
626  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P; break;
627  case 0x20: f->avctx->pix_fmt = AV_PIX_FMT_YUV411P; break;
628  case 0x22: f->avctx->pix_fmt = AV_PIX_FMT_YUV410P; break;
629  }
630  } else if (f->avctx->bits_per_raw_sample <= 8 && f->transparency) {
631  switch(16*f->chroma_h_shift + f->chroma_v_shift) {
632  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P; break;
633  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P; break;
634  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P; break;
635  }
636  } else if (f->avctx->bits_per_raw_sample == 9 && !f->transparency) {
637  f->packed_at_lsb = 1;
638  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
639  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P9; break;
640  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P9; break;
641  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P9; break;
642  }
643  } else if (f->avctx->bits_per_raw_sample == 9 && f->transparency) {
644  f->packed_at_lsb = 1;
645  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
646  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P9; break;
647  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P9; break;
648  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P9; break;
649  }
650  } else if (f->avctx->bits_per_raw_sample == 10 && !f->transparency) {
651  f->packed_at_lsb = 1;
652  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
653  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P10; break;
654  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P10; break;
655  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P10; break;
656  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P10; break;
657  }
658  } else if (f->avctx->bits_per_raw_sample == 10 && f->transparency) {
659  f->packed_at_lsb = 1;
660  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
661  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P10; break;
662  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P10; break;
663  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P10; break;
664  }
665  } else if (f->avctx->bits_per_raw_sample == 12 && !f->transparency) {
666  f->packed_at_lsb = 1;
667  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
668  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P12; break;
669  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P12; break;
670  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P12; break;
671  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P12; break;
672  }
673  } else if (f->avctx->bits_per_raw_sample == 14 && !f->transparency) {
674  f->packed_at_lsb = 1;
675  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
676  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P14; break;
677  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P14; break;
678  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P14; break;
679  }
680  } else if (f->avctx->bits_per_raw_sample == 16 && !f->transparency){
681  f->packed_at_lsb = 1;
682  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
683  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P16; break;
684  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P16; break;
685  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P16; break;
686  }
687  } else if (f->avctx->bits_per_raw_sample == 16 && f->transparency){
688  f->packed_at_lsb = 1;
689  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
690  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P16; break;
691  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P16; break;
692  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P16; break;
693  }
694  }
695  } else if (f->colorspace == 1) {
696  if (f->chroma_h_shift || f->chroma_v_shift) {
697  av_log(f->avctx, AV_LOG_ERROR,
698  "chroma subsampling not supported in this colorspace\n");
699  return AVERROR(ENOSYS);
700  }
701  if ( f->avctx->bits_per_raw_sample <= 8 && !f->transparency)
702  f->avctx->pix_fmt = AV_PIX_FMT_0RGB32;
703  else if (f->avctx->bits_per_raw_sample <= 8 && f->transparency)
704  f->avctx->pix_fmt = AV_PIX_FMT_RGB32;
705  else if (f->avctx->bits_per_raw_sample == 9 && !f->transparency)
706  f->avctx->pix_fmt = AV_PIX_FMT_GBRP9;
707  else if (f->avctx->bits_per_raw_sample == 10 && !f->transparency)
708  f->avctx->pix_fmt = AV_PIX_FMT_GBRP10;
709  else if (f->avctx->bits_per_raw_sample == 10 && f->transparency)
710  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
711  else if (f->avctx->bits_per_raw_sample == 12 && !f->transparency)
712  f->avctx->pix_fmt = AV_PIX_FMT_GBRP12;
713  else if (f->avctx->bits_per_raw_sample == 12 && f->transparency)
714  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP12;
715  else if (f->avctx->bits_per_raw_sample == 14 && !f->transparency)
716  f->avctx->pix_fmt = AV_PIX_FMT_GBRP14;
717  else if (f->avctx->bits_per_raw_sample == 16 && !f->transparency) {
718  f->avctx->pix_fmt = AV_PIX_FMT_GBRP16;
719  f->use32bit = 1;
720  }
721  else if (f->avctx->bits_per_raw_sample == 16 && f->transparency) {
722  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP16;
723  f->use32bit = 1;
724  }
725  } else {
726  av_log(f->avctx, AV_LOG_ERROR, "colorspace not supported\n");
727  return AVERROR(ENOSYS);
728  }
729  if (f->avctx->pix_fmt == AV_PIX_FMT_NONE) {
730  av_log(f->avctx, AV_LOG_ERROR, "format not supported\n");
731  return AVERROR(ENOSYS);
732  }
733 
734  ff_dlog(f->avctx, "%d %d %d\n",
735  f->chroma_h_shift, f->chroma_v_shift, f->avctx->pix_fmt);
736  if (f->version < 2) {
737  context_count = read_quant_tables(c, f->quant_table);
738  if (context_count < 0) {
739  av_log(f->avctx, AV_LOG_ERROR, "read_quant_table error\n");
740  return AVERROR_INVALIDDATA;
741  }
742  f->slice_count = f->max_slice_count;
743  } else if (f->version < 3) {
744  f->slice_count = get_symbol(c, state, 0);
745  } else {
746  const uint8_t *p = c->bytestream_end;
747  for (f->slice_count = 0;
748  f->slice_count < MAX_SLICES && 3 + 5*!!f->ec < p - c->bytestream_start;
749  f->slice_count++) {
750  int trailer = 3 + 5*!!f->ec;
751  int size = AV_RB24(p-trailer);
752  if (size + trailer > p - c->bytestream_start)
753  break;
754  p -= size + trailer;
755  }
756  }
757  if (f->slice_count > (unsigned)MAX_SLICES || f->slice_count <= 0 || f->slice_count > f->max_slice_count) {
758  av_log(f->avctx, AV_LOG_ERROR, "slice count %d is invalid (max=%d)\n", f->slice_count, f->max_slice_count);
759  return AVERROR_INVALIDDATA;
760  }
761 
762  for (j = 0; j < f->slice_count; j++) {
763  FFV1Context *fs = f->slice_context[j];
764  fs->ac = f->ac;
765  fs->packed_at_lsb = f->packed_at_lsb;
766 
767  fs->slice_damaged = 0;
768 
769  if (f->version == 2) {
770  fs->slice_x = get_symbol(c, state, 0) * f->width ;
771  fs->slice_y = get_symbol(c, state, 0) * f->height;
772  fs->slice_width = (get_symbol(c, state, 0) + 1) * f->width + fs->slice_x;
773  fs->slice_height = (get_symbol(c, state, 0) + 1) * f->height + fs->slice_y;
774 
775  fs->slice_x /= f->num_h_slices;
776  fs->slice_y /= f->num_v_slices;
777  fs->slice_width = fs->slice_width / f->num_h_slices - fs->slice_x;
778  fs->slice_height = fs->slice_height / f->num_v_slices - fs->slice_y;
779  if ((unsigned)fs->slice_width > f->width ||
780  (unsigned)fs->slice_height > f->height)
781  return AVERROR_INVALIDDATA;
782  if ( (unsigned)fs->slice_x + (uint64_t)fs->slice_width > f->width
783  || (unsigned)fs->slice_y + (uint64_t)fs->slice_height > f->height)
784  return AVERROR_INVALIDDATA;
785  }
786 
787  for (i = 0; i < f->plane_count; i++) {
788  PlaneContext *const p = &fs->plane[i];
789 
790  if (f->version == 2) {
791  int idx = get_symbol(c, state, 0);
792  if (idx >= (unsigned)f->quant_table_count) {
793  av_log(f->avctx, AV_LOG_ERROR,
794  "quant_table_index out of range\n");
795  return AVERROR_INVALIDDATA;
796  }
797  p->quant_table_index = idx;
798  memcpy(p->quant_table, f->quant_tables[idx],
799  sizeof(p->quant_table));
800  context_count = f->context_count[idx];
801  } else {
802  memcpy(p->quant_table, f->quant_table, sizeof(p->quant_table));
803  }
804 
805  if (f->version <= 2) {
806  av_assert0(context_count >= 0);
807  if (p->context_count < context_count) {
808  av_freep(&p->state);
809  av_freep(&p->vlc_state);
810  }
811  p->context_count = context_count;
812  }
813  }
814  }
815  return 0;
816 }
817 
818 static av_cold int decode_init(AVCodecContext *avctx)
819 {
820  FFV1Context *f = avctx->priv_data;
821  int ret;
822 
823  if ((ret = ff_ffv1_common_init(avctx)) < 0)
824  return ret;
825 
826  if (avctx->extradata_size > 0 && (ret = read_extra_header(f)) < 0)
827  return ret;
828 
829  if ((ret = ff_ffv1_init_slice_contexts(f)) < 0)
830  return ret;
831 
832  avctx->internal->allocate_progress = 1;
833 
834  return 0;
835 }
836 
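/*
 * Top-level frame decoding: one range-coded bit (initial state 128) signals
 * a keyframe, in which case the header is reparsed. Slices are then located
 * from the end of the packet via their footers, optionally CRC-checked,
 * decoded in parallel through avctx->execute(), and damaged slices are
 * concealed by copying the co-located area of the previous picture.
 */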
837 static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
838 {
839  uint8_t *buf = avpkt->data;
840  int buf_size = avpkt->size;
841  FFV1Context *f = avctx->priv_data;
842  RangeCoder *const c = &f->slice_context[0]->c;
843  int i, ret;
844  uint8_t keystate = 128;
845  uint8_t *buf_p;
846  AVFrame *p;
847 
848  if (f->last_picture.f)
849  ff_thread_release_buffer(avctx, &f->last_picture);
850  FFSWAP(ThreadFrame, f->picture, f->last_picture);
851 
852  f->cur = p = f->picture.f;
853 
854  if (f->version < 3 && avctx->field_order > AV_FIELD_PROGRESSIVE) {
855  /* we have interlaced material flagged in container */
856  p->interlaced_frame = 1;
857  if (avctx->field_order == AV_FIELD_TT || avctx->field_order == AV_FIELD_TB)
858  p->top_field_first = 1;
859  }
860 
861  f->avctx = avctx;
862  ff_init_range_decoder(c, buf, buf_size);
863  ff_build_rac_states(c, 0.05 * (1LL << 32), 256 - 8);
864 
865  p->pict_type = AV_PICTURE_TYPE_I; //FIXME I vs. P
866  if (get_rac(c, &keystate)) {
867  p->key_frame = 1;
868  f->key_frame_ok = 0;
869  if ((ret = read_header(f)) < 0)
870  return ret;
871  f->key_frame_ok = 1;
872  } else {
873  if (!f->key_frame_ok) {
874  av_log(avctx, AV_LOG_ERROR,
875  "Cannot decode non-keyframe without valid keyframe\n");
876  return AVERROR_INVALIDDATA;
877  }
878  p->key_frame = 0;
879  }
880 
881  if ((ret = ff_thread_get_buffer(avctx, &f->picture, AV_GET_BUFFER_FLAG_REF)) < 0)
882  return ret;
883 
884  if (avctx->debug & FF_DEBUG_PICT_INFO)
885  av_log(avctx, AV_LOG_DEBUG, "ver:%d keyframe:%d coder:%d ec:%d slices:%d bps:%d\n",
886  f->version, p->key_frame, f->ac, f->ec, f->slice_count, f->avctx->bits_per_raw_sample);
887 
888  ff_thread_finish_setup(avctx);
889 
890  buf_p = buf + buf_size;
891  for (i = f->slice_count - 1; i >= 0; i--) {
892  FFV1Context *fs = f->slice_context[i];
893  int trailer = 3 + 5*!!f->ec;
894  int v;
895 
896  if (i || f->version > 2) v = AV_RB24(buf_p-trailer) + trailer;
897  else v = buf_p - c->bytestream_start;
898  if (buf_p - c->bytestream_start < v) {
899  av_log(avctx, AV_LOG_ERROR, "Slice pointer chain broken\n");
900  ff_thread_report_progress(&f->picture, INT_MAX, 0);
901  return AVERROR_INVALIDDATA;
902  }
903  buf_p -= v;
904 
905  if (f->ec) {
906  unsigned crc = av_crc(av_crc_get_table(AV_CRC_32_IEEE), 0, buf_p, v);
907  if (crc) {
908  int64_t ts = avpkt->pts != AV_NOPTS_VALUE ? avpkt->pts : avpkt->dts;
909  av_log(f->avctx, AV_LOG_ERROR, "slice CRC mismatch %X!", crc);
910  if (ts != AV_NOPTS_VALUE && avctx->pkt_timebase.num) {
911  av_log(f->avctx, AV_LOG_ERROR, "at %f seconds\n", ts*av_q2d(avctx->pkt_timebase));
912  } else if (ts != AV_NOPTS_VALUE) {
913  av_log(f->avctx, AV_LOG_ERROR, "at %"PRId64"\n", ts);
914  } else {
915  av_log(f->avctx, AV_LOG_ERROR, "\n");
916  }
917  fs->slice_damaged = 1;
918  }
919  if (avctx->debug & FF_DEBUG_PICT_INFO) {
920  av_log(avctx, AV_LOG_DEBUG, "slice %d, CRC: 0x%08"PRIX32"\n", i, AV_RB32(buf_p + v - 4));
921  }
922  }
923 
924  if (i) {
925  ff_init_range_decoder(&fs->c, buf_p, v);
926  } else
927  fs->c.bytestream_end = buf_p + v;
928 
929  fs->avctx = avctx;
930  fs->cur = p;
931  }
932 
933  avctx->execute(avctx,
934  decode_slice,
935  &f->slice_context[0],
936  NULL,
937  f->slice_count,
938  sizeof(void*));
939 
940  for (i = f->slice_count - 1; i >= 0; i--) {
941  FFV1Context *fs = f->slice_context[i];
942  int j;
943  if (fs->slice_damaged && f->last_picture.f->data[0]) {
944  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
945  const uint8_t *src[4];
946  uint8_t *dst[4];
947  ff_thread_await_progress(&f->last_picture, INT_MAX, 0);
948  for (j = 0; j < desc->nb_components; j++) {
949  int pixshift = desc->comp[j].depth > 8;
950  int sh = (j == 1 || j == 2) ? f->chroma_h_shift : 0;
951  int sv = (j == 1 || j == 2) ? f->chroma_v_shift : 0;
952  dst[j] = p->data[j] + p->linesize[j] *
953  (fs->slice_y >> sv) + ((fs->slice_x >> sh) << pixshift);
954  src[j] = f->last_picture.f->data[j] + f->last_picture.f->linesize[j] *
955  (fs->slice_y >> sv) + ((fs->slice_x >> sh) << pixshift);
956 
957  }
958  if (desc->flags & AV_PIX_FMT_FLAG_PAL ||
959  desc->flags & FF_PSEUDOPAL) {
960  dst[1] = p->data[1];
961  src[1] = f->last_picture.f->data[1];
962  }
963  av_image_copy(dst, p->linesize, src,
964  f->last_picture.f->linesize,
965  avctx->pix_fmt,
966  fs->slice_width,
967  fs->slice_height);
968  }
969  }
970  ff_thread_report_progress(&f->picture, INT_MAX, 0);
971 
972  f->picture_number++;
973 
974  if (f->last_picture.f)
975  ff_thread_release_buffer(avctx, &f->last_picture);
976  f->cur = NULL;
977  if ((ret = av_frame_ref(data, f->picture.f)) < 0)
978  return ret;
979 
980  *got_frame = 1;
981 
982  return buf_size;
983 }
984 
985 #if HAVE_THREADS
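/*
 * Frame-threading copy constructor: the per-thread context starts as a
 * plain copy of the user context, so frame pointers are cleared, the
 * initial context states are duplicated and fresh slice contexts are
 * allocated here.
 */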
986 static int init_thread_copy(AVCodecContext *avctx)
987 {
988  FFV1Context *f = avctx->priv_data;
989  int i, ret;
990 
991  f->picture.f = NULL;
992  f->last_picture.f = NULL;
993  f->sample_buffer = NULL;
994  f->max_slice_count = 0;
995  f->slice_count = 0;
996 
997  for (i = 0; i < f->quant_table_count; i++) {
998  av_assert0(f->version > 1);
999  f->initial_states[i] = av_memdup(f->initial_states[i],
1000  f->context_count[i] * sizeof(*f->initial_states[i]));
1001  }
1002 
1003  f->picture.f = av_frame_alloc();
1004  f->last_picture.f = av_frame_alloc();
1005 
1006  if ((ret = ff_ffv1_init_slice_contexts(f)) < 0)
1007  return ret;
1008 
1009  return 0;
1010 }
1011 #endif
1012 
1013 static void copy_fields(FFV1Context *fsdst, FFV1Context *fssrc, FFV1Context *fsrc)
1014 {
1015  fsdst->version = fsrc->version;
1016  fsdst->micro_version = fsrc->micro_version;
1017  fsdst->chroma_planes = fsrc->chroma_planes;
1018  fsdst->chroma_h_shift = fsrc->chroma_h_shift;
1019  fsdst->chroma_v_shift = fsrc->chroma_v_shift;
1020  fsdst->transparency = fsrc->transparency;
1021  fsdst->plane_count = fsrc->plane_count;
1022  fsdst->ac = fsrc->ac;
1023  fsdst->colorspace = fsrc->colorspace;
1024 
1025  fsdst->ec = fsrc->ec;
1026  fsdst->intra = fsrc->intra;
1027  fsdst->slice_damaged = fssrc->slice_damaged;
1028  fsdst->key_frame_ok = fsrc->key_frame_ok;
1029 
1031  fsdst->packed_at_lsb = fsrc->packed_at_lsb;
1032  fsdst->slice_count = fsrc->slice_count;
1033  if (fsrc->version<3){
1034  fsdst->slice_x = fssrc->slice_x;
1035  fsdst->slice_y = fssrc->slice_y;
1036  fsdst->slice_width = fssrc->slice_width;
1037  fsdst->slice_height = fssrc->slice_height;
1038  }
1039 }
1040 
1041 #if HAVE_THREADS
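/*
 * Frame-threading state update: the destination context is overwritten with
 * the source while its own frame buffers, slice-context pointers and
 * initial-state tables are preserved; per-slice fields are then copied and
 * a reference to the source's current picture is taken.
 */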
1042 static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
1043 {
1044  FFV1Context *fsrc = src->priv_data;
1045  FFV1Context *fdst = dst->priv_data;
1046  int i, ret;
1047 
1048  if (dst == src)
1049  return 0;
1050 
1051  {
1052  ThreadFrame picture = fdst->picture, last_picture = fdst->last_picture;
1053  uint8_t (*initial_states[MAX_QUANT_TABLES])[32];
1054  struct FFV1Context *slice_context[MAX_SLICES];
1055  memcpy(initial_states, fdst->initial_states, sizeof(fdst->initial_states));
1056  memcpy(slice_context, fdst->slice_context , sizeof(fdst->slice_context));
1057 
1058  memcpy(fdst, fsrc, sizeof(*fdst));
1059  memcpy(fdst->initial_states, initial_states, sizeof(fdst->initial_states));
1060  memcpy(fdst->slice_context, slice_context , sizeof(fdst->slice_context));
1061  fdst->picture = picture;
1062  fdst->last_picture = last_picture;
1063  for (i = 0; i<fdst->num_h_slices * fdst->num_v_slices; i++) {
1064  FFV1Context *fssrc = fsrc->slice_context[i];
1065  FFV1Context *fsdst = fdst->slice_context[i];
1066  copy_fields(fsdst, fssrc, fsrc);
1067  }
1068  av_assert0(!fdst->plane[0].state);
1069  av_assert0(!fdst->sample_buffer);
1070  }
1071 
1072  av_assert1(fdst->max_slice_count == fsrc->max_slice_count);
1073 
1074 
1075  ff_thread_release_buffer(dst, &fdst->picture);
1076  if (fsrc->picture.f->data[0]) {
1077  if ((ret = ff_thread_ref_frame(&fdst->picture, &fsrc->picture)) < 0)
1078  return ret;
1079  }
1080 
1081  fdst->fsrc = fsrc;
1082 
1083  return 0;
1084 }
1085 #endif
1086 
1087 AVCodec ff_ffv1_decoder = {
1088  .name = "ffv1",
1089  .long_name = NULL_IF_CONFIG_SMALL("FFmpeg video codec #1"),
1090  .type = AVMEDIA_TYPE_VIDEO,
1091  .id = AV_CODEC_ID_FFV1,
1092  .priv_data_size = sizeof(FFV1Context),
1093  .init = decode_init,
1094  .close = ff_ffv1_close,
1095  .decode = decode_frame,
1096  .init_thread_copy = ONLY_IF_THREADS_ENABLED(init_thread_copy),
1097  .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
1098  .capabilities = AV_CODEC_CAP_DR1 /*| AV_CODEC_CAP_DRAW_HORIZ_BAND*/ |
1099  AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_SLICE_THREADS,
1100  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP
1101 };
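/*
 * Minimal usage sketch for this decoder through the public libavcodec API;
 * error handling is omitted and read_packet_somehow() is a placeholder for
 * whatever demuxer feeds the packets (for raw FFV1 version >= 2 streams the
 * global header must be supplied via AVCodecContext.extradata).
 *
 *     const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_FFV1);
 *     AVCodecContext *ctx  = avcodec_alloc_context3(codec);
 *     AVFrame  *frame      = av_frame_alloc();
 *     AVPacket *pkt        = av_packet_alloc();
 *
 *     avcodec_open2(ctx, codec, NULL);
 *     while (read_packet_somehow(pkt) >= 0) {
 *         avcodec_send_packet(ctx, pkt);
 *         while (avcodec_receive_frame(ctx, frame) == 0) {
 *             // frame->data[] now holds one decoded picture
 *         }
 *         av_packet_unref(pkt);
 *     }
 */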
Definition: ffv1.h:140