FFmpeg
utvideodec.c
1 /*
2  * Ut Video decoder
3  * Copyright (c) 2011 Konstantin Shishkov
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Ut Video decoder
25  */
26 
27 #include <inttypes.h>
28 #include <stdlib.h>
29 
30 #define UNCHECKED_BITSTREAM_READER 1
31 
32 #include "libavutil/intreadwrite.h"
33 #include "avcodec.h"
34 #include "bswapdsp.h"
35 #include "bytestream.h"
36 #include "get_bits.h"
37 #include "internal.h"
38 #include "thread.h"
39 #include "utvideo.h"
40 
41 static int build_huff10(const uint8_t *src, VLC *vlc, int *fsym)
42 {
43  int i;
44  HuffEntry he[1024];
45  int last;
46  uint32_t codes[1024];
47  uint8_t bits[1024];
48  uint16_t syms[1024];
49  uint32_t code;
50 
51  *fsym = -1;
52  for (i = 0; i < 1024; i++) {
53  he[i].sym = i;
54  he[i].len = *src++;
55  }
56  qsort(he, 1024, sizeof(*he), ff_ut10_huff_cmp_len);
57 
58  if (!he[0].len) {
59  *fsym = he[0].sym;
60  return 0;
61  }
62 
63  last = 1023;
64  while (he[last].len == 255 && last)
65  last--;
66 
67  if (he[last].len > 32) {
68  return -1;
69  }
70 
71  code = 1;
72  for (i = last; i >= 0; i--) {
73  codes[i] = code >> (32 - he[i].len);
74  bits[i] = he[i].len;
75  syms[i] = he[i].sym;
76  code += 0x80000000u >> (he[i].len - 1);
77  }
78 #define VLC_BITS 11
79  return ff_init_vlc_sparse(vlc, VLC_BITS, last + 1,
80  bits, sizeof(*bits), sizeof(*bits),
81  codes, sizeof(*codes), sizeof(*codes),
82  syms, sizeof(*syms), sizeof(*syms), 0);
83 }
84 
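/*
 * build_huff10() above and build_huff() below rebuild canonical Huffman codes
 * from a table of per-symbol code lengths: symbols are sorted by length (then
 * by value), a length of 0 on the shortest entry means the whole plane uses a
 * single symbol (reported through *fsym), a length of 255 marks an unused
 * symbol, and codes are assigned from the longest lengths upward with a
 * 32-bit MSB-aligned counter. E.g. lengths {1, 2, 2} produce the codes
 * 1, 01 and 00.
 */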
85 static int build_huff(const uint8_t *src, VLC *vlc, int *fsym)
86 {
87  int i;
88  HuffEntry he[256];
89  int last;
90  uint32_t codes[256];
91  uint8_t bits[256];
92  uint8_t syms[256];
93  uint32_t code;
94 
95  *fsym = -1;
96  for (i = 0; i < 256; i++) {
97  he[i].sym = i;
98  he[i].len = *src++;
99  }
100  qsort(he, 256, sizeof(*he), ff_ut_huff_cmp_len);
101 
102  if (!he[0].len) {
103  *fsym = he[0].sym;
104  return 0;
105  }
106 
107  last = 255;
108  while (he[last].len == 255 && last)
109  last--;
110 
111  if (he[last].len > 32)
112  return -1;
113 
114  code = 1;
115  for (i = last; i >= 0; i--) {
116  codes[i] = code >> (32 - he[i].len);
117  bits[i] = he[i].len;
118  syms[i] = he[i].sym;
119  code += 0x80000000u >> (he[i].len - 1);
120  }
121 
122  return ff_init_vlc_sparse(vlc, VLC_BITS, last + 1,
123  bits, sizeof(*bits), sizeof(*bits),
124  codes, sizeof(*codes), sizeof(*codes),
125  syms, sizeof(*syms), sizeof(*syms), 0);
126 }
127 
128 static int decode_plane10(UtvideoContext *c, int plane_no,
129  uint16_t *dst, ptrdiff_t stride,
130  int width, int height,
131  const uint8_t *src, const uint8_t *huff,
132  int use_pred)
133 {
134  int i, j, slice, pix, ret;
135  int sstart, send;
136  VLC vlc;
137  GetBitContext gb;
138  int prev, fsym;
139 
140  if ((ret = build_huff10(huff, &vlc, &fsym)) < 0) {
141  av_log(c->avctx, AV_LOG_ERROR, "Cannot build Huffman codes\n");
142  return ret;
143  }
 144  if (fsym >= 0) { // build_huff10 reported a symbol to fill slices with
145  send = 0;
146  for (slice = 0; slice < c->slices; slice++) {
147  uint16_t *dest;
148 
149  sstart = send;
150  send = (height * (slice + 1) / c->slices);
151  dest = dst + sstart * stride;
152 
153  prev = 0x200;
154  for (j = sstart; j < send; j++) {
155  for (i = 0; i < width; i++) {
156  pix = fsym;
157  if (use_pred) {
158  prev += pix;
159  prev &= 0x3FF;
160  pix = prev;
161  }
162  dest[i] = pix;
163  }
164  dest += stride;
165  }
166  }
167  return 0;
168  }
169 
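 /*
  * The slice area pointed to by src starts with c->slices little-endian
  * 32-bit slice end offsets; the slice payloads follow and are stored
  * byte-swapped, so every slice is copied through bswap_buf() into
  * c->slice_bits before bit reading.
  */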
170  send = 0;
171  for (slice = 0; slice < c->slices; slice++) {
172  uint16_t *dest;
173  int slice_data_start, slice_data_end, slice_size;
174 
175  sstart = send;
176  send = (height * (slice + 1) / c->slices);
177  dest = dst + sstart * stride;
178 
179  // slice offset and size validation was done earlier
180  slice_data_start = slice ? AV_RL32(src + slice * 4 - 4) : 0;
181  slice_data_end = AV_RL32(src + slice * 4);
182  slice_size = slice_data_end - slice_data_start;
183 
184  if (!slice_size) {
185  av_log(c->avctx, AV_LOG_ERROR, "Plane has more than one symbol "
186  "yet a slice has a length of zero.\n");
187  goto fail;
188  }
189 
190  memset(c->slice_bits + slice_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
191  c->bdsp.bswap_buf((uint32_t *) c->slice_bits,
192  (uint32_t *)(src + slice_data_start + c->slices * 4),
193  (slice_data_end - slice_data_start + 3) >> 2);
194  init_get_bits(&gb, c->slice_bits, slice_size * 8);
195 
196  prev = 0x200;
197  for (j = sstart; j < send; j++) {
198  for (i = 0; i < width; i++) {
199  pix = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
200  if (pix < 0) {
201  av_log(c->avctx, AV_LOG_ERROR, "Decoding error\n");
202  goto fail;
203  }
204  if (use_pred) {
205  prev += pix;
206  prev &= 0x3FF;
207  pix = prev;
208  }
209  dest[i] = pix;
210  }
211  dest += stride;
 212  if (get_bits_left(&gb) < 0) {
 213  av_log(c->avctx, AV_LOG_ERROR,
 214  "Slice decoding ran out of bits\n");
215  goto fail;
216  }
217  }
 218  if (get_bits_left(&gb) > 32)
 219  av_log(c->avctx, AV_LOG_DEBUG,
 220  "%d bits left after decoding slice\n", get_bits_left(&gb));
221  }
222 
223  ff_free_vlc(&vlc);
224 
225  return 0;
226 fail:
227  ff_free_vlc(&vlc);
228  return AVERROR_INVALIDDATA;
229 }
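/*
 * With use_pred set (PRED_LEFT) each decoded symbol is a delta from the
 * previous sample: the running value starts at mid-grey (0x200 for the
 * 10-bit planes handled above) and is kept in range with "& 0x3FF".
 */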
230 
231 static int compute_cmask(int plane_no, int interlaced, enum AVPixelFormat pix_fmt)
232 {
233  const int is_luma = (pix_fmt == AV_PIX_FMT_YUV420P) && !plane_no;
234 
235  if (interlaced)
236  return ~(1 + 2 * is_luma);
237 
238  return ~is_luma;
239 }
240 
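/*
 * decode_plane() below rounds every slice boundary row with cmask: for
 * progressive 4:2:0 the luma slice ends are kept even so they line up with
 * the subsampled chroma slices, and in interlaced mode the rounding is to
 * pairs of lines (fields), as computed by compute_cmask() above.
 */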
241 static int decode_plane(UtvideoContext *c, int plane_no,
242  uint8_t *dst, ptrdiff_t stride,
243  int width, int height,
244  const uint8_t *src, int use_pred)
245 {
246  int i, j, slice, pix;
247  int sstart, send;
248  VLC vlc;
249  GetBitContext gb;
250  int ret, prev, fsym;
251  const int cmask = compute_cmask(plane_no, c->interlaced, c->avctx->pix_fmt);
252 
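 /*
  * Packed ("UM??") variants: the control stream carries one 3-bit code per
  * group of 8 samples. A code of 0 means the group is all zero; otherwise
  * each of the 8 samples is read as a (bits + 1)-bit value from the packed
  * stream and recentred from offset-binary into a signed residual (the
  * sub/add arithmetic below, effectively value - (1 << bits) modulo 256).
  * The residuals are turned into pixels later by the gradient-prediction
  * pass, since pack mode forces PRED_GRADIENT in decode_frame().
  */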
253  if (c->pack) {
254  send = 0;
255  for (slice = 0; slice < c->slices; slice++) {
256  GetBitContext cbit, pbit;
257  uint8_t *dest, *p;
258 
259  ret = init_get_bits8(&cbit, c->control_stream[plane_no][slice], c->control_stream_size[plane_no][slice]);
260  if (ret < 0)
261  return ret;
262 
263  ret = init_get_bits8(&pbit, c->packed_stream[plane_no][slice], c->packed_stream_size[plane_no][slice]);
264  if (ret < 0)
265  return ret;
266 
267  sstart = send;
268  send = (height * (slice + 1) / c->slices) & cmask;
269  dest = dst + sstart * stride;
270 
271  for (p = dest; p < dst + send * stride; p += 8) {
272  int bits = get_bits_le(&cbit, 3);
273 
274  if (bits == 0) {
275  *(uint64_t *) p = 0;
276  } else {
277  uint32_t sub = 0x80 >> (8 - (bits + 1)), add;
278  int k;
279 
280  for (k = 0; k < 8; k++) {
281 
282  p[k] = get_bits_le(&pbit, bits + 1);
283  add = (~p[k] & sub) << (8 - bits);
284  p[k] -= sub;
285  p[k] += add;
286  }
287  }
288  }
289  }
290 
291  return 0;
292  }
293 
294  if (build_huff(src, &vlc, &fsym)) {
295  av_log(c->avctx, AV_LOG_ERROR, "Cannot build Huffman codes\n");
296  return AVERROR_INVALIDDATA;
297  }
298  if (fsym >= 0) { // build_huff reported a symbol to fill slices with
299  send = 0;
300  for (slice = 0; slice < c->slices; slice++) {
301  uint8_t *dest;
302 
303  sstart = send;
304  send = (height * (slice + 1) / c->slices) & cmask;
305  dest = dst + sstart * stride;
306 
307  prev = 0x80;
308  for (j = sstart; j < send; j++) {
309  for (i = 0; i < width; i++) {
310  pix = fsym;
311  if (use_pred) {
312  prev += pix;
313  pix = prev;
314  }
315  dest[i] = pix;
316  }
317  dest += stride;
318  }
319  }
320  return 0;
321  }
322 
323  src += 256;
324 
325  send = 0;
326  for (slice = 0; slice < c->slices; slice++) {
327  uint8_t *dest;
328  int slice_data_start, slice_data_end, slice_size;
329 
330  sstart = send;
331  send = (height * (slice + 1) / c->slices) & cmask;
332  dest = dst + sstart * stride;
333 
334  // slice offset and size validation was done earlier
335  slice_data_start = slice ? AV_RL32(src + slice * 4 - 4) : 0;
336  slice_data_end = AV_RL32(src + slice * 4);
337  slice_size = slice_data_end - slice_data_start;
338 
339  if (!slice_size) {
340  av_log(c->avctx, AV_LOG_ERROR, "Plane has more than one symbol "
341  "yet a slice has a length of zero.\n");
342  goto fail;
343  }
344 
345  memset(c->slice_bits + slice_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
346  c->bdsp.bswap_buf((uint32_t *) c->slice_bits,
347  (uint32_t *)(src + slice_data_start + c->slices * 4),
348  (slice_data_end - slice_data_start + 3) >> 2);
349  init_get_bits(&gb, c->slice_bits, slice_size * 8);
350 
351  prev = 0x80;
352  for (j = sstart; j < send; j++) {
353  for (i = 0; i < width; i++) {
354  pix = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
355  if (pix < 0) {
356  av_log(c->avctx, AV_LOG_ERROR, "Decoding error\n");
357  goto fail;
358  }
359  if (use_pred) {
360  prev += pix;
361  pix = prev;
362  }
363  dest[i] = pix;
364  }
 365  if (get_bits_left(&gb) < 0) {
 366  av_log(c->avctx, AV_LOG_ERROR,
 367  "Slice decoding ran out of bits\n");
368  goto fail;
369  }
370  dest += stride;
371  }
 372  if (get_bits_left(&gb) > 32)
 373  av_log(c->avctx, AV_LOG_DEBUG,
 374  "%d bits left after decoding slice\n", get_bits_left(&gb));
375  }
376 
377  ff_free_vlc(&vlc);
378 
379  return 0;
380 fail:
381  ff_free_vlc(&vlc);
382  return AVERROR_INVALIDDATA;
383 }
384 
385 #undef A
386 #undef B
387 #undef C
388 
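/*
 * Median prediction restore: the first line of a slice uses plain left
 * prediction (with the 0x80 bias added back to the first sample), the second
 * line predicts its first sample from the one above and the rest from the
 * median of left (A), top (B) and the gradient A + B - C (C = top-left),
 * and all remaining lines use the same median predictor via add_median_pred.
 */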
 389 static void restore_median_planar(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
 390  int width, int height, int slices, int rmode)
391 {
392  int i, j, slice;
393  int A, B, C;
394  uint8_t *bsrc;
395  int slice_start, slice_height;
396  const int cmask = ~rmode;
397 
398  for (slice = 0; slice < slices; slice++) {
399  slice_start = ((slice * height) / slices) & cmask;
400  slice_height = ((((slice + 1) * height) / slices) & cmask) -
401  slice_start;
402 
403  if (!slice_height)
404  continue;
405  bsrc = src + slice_start * stride;
406 
407  // first line - left neighbour prediction
408  bsrc[0] += 0x80;
409  c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
410  bsrc += stride;
411  if (slice_height <= 1)
412  continue;
413  // second line - first element has top prediction, the rest uses median
414  C = bsrc[-stride];
415  bsrc[0] += C;
416  A = bsrc[0];
417  for (i = 1; i < FFMIN(width, 16); i++) { /* scalar loop (DSP need align 16) */
418  B = bsrc[i - stride];
419  bsrc[i] += mid_pred(A, B, (uint8_t)(A + B - C));
420  C = B;
421  A = bsrc[i];
422  }
423  if (width > 16)
424  c->llviddsp.add_median_pred(bsrc + 16, bsrc - stride + 16,
425  bsrc + 16, width - 16, &A, &B);
426 
427  bsrc += stride;
428  // the rest of lines use continuous median prediction
429  for (j = 2; j < slice_height; j++) {
430  c->llviddsp.add_median_pred(bsrc, bsrc - stride,
431  bsrc, width, &A, &B);
432  bsrc += stride;
433  }
434  }
435 }
436 
437 /* UtVideo interlaced mode treats every two lines as a single one,
438  * so restoring function should take care of possible padding between
439  * two parts of the same "line".
440  */
 441 static void restore_median_planar_il(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
 442  int width, int height, int slices, int rmode)
443 {
444  int i, j, slice;
445  int A, B, C;
446  uint8_t *bsrc;
447  int slice_start, slice_height;
448  const int cmask = ~(rmode ? 3 : 1);
449  const ptrdiff_t stride2 = stride << 1;
450 
451  for (slice = 0; slice < slices; slice++) {
452  slice_start = ((slice * height) / slices) & cmask;
453  slice_height = ((((slice + 1) * height) / slices) & cmask) -
454  slice_start;
455  slice_height >>= 1;
456  if (!slice_height)
457  continue;
458 
459  bsrc = src + slice_start * stride;
460 
461  // first line - left neighbour prediction
462  bsrc[0] += 0x80;
463  A = c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
464  c->llviddsp.add_left_pred(bsrc + stride, bsrc + stride, width, A);
465  bsrc += stride2;
466  if (slice_height <= 1)
467  continue;
468  // second line - first element has top prediction, the rest uses median
469  C = bsrc[-stride2];
470  bsrc[0] += C;
471  A = bsrc[0];
472  for (i = 1; i < FFMIN(width, 16); i++) { /* scalar loop (DSP need align 16) */
473  B = bsrc[i - stride2];
474  bsrc[i] += mid_pred(A, B, (uint8_t)(A + B - C));
475  C = B;
476  A = bsrc[i];
477  }
478  if (width > 16)
479  c->llviddsp.add_median_pred(bsrc + 16, bsrc - stride2 + 16,
480  bsrc + 16, width - 16, &A, &B);
481 
482  c->llviddsp.add_median_pred(bsrc + stride, bsrc - stride,
483  bsrc + stride, width, &A, &B);
484  bsrc += stride2;
485  // the rest of lines use continuous median prediction
486  for (j = 2; j < slice_height; j++) {
487  c->llviddsp.add_median_pred(bsrc, bsrc - stride2,
488  bsrc, width, &A, &B);
489  c->llviddsp.add_median_pred(bsrc + stride, bsrc - stride,
490  bsrc + stride, width, &A, &B);
491  bsrc += stride2;
492  }
493  }
494 }
495 
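/*
 * Gradient prediction restore: apart from the first line of a slice (plain
 * left prediction), every sample adds the predictor top + left - top-left
 * (A - B + C below), reduced modulo 256.
 */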
 496 static void restore_gradient_planar(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
 497  int width, int height, int slices, int rmode)
498 {
499  int i, j, slice;
500  int A, B, C;
501  uint8_t *bsrc;
502  int slice_start, slice_height;
503  const int cmask = ~rmode;
504  int min_width = FFMIN(width, 32);
505 
506  for (slice = 0; slice < slices; slice++) {
507  slice_start = ((slice * height) / slices) & cmask;
508  slice_height = ((((slice + 1) * height) / slices) & cmask) -
509  slice_start;
510 
511  if (!slice_height)
512  continue;
513  bsrc = src + slice_start * stride;
514 
515  // first line - left neighbour prediction
516  bsrc[0] += 0x80;
517  c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
518  bsrc += stride;
519  if (slice_height <= 1)
520  continue;
521  for (j = 1; j < slice_height; j++) {
522  // second line - first element has top prediction, the rest uses gradient
523  bsrc[0] = (bsrc[0] + bsrc[-stride]) & 0xFF;
524  for (i = 1; i < min_width; i++) { /* dsp need align 32 */
525  A = bsrc[i - stride];
526  B = bsrc[i - (stride + 1)];
527  C = bsrc[i - 1];
528  bsrc[i] = (A - B + C + bsrc[i]) & 0xFF;
529  }
530  if (width > 32)
531  c->llviddsp.add_gradient_pred(bsrc + 32, stride, width - 32);
532  bsrc += stride;
533  }
534  }
535 }
536 
 537 static void restore_gradient_planar_il(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
 538  int width, int height, int slices, int rmode)
539 {
540  int i, j, slice;
541  int A, B, C;
542  uint8_t *bsrc;
543  int slice_start, slice_height;
544  const int cmask = ~(rmode ? 3 : 1);
545  const ptrdiff_t stride2 = stride << 1;
546  int min_width = FFMIN(width, 32);
547 
548  for (slice = 0; slice < slices; slice++) {
549  slice_start = ((slice * height) / slices) & cmask;
550  slice_height = ((((slice + 1) * height) / slices) & cmask) -
551  slice_start;
552  slice_height >>= 1;
553  if (!slice_height)
554  continue;
555 
556  bsrc = src + slice_start * stride;
557 
558  // first line - left neighbour prediction
559  bsrc[0] += 0x80;
560  A = c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
561  c->llviddsp.add_left_pred(bsrc + stride, bsrc + stride, width, A);
562  bsrc += stride2;
563  if (slice_height <= 1)
564  continue;
565  for (j = 1; j < slice_height; j++) {
566  // second line - first element has top prediction, the rest uses gradient
567  bsrc[0] = (bsrc[0] + bsrc[-stride2]) & 0xFF;
568  for (i = 1; i < min_width; i++) { /* dsp need align 32 */
569  A = bsrc[i - stride2];
570  B = bsrc[i - (stride2 + 1)];
571  C = bsrc[i - 1];
572  bsrc[i] = (A - B + C + bsrc[i]) & 0xFF;
573  }
574  if (width > 32)
575  c->llviddsp.add_gradient_pred(bsrc + 32, stride2, width - 32);
576 
577  A = bsrc[-stride];
578  B = bsrc[-(1 + stride + stride - width)];
579  C = bsrc[width - 1];
580  bsrc[stride] = (A - B + C + bsrc[stride]) & 0xFF;
581  for (i = 1; i < width; i++) {
582  A = bsrc[i - stride];
583  B = bsrc[i - (1 + stride)];
584  C = bsrc[i - 1 + stride];
585  bsrc[i + stride] = (A - B + C + bsrc[i + stride]) & 0xFF;
586  }
587  bsrc += stride2;
588  }
589  }
590 }
591 
592 static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
593  AVPacket *avpkt)
594 {
595  const uint8_t *buf = avpkt->data;
596  int buf_size = avpkt->size;
597  UtvideoContext *c = avctx->priv_data;
598  int i, j;
599  const uint8_t *plane_start[5];
600  int plane_size, max_slice_size = 0, slice_start, slice_end, slice_size;
601  int ret;
602  GetByteContext gb;
603  ThreadFrame frame = { .f = data };
604 
605  if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
606  return ret;
607 
608  /* parse plane structure to get frame flags and validate slice offsets */
609  bytestream2_init(&gb, buf, buf_size);
610 
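 /*
  * Packed ("UM??") frames: byte 0 is a marker that must be 1 and bytes 4-7
  * give the little-endian size of the stream area that starts at buf + 8.
  * A size table at buf + 8 + offset begins with the total number of
  * control-stream bytes, followed by per-plane/per-slice LE32 sizes for the
  * packed streams and then for the control streams; the control streams
  * occupy the last nb_cbs bytes of the stream area.
  */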
611  if (c->pack) {
612  const uint8_t *packed_stream;
613  const uint8_t *control_stream;
614  GetByteContext pb;
615  uint32_t nb_cbs;
616  int left;
617 
618  c->frame_info = PRED_GRADIENT << 8;
619 
620  if (bytestream2_get_byte(&gb) != 1)
621  return AVERROR_INVALIDDATA;
622  bytestream2_skip(&gb, 3);
623  c->offset = bytestream2_get_le32(&gb);
624 
625  if (buf_size <= c->offset + 8LL)
626  return AVERROR_INVALIDDATA;
627 
628  bytestream2_init(&pb, buf + 8 + c->offset, buf_size - 8 - c->offset);
629 
630  nb_cbs = bytestream2_get_le32(&pb);
631  if (nb_cbs > c->offset)
632  return AVERROR_INVALIDDATA;
633 
634  packed_stream = buf + 8;
635  control_stream = packed_stream + (c->offset - nb_cbs);
636  left = control_stream - packed_stream;
637 
638  for (i = 0; i < c->planes; i++) {
639  for (j = 0; j < c->slices; j++) {
640  c->packed_stream[i][j] = packed_stream;
641  c->packed_stream_size[i][j] = bytestream2_get_le32(&pb);
642  left -= c->packed_stream_size[i][j];
643  if (left < 0)
644  return AVERROR_INVALIDDATA;
645  packed_stream += c->packed_stream_size[i][j];
646  }
647  }
648 
649  left = buf + buf_size - control_stream;
650 
651  for (i = 0; i < c->planes; i++) {
652  for (j = 0; j < c->slices; j++) {
653  c->control_stream[i][j] = control_stream;
654  c->control_stream_size[i][j] = bytestream2_get_le32(&pb);
655  left -= c->control_stream_size[i][j];
656  if (left < 0)
657  return AVERROR_INVALIDDATA;
658  control_stream += c->control_stream_size[i][j];
659  }
660  }
 661  } else if (c->pro) {
 662  if (bytestream2_get_bytes_left(&gb) < c->frame_info_size) {
 663  av_log(avctx, AV_LOG_ERROR, "Not enough data for frame information\n");
664  return AVERROR_INVALIDDATA;
665  }
666  c->frame_info = bytestream2_get_le32u(&gb);
667  c->slices = ((c->frame_info >> 16) & 0xff) + 1;
668  for (i = 0; i < c->planes; i++) {
669  plane_start[i] = gb.buffer;
670  if (bytestream2_get_bytes_left(&gb) < 1024 + 4 * c->slices) {
671  av_log(avctx, AV_LOG_ERROR, "Insufficient data for a plane\n");
672  return AVERROR_INVALIDDATA;
673  }
674  slice_start = 0;
675  slice_end = 0;
676  for (j = 0; j < c->slices; j++) {
677  slice_end = bytestream2_get_le32u(&gb);
678  if (slice_end < 0 || slice_end < slice_start ||
679  bytestream2_get_bytes_left(&gb) < slice_end) {
680  av_log(avctx, AV_LOG_ERROR, "Incorrect slice size\n");
681  return AVERROR_INVALIDDATA;
682  }
683  slice_size = slice_end - slice_start;
684  slice_start = slice_end;
685  max_slice_size = FFMAX(max_slice_size, slice_size);
686  }
687  plane_size = slice_end;
688  bytestream2_skipu(&gb, plane_size);
689  bytestream2_skipu(&gb, 1024);
690  }
691  plane_start[c->planes] = gb.buffer;
692  } else {
693  for (i = 0; i < c->planes; i++) {
694  plane_start[i] = gb.buffer;
695  if (bytestream2_get_bytes_left(&gb) < 256 + 4 * c->slices) {
696  av_log(avctx, AV_LOG_ERROR, "Insufficient data for a plane\n");
697  return AVERROR_INVALIDDATA;
698  }
699  bytestream2_skipu(&gb, 256);
700  slice_start = 0;
701  slice_end = 0;
702  for (j = 0; j < c->slices; j++) {
703  slice_end = bytestream2_get_le32u(&gb);
704  if (slice_end < 0 || slice_end < slice_start ||
705  bytestream2_get_bytes_left(&gb) < slice_end) {
706  av_log(avctx, AV_LOG_ERROR, "Incorrect slice size\n");
707  return AVERROR_INVALIDDATA;
708  }
709  slice_size = slice_end - slice_start;
710  slice_start = slice_end;
711  max_slice_size = FFMAX(max_slice_size, slice_size);
712  }
713  plane_size = slice_end;
714  bytestream2_skipu(&gb, plane_size);
715  }
 716  plane_start[c->planes] = gb.buffer;
 717  if (bytestream2_get_bytes_left(&gb) < c->frame_info_size) {
 718  av_log(avctx, AV_LOG_ERROR, "Not enough data for frame information\n");
719  return AVERROR_INVALIDDATA;
720  }
721  c->frame_info = bytestream2_get_le32u(&gb);
722  }
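 /*
  * frame_info carries the per-frame flags: bits 8-9 select the prediction
  * mode used below (left, gradient or median), and for the Pro variants
  * bits 16-23 hold the slice count minus one.
  */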
723  av_log(avctx, AV_LOG_DEBUG, "frame information flags %"PRIX32"\n",
724  c->frame_info);
725 
726  c->frame_pred = (c->frame_info >> 8) & 3;
727 
728  max_slice_size += 4*avctx->width;
729 
 730  if (!c->pack) {
 731  av_fast_malloc(&c->slice_bits, &c->slice_bits_size,
 732  max_slice_size + AV_INPUT_BUFFER_PADDING_SIZE);
733 
734  if (!c->slice_bits) {
735  av_log(avctx, AV_LOG_ERROR, "Cannot allocate temporary buffer\n");
736  return AVERROR(ENOMEM);
737  }
738  }
739 
740  switch (c->avctx->pix_fmt) {
741  case AV_PIX_FMT_GBRP:
742  case AV_PIX_FMT_GBRAP:
743  for (i = 0; i < c->planes; i++) {
744  ret = decode_plane(c, i, frame.f->data[i],
745  frame.f->linesize[i], avctx->width,
746  avctx->height, plane_start[i],
747  c->frame_pred == PRED_LEFT);
748  if (ret)
749  return ret;
750  if (c->frame_pred == PRED_MEDIAN) {
751  if (!c->interlaced) {
752  restore_median_planar(c, frame.f->data[i],
753  frame.f->linesize[i], avctx->width,
754  avctx->height, c->slices, 0);
755  } else {
756  restore_median_planar_il(c, frame.f->data[i],
757  frame.f->linesize[i],
758  avctx->width, avctx->height, c->slices,
759  0);
760  }
761  } else if (c->frame_pred == PRED_GRADIENT) {
762  if (!c->interlaced) {
763  restore_gradient_planar(c, frame.f->data[i],
764  frame.f->linesize[i], avctx->width,
765  avctx->height, c->slices, 0);
766  } else {
767  restore_gradient_planar_il(c, frame.f->data[i],
768  frame.f->linesize[i],
769  avctx->width, avctx->height, c->slices,
770  0);
771  }
772  }
773  }
774  c->utdsp.restore_rgb_planes(frame.f->data[2], frame.f->data[0], frame.f->data[1],
775  frame.f->linesize[2], frame.f->linesize[0], frame.f->linesize[1],
776  avctx->width, avctx->height);
777  break;
778  case AV_PIX_FMT_GBRAP10:
779  case AV_PIX_FMT_GBRP10:
780  for (i = 0; i < c->planes; i++) {
781  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i],
782  frame.f->linesize[i] / 2, avctx->width,
783  avctx->height, plane_start[i],
784  plane_start[i + 1] - 1024,
785  c->frame_pred == PRED_LEFT);
786  if (ret)
787  return ret;
788  }
789  c->utdsp.restore_rgb_planes10((uint16_t *)frame.f->data[2], (uint16_t *)frame.f->data[0], (uint16_t *)frame.f->data[1],
790  frame.f->linesize[2] / 2, frame.f->linesize[0] / 2, frame.f->linesize[1] / 2,
791  avctx->width, avctx->height);
792  break;
793  case AV_PIX_FMT_YUV420P:
794  for (i = 0; i < 3; i++) {
795  ret = decode_plane(c, i, frame.f->data[i], frame.f->linesize[i],
796  avctx->width >> !!i, avctx->height >> !!i,
797  plane_start[i], c->frame_pred == PRED_LEFT);
798  if (ret)
799  return ret;
800  if (c->frame_pred == PRED_MEDIAN) {
801  if (!c->interlaced) {
802  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
803  avctx->width >> !!i, avctx->height >> !!i,
804  c->slices, !i);
805  } else {
806  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
807  avctx->width >> !!i,
808  avctx->height >> !!i,
809  c->slices, !i);
810  }
811  } else if (c->frame_pred == PRED_GRADIENT) {
812  if (!c->interlaced) {
813  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
814  avctx->width >> !!i, avctx->height >> !!i,
815  c->slices, !i);
816  } else {
817  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
818  avctx->width >> !!i,
819  avctx->height >> !!i,
820  c->slices, !i);
821  }
822  }
823  }
824  break;
825  case AV_PIX_FMT_YUV422P:
826  for (i = 0; i < 3; i++) {
827  ret = decode_plane(c, i, frame.f->data[i], frame.f->linesize[i],
828  avctx->width >> !!i, avctx->height,
829  plane_start[i], c->frame_pred == PRED_LEFT);
830  if (ret)
831  return ret;
832  if (c->frame_pred == PRED_MEDIAN) {
833  if (!c->interlaced) {
834  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
835  avctx->width >> !!i, avctx->height,
836  c->slices, 0);
837  } else {
838  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
839  avctx->width >> !!i, avctx->height,
840  c->slices, 0);
841  }
842  } else if (c->frame_pred == PRED_GRADIENT) {
843  if (!c->interlaced) {
844  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
845  avctx->width >> !!i, avctx->height,
846  c->slices, 0);
847  } else {
848  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
849  avctx->width >> !!i, avctx->height,
850  c->slices, 0);
851  }
852  }
853  }
854  break;
855  case AV_PIX_FMT_YUV444P:
856  for (i = 0; i < 3; i++) {
857  ret = decode_plane(c, i, frame.f->data[i], frame.f->linesize[i],
858  avctx->width, avctx->height,
859  plane_start[i], c->frame_pred == PRED_LEFT);
860  if (ret)
861  return ret;
862  if (c->frame_pred == PRED_MEDIAN) {
863  if (!c->interlaced) {
864  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
865  avctx->width, avctx->height,
866  c->slices, 0);
867  } else {
868  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
869  avctx->width, avctx->height,
870  c->slices, 0);
871  }
872  } else if (c->frame_pred == PRED_GRADIENT) {
873  if (!c->interlaced) {
874  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
875  avctx->width, avctx->height,
876  c->slices, 0);
877  } else {
878  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
879  avctx->width, avctx->height,
880  c->slices, 0);
881  }
882  }
883  }
 884  break;
 885  case AV_PIX_FMT_YUV422P10:
 886  for (i = 0; i < 3; i++) {
887  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i], frame.f->linesize[i] / 2,
888  avctx->width >> !!i, avctx->height,
889  plane_start[i], plane_start[i + 1] - 1024, c->frame_pred == PRED_LEFT);
890  if (ret)
891  return ret;
892  }
893  break;
894  }
895 
896  frame.f->key_frame = 1;
897  frame.f->pict_type = AV_PICTURE_TYPE_I;
898  frame.f->interlaced_frame = !!c->interlaced;
899 
900  *got_frame = 1;
901 
902  /* always report that the buffer was completely consumed */
903  return buf_size;
904 }
 905 
 906 static av_cold int decode_init(AVCodecContext *avctx)
 907 {
908  UtvideoContext * const c = avctx->priv_data;
909 
910  c->avctx = avctx;
 911 
 912  ff_utvideodsp_init(&c->utdsp);
 913  ff_bswapdsp_init(&c->bdsp);
 914  ff_llviddsp_init(&c->llviddsp);
 915 
916  c->slice_bits_size = 0;
917 
918  switch (avctx->codec_tag) {
919  case MKTAG('U', 'L', 'R', 'G'):
920  c->planes = 3;
921  avctx->pix_fmt = AV_PIX_FMT_GBRP;
922  break;
923  case MKTAG('U', 'L', 'R', 'A'):
924  c->planes = 4;
925  avctx->pix_fmt = AV_PIX_FMT_GBRAP;
926  break;
927  case MKTAG('U', 'L', 'Y', '0'):
928  c->planes = 3;
929  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
930  avctx->colorspace = AVCOL_SPC_BT470BG;
931  break;
932  case MKTAG('U', 'L', 'Y', '2'):
933  c->planes = 3;
934  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
935  avctx->colorspace = AVCOL_SPC_BT470BG;
936  break;
937  case MKTAG('U', 'L', 'Y', '4'):
938  c->planes = 3;
939  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
940  avctx->colorspace = AVCOL_SPC_BT470BG;
941  break;
942  case MKTAG('U', 'Q', 'Y', '2'):
943  c->planes = 3;
944  avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
945  break;
946  case MKTAG('U', 'Q', 'R', 'G'):
947  c->planes = 3;
948  avctx->pix_fmt = AV_PIX_FMT_GBRP10;
949  break;
950  case MKTAG('U', 'Q', 'R', 'A'):
951  c->planes = 4;
952  avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
953  break;
954  case MKTAG('U', 'L', 'H', '0'):
955  c->planes = 3;
956  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
957  avctx->colorspace = AVCOL_SPC_BT709;
958  break;
959  case MKTAG('U', 'L', 'H', '2'):
960  c->planes = 3;
961  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
962  avctx->colorspace = AVCOL_SPC_BT709;
963  break;
964  case MKTAG('U', 'L', 'H', '4'):
965  c->planes = 3;
966  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
967  avctx->colorspace = AVCOL_SPC_BT709;
968  break;
969  case MKTAG('U', 'M', 'Y', '2'):
970  c->planes = 3;
971  c->pack = 1;
972  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
973  avctx->colorspace = AVCOL_SPC_BT470BG;
974  break;
975  case MKTAG('U', 'M', 'H', '2'):
976  c->planes = 3;
977  c->pack = 1;
978  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
979  avctx->colorspace = AVCOL_SPC_BT709;
980  break;
981  case MKTAG('U', 'M', 'Y', '4'):
982  c->planes = 3;
983  c->pack = 1;
984  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
985  avctx->colorspace = AVCOL_SPC_BT470BG;
986  break;
987  case MKTAG('U', 'M', 'H', '4'):
988  c->planes = 3;
989  c->pack = 1;
990  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
991  avctx->colorspace = AVCOL_SPC_BT709;
992  break;
993  case MKTAG('U', 'M', 'R', 'G'):
994  c->planes = 3;
995  c->pack = 1;
996  avctx->pix_fmt = AV_PIX_FMT_GBRP;
997  break;
998  case MKTAG('U', 'M', 'R', 'A'):
999  c->planes = 4;
1000  c->pack = 1;
1001  avctx->pix_fmt = AV_PIX_FMT_GBRAP;
1002  break;
1003  default:
1004  av_log(avctx, AV_LOG_ERROR, "Unknown Ut Video FOURCC provided (%08X)\n",
1005  avctx->codec_tag);
1006  return AVERROR_INVALIDDATA;
1007  }
1008 
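 /*
  * Extradata layout: bytes 0-3 encoder version, bytes 4-7 the original
  * format tag. The classic variants add the per-frame info size in bytes
  * 8-11 and a flags word in bytes 12-15 (bit 0 compression, bit 11
  * interlacing, bits 24-31 slice count minus one); the packed variants put
  * the compression type in byte 8 and the slice count minus one in byte 9.
  * An 8-byte extradata selects the Pro code path.
  */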
1009  if (c->pack && avctx->extradata_size >= 16) {
1010  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
1011  avctx->extradata[3], avctx->extradata[2],
1012  avctx->extradata[1], avctx->extradata[0]);
1013  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
1014  AV_RB32(avctx->extradata + 4));
1015  c->compression = avctx->extradata[8];
1016  if (c->compression != 2)
1017  avpriv_request_sample(avctx, "Unknown compression type");
1018  c->slices = avctx->extradata[9] + 1;
1019  } else if (avctx->extradata_size >= 16) {
1020  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
1021  avctx->extradata[3], avctx->extradata[2],
1022  avctx->extradata[1], avctx->extradata[0]);
1023  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
1024  AV_RB32(avctx->extradata + 4));
1025  c->frame_info_size = AV_RL32(avctx->extradata + 8);
1026  c->flags = AV_RL32(avctx->extradata + 12);
1027 
1028  if (c->frame_info_size != 4)
1029  avpriv_request_sample(avctx, "Frame info not 4 bytes");
1030  av_log(avctx, AV_LOG_DEBUG, "Encoding parameters %08"PRIX32"\n", c->flags);
1031  c->slices = (c->flags >> 24) + 1;
1032  c->compression = c->flags & 1;
1033  c->interlaced = c->flags & 0x800;
1034  } else if (avctx->extradata_size == 8) {
1035  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
1036  avctx->extradata[3], avctx->extradata[2],
1037  avctx->extradata[1], avctx->extradata[0]);
1038  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
1039  AV_RB32(avctx->extradata + 4));
1040  c->interlaced = 0;
1041  c->pro = 1;
1042  c->frame_info_size = 4;
1043  } else {
1044  av_log(avctx, AV_LOG_ERROR,
1045  "Insufficient extradata size %d, should be at least 16\n",
1046  avctx->extradata_size);
1047  return AVERROR_INVALIDDATA;
1048  }
1049 
1050  return 0;
1051 }
 1052 
 1053 static av_cold int decode_end(AVCodecContext *avctx)
 1054 {
1055  UtvideoContext * const c = avctx->priv_data;
1056 
1057  av_freep(&c->slice_bits);
1058 
1059  return 0;
1060 }
 1061 
 1062 AVCodec ff_utvideo_decoder = {
 1063  .name = "utvideo",
1064  .long_name = NULL_IF_CONFIG_SMALL("Ut Video"),
1065  .type = AVMEDIA_TYPE_VIDEO,
1066  .id = AV_CODEC_ID_UTVIDEO,
1067  .priv_data_size = sizeof(UtvideoContext),
1068  .init = decode_init,
1069  .close = decode_end,
1070  .decode = decode_frame,
1071  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
1072  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
1073 };