utvideodec.c
1 /*
2  * Ut Video decoder
3  * Copyright (c) 2011 Konstantin Shishkov
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Ut Video decoder
25  */
26 
27 #include <inttypes.h>
28 #include <stdlib.h>
29 
30 #define UNCHECKED_BITSTREAM_READER 1
31 
32 #include "libavutil/intreadwrite.h"
33 #include "avcodec.h"
34 #include "bswapdsp.h"
35 #include "bytestream.h"
36 #include "get_bits.h"
37 #include "internal.h"
38 #include "thread.h"
39 #include "utvideo.h"
40 
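/*
 * Build the VLC table for a 10-bit plane from the 1024 code lengths stored in
 * the bitstream. The lengths are sorted into canonical Huffman order; if the
 * shortest length is zero the plane consists of a single symbol, which is
 * returned through *fsym instead of building a table.
 */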
41 static int build_huff10(const uint8_t *src, VLC *vlc, int *fsym)
42 {
43  int i;
44  HuffEntry he[1024];
45  int last;
46  uint32_t codes[1024];
47  uint8_t bits[1024];
48  uint16_t syms[1024];
49  uint32_t code;
50 
51  *fsym = -1;
52  for (i = 0; i < 1024; i++) {
53  he[i].sym = i;
54  he[i].len = *src++;
55  }
56  qsort(he, 1024, sizeof(*he), ff_ut10_huff_cmp_len);
57 
58  if (!he[0].len) {
59  *fsym = he[0].sym;
60  return 0;
61  }
62 
63  last = 1023;
64  while (he[last].len == 255 && last)
65  last--;
66 
67  if (he[last].len > 32) {
68  return -1;
69  }
70 
71  code = 1;
72  for (i = last; i >= 0; i--) {
73  codes[i] = code >> (32 - he[i].len);
74  bits[i] = he[i].len;
75  syms[i] = he[i].sym;
76  code += 0x80000000u >> (he[i].len - 1);
77  }
78 #define VLC_BITS 11
79  return ff_init_vlc_sparse(vlc, VLC_BITS, last + 1,
80  bits, sizeof(*bits), sizeof(*bits),
81  codes, sizeof(*codes), sizeof(*codes),
82  syms, sizeof(*syms), sizeof(*syms), 0);
83 }
84 
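/*
 * 8-bit variant of build_huff10(): reads 256 code lengths and builds the
 * canonical Huffman VLC, again signalling a single-symbol plane via *fsym.
 */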
85 static int build_huff(const uint8_t *src, VLC *vlc, int *fsym)
86 {
87  int i;
88  HuffEntry he[256];
89  int last;
90  uint32_t codes[256];
91  uint8_t bits[256];
92  uint8_t syms[256];
93  uint32_t code;
94 
95  *fsym = -1;
96  for (i = 0; i < 256; i++) {
97  he[i].sym = i;
98  he[i].len = *src++;
99  }
100  qsort(he, 256, sizeof(*he), ff_ut_huff_cmp_len);
101 
102  if (!he[0].len) {
103  *fsym = he[0].sym;
104  return 0;
105  }
106 
107  last = 255;
108  while (he[last].len == 255 && last)
109  last--;
110 
111  if (he[last].len > 32)
112  return -1;
113 
114  code = 1;
115  for (i = last; i >= 0; i--) {
116  codes[i] = code >> (32 - he[i].len);
117  bits[i] = he[i].len;
118  syms[i] = he[i].sym;
119  code += 0x80000000u >> (he[i].len - 1);
120  }
121 
122  return ff_init_vlc_sparse(vlc, VLC_BITS, last + 1,
123  bits, sizeof(*bits), sizeof(*bits),
124  codes, sizeof(*codes), sizeof(*codes),
125  syms, sizeof(*syms), sizeof(*syms), 0);
126 }
127 
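/*
 * Decode one 10-bit plane. Each slice is byte-swapped into c->slice_bits and
 * decoded symbol by symbol; when use_pred is set, left prediction is applied
 * with a 0x200 seed and the result is wrapped to 10 bits.
 */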
128 static int decode_plane10(UtvideoContext *c, int plane_no,
129  uint16_t *dst, ptrdiff_t stride,
130  int width, int height,
131  const uint8_t *src, const uint8_t *huff,
132  int use_pred)
133 {
134  int i, j, slice, pix, ret;
135  int sstart, send;
136  VLC vlc;
137  GetBitContext gb;
138  int prev, fsym;
139 
140  if ((ret = build_huff10(huff, &vlc, &fsym)) < 0) {
141  av_log(c->avctx, AV_LOG_ERROR, "Cannot build Huffman codes\n");
142  return ret;
143  }
144  if (fsym >= 0) { // build_huff reported a symbol to fill slices with
145  send = 0;
146  for (slice = 0; slice < c->slices; slice++) {
147  uint16_t *dest;
148 
149  sstart = send;
150  send = (height * (slice + 1) / c->slices);
151  dest = dst + sstart * stride;
152 
153  prev = 0x200;
154  for (j = sstart; j < send; j++) {
155  for (i = 0; i < width; i++) {
156  pix = fsym;
157  if (use_pred) {
158  prev += pix;
159  prev &= 0x3FF;
160  pix = prev;
161  }
162  dest[i] = pix;
163  }
164  dest += stride;
165  }
166  }
167  return 0;
168  }
169 
170  send = 0;
171  for (slice = 0; slice < c->slices; slice++) {
172  uint16_t *dest;
173  int slice_data_start, slice_data_end, slice_size;
174 
175  sstart = send;
176  send = (height * (slice + 1) / c->slices);
177  dest = dst + sstart * stride;
178 
179  // slice offset and size validation was done earlier
180  slice_data_start = slice ? AV_RL32(src + slice * 4 - 4) : 0;
181  slice_data_end = AV_RL32(src + slice * 4);
182  slice_size = slice_data_end - slice_data_start;
183 
184  if (!slice_size) {
185  av_log(c->avctx, AV_LOG_ERROR, "Plane has more than one symbol "
186  "yet a slice has a length of zero.\n");
187  goto fail;
188  }
189 
190  memset(c->slice_bits + slice_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
191  c->bdsp.bswap_buf((uint32_t *) c->slice_bits,
192  (uint32_t *)(src + slice_data_start + c->slices * 4),
193  (slice_data_end - slice_data_start + 3) >> 2);
194  init_get_bits(&gb, c->slice_bits, slice_size * 8);
195 
196  prev = 0x200;
197  for (j = sstart; j < send; j++) {
198  for (i = 0; i < width; i++) {
199  pix = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
200  if (pix < 0) {
201  av_log(c->avctx, AV_LOG_ERROR, "Decoding error\n");
202  goto fail;
203  }
204  if (use_pred) {
205  prev += pix;
206  prev &= 0x3FF;
207  pix = prev;
208  }
209  dest[i] = pix;
210  }
211  dest += stride;
212  if (get_bits_left(&gb) < 0) {
213  av_log(c->avctx, AV_LOG_ERROR,
214  "Slice decoding ran out of bits\n");
215  goto fail;
216  }
217  }
218  if (get_bits_left(&gb) > 32)
219  av_log(c->avctx, AV_LOG_WARNING,
220  "%d bits left after decoding slice\n", get_bits_left(&gb));
221  }
222 
223  ff_free_vlc(&vlc);
224 
225  return 0;
226 fail:
227  ff_free_vlc(&vlc);
228  return AVERROR_INVALIDDATA;
229 }
230 
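/*
 * Mask used to round slice boundaries down: the luma plane of 4:2:0 content
 * must split on even lines (multiples of four when interlaced) so that the
 * subsampled chroma slices stay aligned, and interlaced content always splits
 * on whole line pairs.
 */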
231 static int compute_cmask(int plane_no, int interlaced, enum AVPixelFormat pix_fmt)
232 {
233  const int is_luma = (pix_fmt == AV_PIX_FMT_YUV420P) && !plane_no;
234 
235  if (interlaced)
236  return ~(1 + 2 * is_luma);
237 
238  return ~is_luma;
239 }
240 
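/*
 * Decode one 8-bit plane. The plane payload starts with 256 code lengths,
 * followed by a table of c->slices little-endian slice end offsets and the
 * slice data itself; left prediction uses a 0x80 seed.
 */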
241 static int decode_plane(UtvideoContext *c, int plane_no,
242  uint8_t *dst, ptrdiff_t stride,
243  int width, int height,
244  const uint8_t *src, int use_pred)
245 {
246  int i, j, slice, pix;
247  int sstart, send;
248  VLC vlc;
249  GetBitContext gb;
250  int prev, fsym;
251  const int cmask = compute_cmask(plane_no, c->interlaced, c->avctx->pix_fmt);
252 
253  if (build_huff(src, &vlc, &fsym)) {
254  av_log(c->avctx, AV_LOG_ERROR, "Cannot build Huffman codes\n");
255  return AVERROR_INVALIDDATA;
256  }
257  if (fsym >= 0) { // build_huff reported a symbol to fill slices with
258  send = 0;
259  for (slice = 0; slice < c->slices; slice++) {
260  uint8_t *dest;
261 
262  sstart = send;
263  send = (height * (slice + 1) / c->slices) & cmask;
264  dest = dst + sstart * stride;
265 
266  prev = 0x80;
267  for (j = sstart; j < send; j++) {
268  for (i = 0; i < width; i++) {
269  pix = fsym;
270  if (use_pred) {
271  prev += pix;
272  pix = prev;
273  }
274  dest[i] = pix;
275  }
276  dest += stride;
277  }
278  }
279  return 0;
280  }
281 
282  src += 256;
283 
284  send = 0;
285  for (slice = 0; slice < c->slices; slice++) {
286  uint8_t *dest;
287  int slice_data_start, slice_data_end, slice_size;
288 
289  sstart = send;
290  send = (height * (slice + 1) / c->slices) & cmask;
291  dest = dst + sstart * stride;
292 
293  // slice offset and size validation was done earlier
294  slice_data_start = slice ? AV_RL32(src + slice * 4 - 4) : 0;
295  slice_data_end = AV_RL32(src + slice * 4);
296  slice_size = slice_data_end - slice_data_start;
297 
298  if (!slice_size) {
299  av_log(c->avctx, AV_LOG_ERROR, "Plane has more than one symbol "
300  "yet a slice has a length of zero.\n");
301  goto fail;
302  }
303 
304  memset(c->slice_bits + slice_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
305  c->bdsp.bswap_buf((uint32_t *) c->slice_bits,
306  (uint32_t *)(src + slice_data_start + c->slices * 4),
307  (slice_data_end - slice_data_start + 3) >> 2);
308  init_get_bits(&gb, c->slice_bits, slice_size * 8);
309 
310  prev = 0x80;
311  for (j = sstart; j < send; j++) {
312  for (i = 0; i < width; i++) {
313  pix = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
314  if (pix < 0) {
315  av_log(c->avctx, AV_LOG_ERROR, "Decoding error\n");
316  goto fail;
317  }
318  if (use_pred) {
319  prev += pix;
320  pix = prev;
321  }
322  dest[i] = pix;
323  }
324  if (get_bits_left(&gb) < 0) {
325  av_log(c->avctx, AV_LOG_ERROR,
326  "Slice decoding ran out of bits\n");
327  goto fail;
328  }
329  dest += stride;
330  }
331  if (get_bits_left(&gb) > 32)
332  av_log(c->avctx, AV_LOG_WARNING,
333  "%d bits left after decoding slice\n", get_bits_left(&gb));
334  }
335 
336  ff_free_vlc(&vlc);
337 
338  return 0;
339 fail:
340  ff_free_vlc(&vlc);
341  return AVERROR_INVALIDDATA;
342 }
343 
344 #undef A
345 #undef B
346 #undef C
347 
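/*
 * Undo median prediction on a progressive plane: the first line of each slice
 * is left-predicted, the second line predicts its first pixel from the top
 * neighbour and the rest with the median predictor, and all remaining lines
 * use the DSP median predictor with running left/top-left state.
 */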
348 static void restore_median_planar(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
349  int width, int height, int slices, int rmode)
350 {
351  int i, j, slice;
352  int A, B, C;
353  uint8_t *bsrc;
354  int slice_start, slice_height;
355  const int cmask = ~rmode;
356 
357  for (slice = 0; slice < slices; slice++) {
358  slice_start = ((slice * height) / slices) & cmask;
359  slice_height = ((((slice + 1) * height) / slices) & cmask) -
360  slice_start;
361 
362  if (!slice_height)
363  continue;
364  bsrc = src + slice_start * stride;
365 
366  // first line - left neighbour prediction
367  bsrc[0] += 0x80;
368  c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
369  bsrc += stride;
370  if (slice_height <= 1)
371  continue;
372  // second line - first element has top prediction, the rest uses median
373  C = bsrc[-stride];
374  bsrc[0] += C;
375  A = bsrc[0];
376  for (i = 1; i < width; i++) {
377  B = bsrc[i - stride];
378  bsrc[i] += mid_pred(A, B, (uint8_t)(A + B - C));
379  C = B;
380  A = bsrc[i];
381  }
382  bsrc += stride;
383  // the rest of lines use continuous median prediction
384  for (j = 2; j < slice_height; j++) {
385  c->llviddsp.add_median_pred(bsrc, bsrc - stride,
386  bsrc, width, &A, &B);
387  bsrc += stride;
388  }
389  }
390 }
391 
392 /* UtVideo interlaced mode treats every two lines as a single one,
393  * so restoring function should take care of possible padding between
394  * two parts of the same "line".
395  */
396 static void restore_median_planar_il(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
397  int width, int height, int slices, int rmode)
398 {
399  int i, j, slice;
400  int A, B, C;
401  uint8_t *bsrc;
402  int slice_start, slice_height;
403  const int cmask = ~(rmode ? 3 : 1);
404  const ptrdiff_t stride2 = stride << 1;
405 
406  for (slice = 0; slice < slices; slice++) {
407  slice_start = ((slice * height) / slices) & cmask;
408  slice_height = ((((slice + 1) * height) / slices) & cmask) -
409  slice_start;
410  slice_height >>= 1;
411  if (!slice_height)
412  continue;
413 
414  bsrc = src + slice_start * stride;
415 
416  // first line - left neighbour prediction
417  bsrc[0] += 0x80;
418  A = c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
419  c->llviddsp.add_left_pred(bsrc + stride, bsrc + stride, width, A);
420  bsrc += stride2;
421  if (slice_height <= 1)
422  continue;
423  // second line - first element has top prediction, the rest uses median
424  C = bsrc[-stride2];
425  bsrc[0] += C;
426  A = bsrc[0];
427  for (i = 1; i < width; i++) {
428  B = bsrc[i - stride2];
429  bsrc[i] += mid_pred(A, B, (uint8_t)(A + B - C));
430  C = B;
431  A = bsrc[i];
432  }
433  c->llviddsp.add_median_pred(bsrc + stride, bsrc - stride,
434  bsrc + stride, width, &A, &B);
435  bsrc += stride2;
436  // the rest of lines use continuous median prediction
437  for (j = 2; j < slice_height; j++) {
438  c->llviddsp.add_median_pred(bsrc, bsrc - stride2,
439  bsrc, width, &A, &B);
440  c->llviddsp.add_median_pred(bsrc + stride, bsrc - stride,
441  bsrc + stride, width, &A, &B);
442  bsrc += stride2;
443  }
444  }
445 }
446 
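/*
 * Undo gradient prediction on a progressive plane: the first line of each
 * slice is left-predicted, and every following pixel adds the gradient
 * A - B + C formed from its top, top-left and left neighbours.
 */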
447 static void restore_gradient_planar(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
448  int width, int height, int slices, int rmode)
449 {
450  int i, j, slice;
451  int A, B, C;
452  uint8_t *bsrc;
453  int slice_start, slice_height;
454  const int cmask = ~rmode;
455 
456  for (slice = 0; slice < slices; slice++) {
457  slice_start = ((slice * height) / slices) & cmask;
458  slice_height = ((((slice + 1) * height) / slices) & cmask) -
459  slice_start;
460 
461  if (!slice_height)
462  continue;
463  bsrc = src + slice_start * stride;
464 
465  // first line - left neighbour prediction
466  bsrc[0] += 0x80;
467  c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
468  bsrc += stride;
469  if (slice_height <= 1)
470  continue;
471  for (j = 1; j < slice_height; j++) {
472  // second line - first element has top prediction, the rest uses gradient
473  bsrc[0] = (bsrc[0] + bsrc[-stride]) & 0xFF;
474  for (i = 1; i < width; i++) {
475  A = bsrc[i - stride];
476  B = bsrc[i - (stride + 1)];
477  C = bsrc[i - 1];
478  bsrc[i] = (A - B + C + bsrc[i]) & 0xFF;
479  }
480  bsrc += stride;
481  }
482  }
483 }
484 
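/*
 * Interlaced variant of restore_gradient_planar(): each pair of field lines
 * is treated as one coded line, so the top neighbours come from the previous
 * pair (stride2 away) and the first pixel of the second line in a pair is
 * predicted from the end of the first line.
 */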
485 static void restore_gradient_planar_il(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
486  int width, int height, int slices, int rmode)
487 {
488  int i, j, slice;
489  int A, B, C;
490  uint8_t *bsrc;
491  int slice_start, slice_height;
492  const int cmask = ~(rmode ? 3 : 1);
493  const ptrdiff_t stride2 = stride << 1;
494 
495  for (slice = 0; slice < slices; slice++) {
496  slice_start = ((slice * height) / slices) & cmask;
497  slice_height = ((((slice + 1) * height) / slices) & cmask) -
498  slice_start;
499  slice_height >>= 1;
500  if (!slice_height)
501  continue;
502 
503  bsrc = src + slice_start * stride;
504 
505  // first line - left neighbour prediction
506  bsrc[0] += 0x80;
507  A = c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
508  c->llviddsp.add_left_pred(bsrc + stride, bsrc + stride, width, A);
509  bsrc += stride2;
510  if (slice_height <= 1)
511  continue;
512  for (j = 1; j < slice_height; j++) {
513  // second line - first element has top prediction, the rest uses gradient
514  bsrc[0] = (bsrc[0] + bsrc[-stride2]) & 0xFF;
515  for (i = 1; i < width; i++) {
516  A = bsrc[i - stride2];
517  B = bsrc[i - (stride2 + 1)];
518  C = bsrc[i - 1];
519  bsrc[i] = (A - B + C + bsrc[i]) & 0xFF;
520  }
521  A = bsrc[-stride];
522  B = bsrc[-(1 + stride + stride - width)];
523  C = bsrc[width - 1];
524  bsrc[stride] = (A - B + C + bsrc[stride]) & 0xFF;
525  for (i = 1; i < width; i++) {
526  A = bsrc[i - stride];
527  B = bsrc[i - (1 + stride)];
528  C = bsrc[i - 1 + stride];
529  bsrc[i + stride] = (A - B + C + bsrc[i + stride]) & 0xFF;
530  }
531  bsrc += stride2;
532  }
533  }
534 }
535 
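/*
 * Decode a whole frame: validate the per-plane slice offset tables, read the
 * frame information word to get the prediction mode, then decode every plane
 * and undo median or gradient prediction as required by the pixel format.
 */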
536 static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
537  AVPacket *avpkt)
538 {
539  const uint8_t *buf = avpkt->data;
540  int buf_size = avpkt->size;
541  UtvideoContext *c = avctx->priv_data;
542  int i, j;
543  const uint8_t *plane_start[5];
544  int plane_size, max_slice_size = 0, slice_start, slice_end, slice_size;
545  int ret;
546  GetByteContext gb;
547  ThreadFrame frame = { .f = data };
548 
549  if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
550  return ret;
551 
552  /* parse plane structure to get frame flags and validate slice offsets */
553  bytestream2_init(&gb, buf, buf_size);
554  if (c->pro) {
555  if (bytestream2_get_bytes_left(&gb) < c->frame_info_size) {
556  av_log(avctx, AV_LOG_ERROR, "Not enough data for frame information\n");
557  return AVERROR_INVALIDDATA;
558  }
559  c->frame_info = bytestream2_get_le32u(&gb);
560  c->slices = ((c->frame_info >> 16) & 0xff) + 1;
561  for (i = 0; i < c->planes; i++) {
562  plane_start[i] = gb.buffer;
563  if (bytestream2_get_bytes_left(&gb) < 1024 + 4 * c->slices) {
564  av_log(avctx, AV_LOG_ERROR, "Insufficient data for a plane\n");
565  return AVERROR_INVALIDDATA;
566  }
567  slice_start = 0;
568  slice_end = 0;
569  for (j = 0; j < c->slices; j++) {
570  slice_end = bytestream2_get_le32u(&gb);
571  if (slice_end < 0 || slice_end < slice_start ||
572  bytestream2_get_bytes_left(&gb) < slice_end) {
573  av_log(avctx, AV_LOG_ERROR, "Incorrect slice size\n");
574  return AVERROR_INVALIDDATA;
575  }
576  slice_size = slice_end - slice_start;
577  slice_start = slice_end;
578  max_slice_size = FFMAX(max_slice_size, slice_size);
579  }
580  plane_size = slice_end;
581  bytestream2_skipu(&gb, plane_size);
582  bytestream2_skipu(&gb, 1024);
583  }
584  plane_start[c->planes] = gb.buffer;
585  } else {
586  for (i = 0; i < c->planes; i++) {
587  plane_start[i] = gb.buffer;
588  if (bytestream2_get_bytes_left(&gb) < 256 + 4 * c->slices) {
589  av_log(avctx, AV_LOG_ERROR, "Insufficient data for a plane\n");
590  return AVERROR_INVALIDDATA;
591  }
592  bytestream2_skipu(&gb, 256);
593  slice_start = 0;
594  slice_end = 0;
595  for (j = 0; j < c->slices; j++) {
596  slice_end = bytestream2_get_le32u(&gb);
597  if (slice_end < 0 || slice_end < slice_start ||
598  bytestream2_get_bytes_left(&gb) < slice_end) {
599  av_log(avctx, AV_LOG_ERROR, "Incorrect slice size\n");
600  return AVERROR_INVALIDDATA;
601  }
602  slice_size = slice_end - slice_start;
603  slice_start = slice_end;
604  max_slice_size = FFMAX(max_slice_size, slice_size);
605  }
606  plane_size = slice_end;
607  bytestream2_skipu(&gb, plane_size);
608  }
609  plane_start[c->planes] = gb.buffer;
610  if (bytestream2_get_bytes_left(&gb) < c->frame_info_size) {
611  av_log(avctx, AV_LOG_ERROR, "Not enough data for frame information\n");
612  return AVERROR_INVALIDDATA;
613  }
614  c->frame_info = bytestream2_get_le32u(&gb);
615  }
616  av_log(avctx, AV_LOG_DEBUG, "frame information flags %"PRIX32"\n",
617  c->frame_info);
618 
619  c->frame_pred = (c->frame_info >> 8) & 3;
620 
621  max_slice_size += 4*avctx->width;
622 
623  av_fast_malloc(&c->slice_bits, &c->slice_bits_size,
624  max_slice_size + AV_INPUT_BUFFER_PADDING_SIZE);
625 
626  if (!c->slice_bits) {
627  av_log(avctx, AV_LOG_ERROR, "Cannot allocate temporary buffer\n");
628  return AVERROR(ENOMEM);
629  }
630 
631  switch (c->avctx->pix_fmt) {
632  case AV_PIX_FMT_GBRP:
633  case AV_PIX_FMT_GBRAP:
634  for (i = 0; i < c->planes; i++) {
635  ret = decode_plane(c, i, frame.f->data[i],
636  frame.f->linesize[i], avctx->width,
637  avctx->height, plane_start[i],
638  c->frame_pred == PRED_LEFT);
639  if (ret)
640  return ret;
641  if (c->frame_pred == PRED_MEDIAN) {
642  if (!c->interlaced) {
643  restore_median_planar(c, frame.f->data[i],
644  frame.f->linesize[i], avctx->width,
645  avctx->height, c->slices, 0);
646  } else {
647  restore_median_planar_il(c, frame.f->data[i],
648  frame.f->linesize[i],
649  avctx->width, avctx->height, c->slices,
650  0);
651  }
652  } else if (c->frame_pred == PRED_GRADIENT) {
653  if (!c->interlaced) {
654  restore_gradient_planar(c, frame.f->data[i],
655  frame.f->linesize[i], avctx->width,
656  avctx->height, c->slices, 0);
657  } else {
658  restore_gradient_planar_il(c, frame.f->data[i],
659  frame.f->linesize[i],
660  avctx->width, avctx->height, c->slices,
661  0);
662  }
663  }
664  }
665  c->utdsp.restore_rgb_planes(frame.f->data[2], frame.f->data[0], frame.f->data[1],
666  frame.f->linesize[2], frame.f->linesize[0], frame.f->linesize[1],
667  avctx->width, avctx->height);
668  break;
669  case AV_PIX_FMT_GBRAP10:
670  case AV_PIX_FMT_GBRP10:
671  for (i = 0; i < c->planes; i++) {
672  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i],
673  frame.f->linesize[i] / 2, avctx->width,
674  avctx->height, plane_start[i],
675  plane_start[i + 1] - 1024,
676  c->frame_pred == PRED_LEFT);
677  if (ret)
678  return ret;
679  }
680  c->utdsp.restore_rgb_planes10((uint16_t *)frame.f->data[2], (uint16_t *)frame.f->data[0], (uint16_t *)frame.f->data[1],
681  frame.f->linesize[2] / 2, frame.f->linesize[0] / 2, frame.f->linesize[1] / 2,
682  avctx->width, avctx->height);
683  break;
684  case AV_PIX_FMT_YUV420P:
685  for (i = 0; i < 3; i++) {
686  ret = decode_plane(c, i, frame.f->data[i], frame.f->linesize[i],
687  avctx->width >> !!i, avctx->height >> !!i,
688  plane_start[i], c->frame_pred == PRED_LEFT);
689  if (ret)
690  return ret;
691  if (c->frame_pred == PRED_MEDIAN) {
692  if (!c->interlaced) {
693  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
694  avctx->width >> !!i, avctx->height >> !!i,
695  c->slices, !i);
696  } else {
697  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
698  avctx->width >> !!i,
699  avctx->height >> !!i,
700  c->slices, !i);
701  }
702  } else if (c->frame_pred == PRED_GRADIENT) {
703  if (!c->interlaced) {
704  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
705  avctx->width >> !!i, avctx->height >> !!i,
706  c->slices, !i);
707  } else {
708  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
709  avctx->width >> !!i,
710  avctx->height >> !!i,
711  c->slices, !i);
712  }
713  }
714  }
715  break;
716  case AV_PIX_FMT_YUV422P:
717  for (i = 0; i < 3; i++) {
718  ret = decode_plane(c, i, frame.f->data[i], frame.f->linesize[i],
719  avctx->width >> !!i, avctx->height,
720  plane_start[i], c->frame_pred == PRED_LEFT);
721  if (ret)
722  return ret;
723  if (c->frame_pred == PRED_MEDIAN) {
724  if (!c->interlaced) {
725  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
726  avctx->width >> !!i, avctx->height,
727  c->slices, 0);
728  } else {
729  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
730  avctx->width >> !!i, avctx->height,
731  c->slices, 0);
732  }
733  } else if (c->frame_pred == PRED_GRADIENT) {
734  if (!c->interlaced) {
735  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
736  avctx->width >> !!i, avctx->height,
737  c->slices, 0);
738  } else {
739  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
740  avctx->width >> !!i, avctx->height,
741  c->slices, 0);
742  }
743  }
744  }
745  break;
746  case AV_PIX_FMT_YUV444P:
747  for (i = 0; i < 3; i++) {
748  ret = decode_plane(c, i, frame.f->data[i], frame.f->linesize[i],
749  avctx->width, avctx->height,
750  plane_start[i], c->frame_pred == PRED_LEFT);
751  if (ret)
752  return ret;
753  if (c->frame_pred == PRED_MEDIAN) {
754  if (!c->interlaced) {
755  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
756  avctx->width, avctx->height,
757  c->slices, 0);
758  } else {
759  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
760  avctx->width, avctx->height,
761  c->slices, 0);
762  }
763  } else if (c->frame_pred == PRED_GRADIENT) {
764  if (!c->interlaced) {
765  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
766  avctx->width, avctx->height,
767  c->slices, 0);
768  } else {
769  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
770  avctx->width, avctx->height,
771  c->slices, 0);
772  }
773  }
774  }
775  break;
776  case AV_PIX_FMT_YUV422P10:
777  for (i = 0; i < 3; i++) {
778  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i], frame.f->linesize[i] / 2,
779  avctx->width >> !!i, avctx->height,
780  plane_start[i], plane_start[i + 1] - 1024, c->frame_pred == PRED_LEFT);
781  if (ret)
782  return ret;
783  }
784  break;
785  }
786 
787  frame.f->key_frame = 1;
788  frame.f->pict_type = AV_PICTURE_TYPE_I;
789  frame.f->interlaced_frame = !!c->interlaced;
790 
791  *got_frame = 1;
792 
793  /* always report that the buffer was completely consumed */
794  return buf_size;
795 }
796 
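/*
 * Parse the extradata (encoder version, original format, frame info size and
 * flags in the 16-byte layout, or the 8-byte Ut Video Pro layout) and map the
 * FOURCC to the output pixel format and colorspace.
 */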
797 static av_cold int decode_init(AVCodecContext *avctx)
798 {
799  UtvideoContext * const c = avctx->priv_data;
800 
801  c->avctx = avctx;
802 
803  ff_utvideodsp_init(&c->utdsp);
804  ff_bswapdsp_init(&c->bdsp);
805  ff_llviddsp_init(&c->llviddsp);
806 
807  if (avctx->extradata_size >= 16) {
808  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
809  avctx->extradata[3], avctx->extradata[2],
810  avctx->extradata[1], avctx->extradata[0]);
811  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
812  AV_RB32(avctx->extradata + 4));
813  c->frame_info_size = AV_RL32(avctx->extradata + 8);
814  c->flags = AV_RL32(avctx->extradata + 12);
815 
816  if (c->frame_info_size != 4)
817  avpriv_request_sample(avctx, "Frame info not 4 bytes");
818  av_log(avctx, AV_LOG_DEBUG, "Encoding parameters %08"PRIX32"\n", c->flags);
819  c->slices = (c->flags >> 24) + 1;
820  c->compression = c->flags & 1;
821  c->interlaced = c->flags & 0x800;
822  } else if (avctx->extradata_size == 8) {
823  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
824  avctx->extradata[3], avctx->extradata[2],
825  avctx->extradata[1], avctx->extradata[0]);
826  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
827  AV_RB32(avctx->extradata + 4));
828  c->interlaced = 0;
829  c->pro = 1;
830  c->frame_info_size = 4;
831  } else {
832  av_log(avctx, AV_LOG_ERROR,
833  "Insufficient extradata size %d, should be at least 16\n",
834  avctx->extradata_size);
835  return AVERROR_INVALIDDATA;
836  }
837 
838  c->slice_bits_size = 0;
839 
840  switch (avctx->codec_tag) {
841  case MKTAG('U', 'L', 'R', 'G'):
842  c->planes = 3;
843  avctx->pix_fmt = AV_PIX_FMT_GBRP;
844  break;
845  case MKTAG('U', 'L', 'R', 'A'):
846  c->planes = 4;
847  avctx->pix_fmt = AV_PIX_FMT_GBRAP;
848  break;
849  case MKTAG('U', 'L', 'Y', '0'):
850  c->planes = 3;
851  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
852  avctx->colorspace = AVCOL_SPC_BT470BG;
853  break;
854  case MKTAG('U', 'L', 'Y', '2'):
855  c->planes = 3;
856  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
857  avctx->colorspace = AVCOL_SPC_BT470BG;
858  break;
859  case MKTAG('U', 'L', 'Y', '4'):
860  c->planes = 3;
861  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
862  avctx->colorspace = AVCOL_SPC_BT470BG;
863  break;
864  case MKTAG('U', 'Q', 'Y', '2'):
865  c->planes = 3;
866  avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
867  break;
868  case MKTAG('U', 'Q', 'R', 'G'):
869  c->planes = 3;
870  avctx->pix_fmt = AV_PIX_FMT_GBRP10;
871  break;
872  case MKTAG('U', 'Q', 'R', 'A'):
873  c->planes = 4;
874  avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
875  break;
876  case MKTAG('U', 'L', 'H', '0'):
877  c->planes = 3;
878  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
879  avctx->colorspace = AVCOL_SPC_BT709;
880  break;
881  case MKTAG('U', 'L', 'H', '2'):
882  c->planes = 3;
883  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
884  avctx->colorspace = AVCOL_SPC_BT709;
885  break;
886  case MKTAG('U', 'L', 'H', '4'):
887  c->planes = 3;
888  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
889  avctx->colorspace = AVCOL_SPC_BT709;
890  break;
891  default:
892  av_log(avctx, AV_LOG_ERROR, "Unknown Ut Video FOURCC provided (%08X)\n",
893  avctx->codec_tag);
894  return AVERROR_INVALIDDATA;
895  }
896 
897  return 0;
898 }
899 
900 static av_cold int decode_end(AVCodecContext *avctx)
901 {
902  UtvideoContext * const c = avctx->priv_data;
903 
904  av_freep(&c->slice_bits);
905 
906  return 0;
907 }
908 
909 AVCodec ff_utvideo_decoder = {
910  .name = "utvideo",
911  .long_name = NULL_IF_CONFIG_SMALL("Ut Video"),
912  .type = AVMEDIA_TYPE_VIDEO,
913  .id = AV_CODEC_ID_UTVIDEO,
914  .priv_data_size = sizeof(UtvideoContext),
915  .init = decode_init,
916  .close = decode_end,
917  .decode = decode_frame,
918  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
919  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
920 };