truemotion2.c
/*
 * Duck/ON2 TrueMotion 2 Decoder
 * Copyright (c) 2005 Konstantin Shishkov
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Duck TrueMotion2 decoder.
 */

#include <inttypes.h>

#include "avcodec.h"
#include "bswapdsp.h"
#include "bytestream.h"
#include "get_bits.h"
#include "internal.h"

#define TM2_ESCAPE 0x80000000
#define TM2_DELTAS 64

/* Huffman-coded streams of different types of blocks */
enum TM2_STREAMS {
    TM2_C_HI = 0,
    TM2_C_LO,
    TM2_L_HI,
    TM2_L_LO,
    TM2_UPD,
    TM2_MOT,
    TM2_TYPE,
    TM2_NUM_STREAMS
};

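/*
 * Each frame carries one Huffman-coded token stream per entry above: high-
 * and low-resolution chroma deltas (TM2_C_HI/TM2_C_LO), high- and low-
 * resolution luma deltas (TM2_L_HI/TM2_L_LO), update deltas (TM2_UPD),
 * motion vector components (TM2_MOT) and one block-type token per 4x4
 * block (TM2_TYPE), as consumed by the block decoders below.
 */
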
/* Block types */
enum TM2_BLOCKS {
    TM2_HI_RES = 0,
    TM2_MED_RES,
    TM2_LOW_RES,
    TM2_NULL_RES,
    TM2_UPDATE,
    TM2_STILL,
    TM2_MOTION
};

typedef struct TM2Context {
    AVCodecContext *avctx;
    AVFrame *pic;

    GetBitContext gb;
    BswapDSPContext bdsp;

    uint8_t *buffer;
    int buffer_size;

    /* TM2 streams */
    int *tokens[TM2_NUM_STREAMS];
    int tok_lens[TM2_NUM_STREAMS];
    int tok_ptrs[TM2_NUM_STREAMS];
    int deltas[TM2_NUM_STREAMS][TM2_DELTAS];
    /* for blocks decoding */
    int D[4];
    int CD[4];
    int *last;
    int *clast;

    /* data for current and previous frame */
    int *Y1_base, *U1_base, *V1_base, *Y2_base, *U2_base, *V2_base;
    int *Y1, *U1, *V1, *Y2, *U2, *V2;
    int y_stride, uv_stride;
    int cur;
} TM2Context;

/**
 * Huffman codes for each of streams
 */
typedef struct TM2Codes {
    VLC vlc;     ///< table for FFmpeg bitstream reader
    int bits;
    int *recode; ///< table for converting from code indexes to values
    int length;
} TM2Codes;

/**
 * structure for gathering Huffman codes information
 */
typedef struct TM2Huff {
    int val_bits;   ///< length of literal
    int max_bits;   ///< maximum length of code
    int min_bits;   ///< minimum length of code
    int nodes;      ///< total number of nodes in tree
    int num;        ///< current number filled
    int max_num;    ///< total number of codes
    int *nums;      ///< literals
    uint32_t *bits; ///< codes
    int *lens;      ///< codelengths
} TM2Huff;

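/*
 * The Huffman tree is transmitted as a bit-serialized binary tree: a '1'
 * bit introduces an internal node followed by its two subtrees, a '0' bit
 * introduces a leaf followed by a val_bits-wide literal.  tm2_read_tree()
 * walks this representation recursively, recording code/length/literal
 * triplets that tm2_build_huff_table() later feeds to init_vlc().
 */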
static int tm2_read_tree(TM2Context *ctx, uint32_t prefix, int length, TM2Huff *huff)
{
    int ret;
    if (length > huff->max_bits) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Tree exceeded its given depth (%i)\n",
               huff->max_bits);
        return AVERROR_INVALIDDATA;
    }

    if (!get_bits1(&ctx->gb)) { /* literal */
        if (length == 0) {
            length = 1;
        }
        if (huff->num >= huff->max_num) {
            av_log(ctx->avctx, AV_LOG_DEBUG, "Too many literals\n");
            return AVERROR_INVALIDDATA;
        }
        huff->nums[huff->num] = get_bits_long(&ctx->gb, huff->val_bits);
        huff->bits[huff->num] = prefix;
        huff->lens[huff->num] = length;
        huff->num++;
        return 0;
    } else { /* non-terminal node */
        if ((ret = tm2_read_tree(ctx, prefix << 1, length + 1, huff)) < 0)
            return ret;
        if ((ret = tm2_read_tree(ctx, (prefix << 1) | 1, length + 1, huff)) < 0)
            return ret;
    }
    return 0;
}

static int tm2_build_huff_table(TM2Context *ctx, TM2Codes *code)
{
    TM2Huff huff;
    int res = 0;

    huff.val_bits = get_bits(&ctx->gb, 5);
    huff.max_bits = get_bits(&ctx->gb, 5);
    huff.min_bits = get_bits(&ctx->gb, 5);
    huff.nodes    = get_bits_long(&ctx->gb, 17);
    huff.num      = 0;

    /* check for correct codes parameters */
    if ((huff.val_bits < 1) || (huff.val_bits > 32) ||
        (huff.max_bits < 0) || (huff.max_bits > 25)) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Incorrect tree parameters - literal "
               "length: %i, max code length: %i\n", huff.val_bits, huff.max_bits);
        return AVERROR_INVALIDDATA;
    }
    if ((huff.nodes <= 0) || (huff.nodes > 0x10000)) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Incorrect number of Huffman tree "
               "nodes: %i\n", huff.nodes);
        return AVERROR_INVALIDDATA;
    }
    /* one-node tree */
    if (huff.max_bits == 0)
        huff.max_bits = 1;

    /* allocate space for codes - it is exactly ceil(nodes / 2) entries */
    huff.max_num = (huff.nodes + 1) >> 1;
    huff.nums    = av_calloc(huff.max_num, sizeof(int));
    huff.bits    = av_calloc(huff.max_num, sizeof(uint32_t));
    huff.lens    = av_calloc(huff.max_num, sizeof(int));

    if (!huff.nums || !huff.bits || !huff.lens) {
        res = AVERROR(ENOMEM);
        goto out;
    }

    res = tm2_read_tree(ctx, 0, 0, &huff);

    if (huff.num != huff.max_num) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Got less codes than expected: %i of %i\n",
               huff.num, huff.max_num);
        res = AVERROR_INVALIDDATA;
    }

    /* convert codes to vlc_table */
    if (res >= 0) {
        int i;

        res = init_vlc(&code->vlc, huff.max_bits, huff.max_num,
                       huff.lens, sizeof(int), sizeof(int),
                       huff.bits, sizeof(uint32_t), sizeof(uint32_t), 0);
        if (res < 0)
            av_log(ctx->avctx, AV_LOG_ERROR, "Cannot build VLC table\n");
        else {
            code->bits   = huff.max_bits;
            code->length = huff.max_num;
            code->recode = av_malloc_array(code->length, sizeof(int));
            if (!code->recode) {
                res = AVERROR(ENOMEM);
                goto out;
            }
            for (i = 0; i < code->length; i++)
                code->recode[i] = huff.nums[i];
        }
    }

out:
    /* free allocated memory */
    av_free(huff.nums);
    av_free(huff.bits);
    av_free(huff.lens);

    return res;
}

static void tm2_free_codes(TM2Codes *code)
{
    av_free(code->recode);
    if (code->vlc.table)
        ff_free_vlc(&code->vlc);
}

static inline int tm2_get_token(GetBitContext *gb, TM2Codes *code)
{
    int val;
    val = get_vlc2(gb, code->vlc.table, code->bits, 1);
    if (val < 0)
        return -1;
    return code->recode[val];
}

#define TM2_OLD_HEADER_MAGIC 0x00000100
#define TM2_NEW_HEADER_MAGIC 0x00000101

static inline int tm2_read_header(TM2Context *ctx, const uint8_t *buf)
{
    uint32_t magic = AV_RL32(buf);

    switch (magic) {
    case TM2_OLD_HEADER_MAGIC:
        avpriv_request_sample(ctx->avctx, "Old TM2 header");
        return 0;
    case TM2_NEW_HEADER_MAGIC:
        return 0;
    default:
        av_log(ctx->avctx, AV_LOG_ERROR, "Not a TM2 header: 0x%08"PRIX32"\n",
               magic);
        return AVERROR_INVALIDDATA;
    }
}

static int tm2_read_deltas(TM2Context *ctx, int stream_id)
{
    int d, mb;
    int i, v;

    d  = get_bits(&ctx->gb, 9);
    mb = get_bits(&ctx->gb, 5);

    av_assert2(mb < 32);
    if ((d < 1) || (d > TM2_DELTAS) || (mb < 1)) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Incorrect delta table: %i deltas x %i bits\n", d, mb);
        return AVERROR_INVALIDDATA;
    }

    for (i = 0; i < d; i++) {
        v = get_bits_long(&ctx->gb, mb);
        if (v & (1 << (mb - 1)))
            ctx->deltas[stream_id][i] = v - (1 << mb);
        else
            ctx->deltas[stream_id][i] = v;
    }
    for (; i < TM2_DELTAS; i++)
        ctx->deltas[stream_id][i] = 0;

    return 0;
}

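/*
 * Parse one token stream.  The layout read below is: a big-endian dword with
 * the stream length in dwords, a token count whose low bit flags an optional
 * delta table, the delta table if present, an unused field, the Huffman tree
 * and finally the coded tokens.  Returns the number of bytes consumed or a
 * negative error code.
 */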
static int tm2_read_stream(TM2Context *ctx, const uint8_t *buf, int stream_id, int buf_size)
{
    int i, ret;
    int skip = 0;
    int len, toks, pos;
    TM2Codes codes;
    GetByteContext gb;

    if (buf_size < 4) {
        av_log(ctx->avctx, AV_LOG_ERROR, "not enough space for len left\n");
        return AVERROR_INVALIDDATA;
    }

    /* get stream length in dwords */
    bytestream2_init(&gb, buf, buf_size);
    len  = bytestream2_get_be32(&gb);
    skip = len * 4 + 4;

    if (len == 0)
        return 4;

    if (len >= INT_MAX / 4 - 1 || len < 0 || skip > buf_size) {
        av_log(ctx->avctx, AV_LOG_ERROR, "invalid stream size\n");
        return AVERROR_INVALIDDATA;
    }

    toks = bytestream2_get_be32(&gb);
    if (toks & 1) {
        len = bytestream2_get_be32(&gb);
        if (len == TM2_ESCAPE) {
            len = bytestream2_get_be32(&gb);
        }
        if (len > 0) {
            pos = bytestream2_tell(&gb);
            if (skip <= pos)
                return AVERROR_INVALIDDATA;
            init_get_bits(&ctx->gb, buf + pos, (skip - pos) * 8);
            if ((ret = tm2_read_deltas(ctx, stream_id)) < 0)
                return ret;
            bytestream2_skip(&gb, ((get_bits_count(&ctx->gb) + 31) >> 5) << 2);
        }
    }
    /* skip unused fields */
    len = bytestream2_get_be32(&gb);
    if (len == TM2_ESCAPE) { /* some unknown length - could be escaped too */
        bytestream2_skip(&gb, 8); /* unused by decoder */
    } else {
        bytestream2_skip(&gb, 4); /* unused by decoder */
    }

    pos = bytestream2_tell(&gb);
    if (skip <= pos)
        return AVERROR_INVALIDDATA;
    init_get_bits(&ctx->gb, buf + pos, (skip - pos) * 8);
    if ((ret = tm2_build_huff_table(ctx, &codes)) < 0)
        return ret;
    bytestream2_skip(&gb, ((get_bits_count(&ctx->gb) + 31) >> 5) << 2);

    toks >>= 1;
    /* check if we have sane number of tokens */
    if ((toks < 0) || (toks > 0xFFFFFF)) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Incorrect number of tokens: %i\n", toks);
        tm2_free_codes(&codes);
        return AVERROR_INVALIDDATA;
    }
    ret = av_reallocp_array(&ctx->tokens[stream_id], toks, sizeof(int));
    if (ret < 0) {
        ctx->tok_lens[stream_id] = 0;
        return ret;
    }
    ctx->tok_lens[stream_id] = toks;
    len = bytestream2_get_be32(&gb);
    if (len > 0) {
        pos = bytestream2_tell(&gb);
        if (skip <= pos)
            return AVERROR_INVALIDDATA;
        init_get_bits(&ctx->gb, buf + pos, (skip - pos) * 8);
        for (i = 0; i < toks; i++) {
            if (get_bits_left(&ctx->gb) <= 0) {
                av_log(ctx->avctx, AV_LOG_ERROR, "Incorrect number of tokens: %i\n", toks);
                return AVERROR_INVALIDDATA;
            }
            ctx->tokens[stream_id][i] = tm2_get_token(&ctx->gb, &codes);
            if ((stream_id <= TM2_MOT && ctx->tokens[stream_id][i] >= TM2_DELTAS) ||
                ctx->tokens[stream_id][i] < 0) {
                av_log(ctx->avctx, AV_LOG_ERROR, "Invalid delta token index %d for type %d, n=%d\n",
                       ctx->tokens[stream_id][i], stream_id, i);
                return AVERROR_INVALIDDATA;
            }
        }
    } else {
        for (i = 0; i < toks; i++) {
            ctx->tokens[stream_id][i] = codes.recode[0];
            if (stream_id <= TM2_MOT && ctx->tokens[stream_id][i] >= TM2_DELTAS) {
                av_log(ctx->avctx, AV_LOG_ERROR, "Invalid delta token index %d for type %d, n=%d\n",
                       ctx->tokens[stream_id][i], stream_id, i);
                return AVERROR_INVALIDDATA;
            }
        }
    }
    tm2_free_codes(&codes);

    return skip;
}

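/*
 * Fetch the next token from a stream.  For the delta-coded streams (every
 * stream up to and including TM2_MOT) the token is an index into that
 * stream's delta table; for TM2_TYPE the token value is returned as-is.
 * Out-of-bounds reads are reported and yield 0 so decoding can continue.
 */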
static inline int GET_TOK(TM2Context *ctx, int type)
{
    if (ctx->tok_ptrs[type] >= ctx->tok_lens[type]) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Read token from stream %i out of bounds (%i>=%i)\n", type, ctx->tok_ptrs[type], ctx->tok_lens[type]);
        return 0;
    }
    if (type <= TM2_MOT) {
        if (ctx->tokens[type][ctx->tok_ptrs[type]] >= TM2_DELTAS) {
            av_log(ctx->avctx, AV_LOG_ERROR, "token %d is too large\n", ctx->tokens[type][ctx->tok_ptrs[type]]);
            return 0;
        }
        return ctx->deltas[type][ctx->tokens[type][ctx->tok_ptrs[type]++]];
    }
    return ctx->tokens[type][ctx->tok_ptrs[type]++];
}

/* blocks decoding routines */

/* common Y, U, V pointers initialisation */
#define TM2_INIT_POINTERS() \
    int *last, *clast; \
    int *Y, *U, *V; \
    int Ystride, Ustride, Vstride; \
\
    Ystride = ctx->y_stride; \
    Vstride = ctx->uv_stride; \
    Ustride = ctx->uv_stride; \
    Y = (ctx->cur?ctx->Y2:ctx->Y1) + by * 4 * Ystride + bx * 4; \
    V = (ctx->cur?ctx->V2:ctx->V1) + by * 2 * Vstride + bx * 2; \
    U = (ctx->cur?ctx->U2:ctx->U1) + by * 2 * Ustride + bx * 2; \
    last  = ctx->last  + bx * 4; \
    clast = ctx->clast + bx * 4;

#define TM2_INIT_POINTERS_2() \
    int *Yo, *Uo, *Vo; \
    int oYstride, oUstride, oVstride; \
\
    TM2_INIT_POINTERS(); \
    oYstride = Ystride; \
    oVstride = Vstride; \
    oUstride = Ustride; \
    Yo = (ctx->cur?ctx->Y1:ctx->Y2) + by * 4 * oYstride + bx * 4; \
    Vo = (ctx->cur?ctx->V1:ctx->V2) + by * 2 * oVstride + bx * 2; \
    Uo = (ctx->cur?ctx->U1:ctx->U2) + by * 2 * oUstride + bx * 2;

/* recalculate last and delta values for next blocks */
#define TM2_RECALC_BLOCK(CHR, stride, last, CD) {\
    CD[0] = CHR[1] - last[1]; \
    CD[1] = (int)CHR[stride + 1] - (int)CHR[1]; \
    last[0] = (int)CHR[stride + 0]; \
    last[1] = (int)CHR[stride + 1];}

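/*
 * The block decoders below work on the int planes of the current frame
 * (selected by ctx->cur), with the previous frame's planes available for
 * still/update/motion blocks.  Each block covers 4x4 luma and 2x2 chroma
 * samples; 'last' and 'clast' hold the bottom row of the blocks above,
 * while 'D' and 'CD' carry per-row deltas into the next block to the right.
 */
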
/* common operations - add deltas to 4x4 block of luma or 2x2 blocks of chroma */
static inline void tm2_apply_deltas(TM2Context *ctx, int* Y, int stride, int *deltas, int *last)
{
    int ct, d;
    int i, j;

    for (j = 0; j < 4; j++) {
        ct = ctx->D[j];
        for (i = 0; i < 4; i++) {
            d = deltas[i + j * 4];
            ct += d;
            last[i] += ct;
            Y[i] = av_clip_uint8(last[i]);
        }
        Y += stride;
        ctx->D[j] = ct;
    }
}

static inline void tm2_high_chroma(int *data, int stride, int *last, int *CD, int *deltas)
{
    int i, j;
    for (j = 0; j < 2; j++) {
        for (i = 0; i < 2; i++) {
            CD[j] += deltas[i + j * 2];
            last[i] += CD[j];
            data[i] = last[i];
        }
        data += stride;
    }
}

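/*
 * Low-resolution chroma: only one delta is coded for the whole 2x2 block;
 * the remaining values are predicted from the running column deltas and the
 * previously decoded block to the left before tm2_high_chroma() performs
 * the actual reconstruction.
 */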
static inline void tm2_low_chroma(int *data, int stride, int *clast, int *CD, int *deltas, int bx)
{
    int t;
    int l;
    int prev;

    if (bx > 0)
        prev = clast[-3];
    else
        prev = 0;
    t = (CD[0] + CD[1]) >> 1;
    l = (prev - CD[0] - CD[1] + clast[1]) >> 1;
    CD[1] = CD[0] + CD[1] - t;
    CD[0] = t;
    clast[0] = l;

    tm2_high_chroma(data, stride, clast, CD, deltas);
}

static inline void tm2_hi_res_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i;
    int deltas[16];
    TM2_INIT_POINTERS();

    /* hi-res chroma */
    for (i = 0; i < 4; i++) {
        deltas[i]     = GET_TOK(ctx, TM2_C_HI);
        deltas[i + 4] = GET_TOK(ctx, TM2_C_HI);
    }
    tm2_high_chroma(U, Ustride, clast, ctx->CD, deltas);
    tm2_high_chroma(V, Vstride, clast + 2, ctx->CD + 2, deltas + 4);

    /* hi-res luma */
    for (i = 0; i < 16; i++)
        deltas[i] = GET_TOK(ctx, TM2_L_HI);

    tm2_apply_deltas(ctx, Y, Ystride, deltas, last);
}

static inline void tm2_med_res_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i;
    int deltas[16];
    TM2_INIT_POINTERS();

    /* low-res chroma */
    deltas[0] = GET_TOK(ctx, TM2_C_LO);
    deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(U, Ustride, clast, ctx->CD, deltas, bx);

    deltas[0] = GET_TOK(ctx, TM2_C_LO);
    deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(V, Vstride, clast + 2, ctx->CD + 2, deltas, bx);

    /* hi-res luma */
    for (i = 0; i < 16; i++)
        deltas[i] = GET_TOK(ctx, TM2_L_HI);

    tm2_apply_deltas(ctx, Y, Ystride, deltas, last);
}

static inline void tm2_low_res_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i;
    int t1, t2;
    int deltas[16];
    TM2_INIT_POINTERS();

    /* low-res chroma */
    deltas[0] = GET_TOK(ctx, TM2_C_LO);
    deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(U, Ustride, clast, ctx->CD, deltas, bx);

    deltas[0] = GET_TOK(ctx, TM2_C_LO);
    deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(V, Vstride, clast + 2, ctx->CD + 2, deltas, bx);

    /* low-res luma */
    for (i = 0; i < 16; i++)
        deltas[i] = 0;

    deltas[ 0] = GET_TOK(ctx, TM2_L_LO);
    deltas[ 2] = GET_TOK(ctx, TM2_L_LO);
    deltas[ 8] = GET_TOK(ctx, TM2_L_LO);
    deltas[10] = GET_TOK(ctx, TM2_L_LO);

    if (bx > 0)
        last[0] = (last[-1] - ctx->D[0] - ctx->D[1] - ctx->D[2] - ctx->D[3] + last[1]) >> 1;
    else
        last[0] = (last[1]  - ctx->D[0] - ctx->D[1] - ctx->D[2] - ctx->D[3]) >> 1;
    last[2] = (last[1] + last[3]) >> 1;

    t1 = ctx->D[0] + ctx->D[1];
    ctx->D[0] = t1 >> 1;
    ctx->D[1] = t1 - (t1 >> 1);
    t2 = ctx->D[2] + ctx->D[3];
    ctx->D[2] = t2 >> 1;
    ctx->D[3] = t2 - (t2 >> 1);

    tm2_apply_deltas(ctx, Y, Ystride, deltas, last);
}

static inline void tm2_null_res_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i;
    int ct;
    int left, right, diff;
    int deltas[16];
    TM2_INIT_POINTERS();

    /* null chroma */
    deltas[0] = deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(U, Ustride, clast, ctx->CD, deltas, bx);

    deltas[0] = deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(V, Vstride, clast + 2, ctx->CD + 2, deltas, bx);

    /* null luma */
    for (i = 0; i < 16; i++)
        deltas[i] = 0;

    ct = ctx->D[0] + ctx->D[1] + ctx->D[2] + ctx->D[3];

    if (bx > 0)
        left = last[-1] - ct;
    else
        left = 0;

    right   = last[3];
    diff    = right - left;
    last[0] = left + (diff >> 2);
    last[1] = left + (diff >> 1);
    last[2] = right - (diff >> 2);
    last[3] = right;
    {
        int tp = left;

        ctx->D[0] = (tp + (ct >> 2)) - left;
        left     += ctx->D[0];
        ctx->D[1] = (tp + (ct >> 1)) - left;
        left     += ctx->D[1];
        ctx->D[2] = ((tp + ct) - (ct >> 2)) - left;
        left     += ctx->D[2];
        ctx->D[3] = (tp + ct) - left;
    }
    tm2_apply_deltas(ctx, Y, Ystride, deltas, last);
}

static inline void tm2_still_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i, j;
    TM2_INIT_POINTERS_2();

    /* update chroma */
    for (j = 0; j < 2; j++) {
        for (i = 0; i < 2; i++) {
            U[i] = Uo[i];
            V[i] = Vo[i];
        }
        U  += Ustride;  V  += Vstride;
        Uo += oUstride; Vo += oVstride;
    }
    U -= Ustride * 2;
    V -= Vstride * 2;
    TM2_RECALC_BLOCK(U, Ustride, clast, ctx->CD);
    TM2_RECALC_BLOCK(V, Vstride, (clast + 2), (ctx->CD + 2));

    /* update deltas */
    ctx->D[0] = Yo[3] - last[3];
    ctx->D[1] = Yo[3 + oYstride] - Yo[3];
    ctx->D[2] = Yo[3 + oYstride * 2] - Yo[3 + oYstride];
    ctx->D[3] = Yo[3 + oYstride * 3] - Yo[3 + oYstride * 2];

    for (j = 0; j < 4; j++) {
        for (i = 0; i < 4; i++) {
            Y[i]    = Yo[i];
            last[i] = Yo[i];
        }
        Y  += Ystride;
        Yo += oYstride;
    }
}

static inline void tm2_update_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i, j;
    int d;
    TM2_INIT_POINTERS_2();

    /* update chroma */
    for (j = 0; j < 2; j++) {
        for (i = 0; i < 2; i++) {
            U[i] = Uo[i] + GET_TOK(ctx, TM2_UPD);
            V[i] = Vo[i] + GET_TOK(ctx, TM2_UPD);
        }
        U  += Ustride;
        V  += Vstride;
        Uo += oUstride;
        Vo += oVstride;
    }
    U -= Ustride * 2;
    V -= Vstride * 2;
    TM2_RECALC_BLOCK(U, Ustride, clast, ctx->CD);
    TM2_RECALC_BLOCK(V, Vstride, (clast + 2), (ctx->CD + 2));

    /* update deltas */
    ctx->D[0] = Yo[3] - last[3];
    ctx->D[1] = Yo[3 + oYstride] - Yo[3];
    ctx->D[2] = Yo[3 + oYstride * 2] - Yo[3 + oYstride];
    ctx->D[3] = Yo[3 + oYstride * 3] - Yo[3 + oYstride * 2];

    for (j = 0; j < 4; j++) {
        d = last[3];
        for (i = 0; i < 4; i++) {
            Y[i]    = Yo[i] + GET_TOK(ctx, TM2_UPD);
            last[i] = Y[i];
        }
        ctx->D[j] = last[3] - d;
        Y  += Ystride;
        Yo += oYstride;
    }
}

static inline void tm2_motion_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i, j;
    int mx, my;
    TM2_INIT_POINTERS_2();

    mx = GET_TOK(ctx, TM2_MOT);
    my = GET_TOK(ctx, TM2_MOT);
    mx = av_clip(mx, -(bx * 4 + 4), ctx->avctx->width  - bx * 4);
    my = av_clip(my, -(by * 4 + 4), ctx->avctx->height - by * 4);

    if (4 * bx + mx < 0 || 4 * by + my < 0 ||
        4 * bx + mx + 4 > ctx->avctx->width ||
        4 * by + my + 4 > ctx->avctx->height) {
        av_log(ctx->avctx, AV_LOG_ERROR, "MV out of picture\n");
        return;
    }

    Yo += my * oYstride + mx;
    Uo += (my >> 1) * oUstride + (mx >> 1);
    Vo += (my >> 1) * oVstride + (mx >> 1);

    /* copy chroma */
    for (j = 0; j < 2; j++) {
        for (i = 0; i < 2; i++) {
            U[i] = Uo[i];
            V[i] = Vo[i];
        }
        U  += Ustride;
        V  += Vstride;
        Uo += oUstride;
        Vo += oVstride;
    }
    U -= Ustride * 2;
    V -= Vstride * 2;
    TM2_RECALC_BLOCK(U, Ustride, clast, ctx->CD);
    TM2_RECALC_BLOCK(V, Vstride, (clast + 2), (ctx->CD + 2));

    /* copy luma */
    for (j = 0; j < 4; j++) {
        for (i = 0; i < 4; i++) {
            Y[i] = Yo[i];
        }
        Y  += Ystride;
        Yo += oYstride;
    }
    /* calculate deltas */
    Y -= Ystride * 4;
    ctx->D[0] = Y[3] - last[3];
    ctx->D[1] = Y[3 + Ystride] - Y[3];
    ctx->D[2] = Y[3 + Ystride * 2] - Y[3 + Ystride];
    ctx->D[3] = Y[3 + Ystride * 3] - Y[3 + Ystride * 2];
    for (i = 0; i < 4; i++)
        last[i] = Y[i + Ystride * 3];
}

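/*
 * Decodes every 4x4 block of the frame according to its TM2_TYPE token and
 * then converts the internal planes to packed BGR24.  Despite the names,
 * the "Y" plane effectively holds green while "U" and "V" hold red and blue
 * differences, hence the y + u / y + v sums in the output loop.  Returns 1
 * if no inter blocks were used (i.e. the frame is a keyframe), 0 otherwise.
 */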
static int tm2_decode_blocks(TM2Context *ctx, AVFrame *p)
{
    int i, j;
    int w = ctx->avctx->width, h = ctx->avctx->height, bw = w >> 2, bh = h >> 2, cw = w >> 1;
    int type;
    int keyframe = 1;
    int *Y, *U, *V;
    uint8_t *dst;

    for (i = 0; i < TM2_NUM_STREAMS; i++)
        ctx->tok_ptrs[i] = 0;

    if (ctx->tok_lens[TM2_TYPE] < bw * bh) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Got %i tokens for %i blocks\n", ctx->tok_lens[TM2_TYPE], bw * bh);
        return AVERROR_INVALIDDATA;
    }

    memset(ctx->last,  0, 4 * bw * sizeof(int));
    memset(ctx->clast, 0, 4 * bw * sizeof(int));

    for (j = 0; j < bh; j++) {
        memset(ctx->D,  0, 4 * sizeof(int));
        memset(ctx->CD, 0, 4 * sizeof(int));
        for (i = 0; i < bw; i++) {
            type = GET_TOK(ctx, TM2_TYPE);
            switch (type) {
            case TM2_HI_RES:
                tm2_hi_res_block(ctx, p, i, j);
                break;
            case TM2_MED_RES:
                tm2_med_res_block(ctx, p, i, j);
                break;
            case TM2_LOW_RES:
                tm2_low_res_block(ctx, p, i, j);
                break;
            case TM2_NULL_RES:
                tm2_null_res_block(ctx, p, i, j);
                break;
            case TM2_UPDATE:
                tm2_update_block(ctx, p, i, j);
                keyframe = 0;
                break;
            case TM2_STILL:
                tm2_still_block(ctx, p, i, j);
                keyframe = 0;
                break;
            case TM2_MOTION:
                tm2_motion_block(ctx, p, i, j);
                keyframe = 0;
                break;
            default:
                av_log(ctx->avctx, AV_LOG_ERROR, "Skipping unknown block type %i\n", type);
            }
        }
    }

    /* copy data from our buffer to AVFrame */
    Y = (ctx->cur ? ctx->Y2 : ctx->Y1);
    U = (ctx->cur ? ctx->U2 : ctx->U1);
    V = (ctx->cur ? ctx->V2 : ctx->V1);
    dst = p->data[0];
    for (j = 0; j < h; j++) {
        for (i = 0; i < w; i++) {
            int y = Y[i], u = U[i >> 1], v = V[i >> 1];
            dst[3*i+0] = av_clip_uint8(y + v);
            dst[3*i+1] = av_clip_uint8(y);
            dst[3*i+2] = av_clip_uint8(y + u);
        }

        /* horizontal edge extension */
        Y[-4]    = Y[-3]    = Y[-2]    = Y[-1] = Y[0];
        Y[w + 3] = Y[w + 2] = Y[w + 1] = Y[w]  = Y[w - 1];

        /* vertical edge extension */
        if (j == 0) {
            memcpy(Y - 4 - 1 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 - 2 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 - 3 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 - 4 * ctx->y_stride, Y - 4, ctx->y_stride);
        } else if (j == h - 1) {
            memcpy(Y - 4 + 1 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 + 2 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 + 3 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 + 4 * ctx->y_stride, Y - 4, ctx->y_stride);
        }

        Y += ctx->y_stride;
        if (j & 1) {
            /* horizontal edge extension */
            U[-2]     = U[-1] = U[0];
            V[-2]     = V[-1] = V[0];
            U[cw + 1] = U[cw] = U[cw - 1];
            V[cw + 1] = V[cw] = V[cw - 1];

            /* vertical edge extension */
            if (j == 1) {
                memcpy(U - 2 - 1 * ctx->uv_stride, U - 2, ctx->uv_stride);
                memcpy(V - 2 - 1 * ctx->uv_stride, V - 2, ctx->uv_stride);
                memcpy(U - 2 - 2 * ctx->uv_stride, U - 2, ctx->uv_stride);
                memcpy(V - 2 - 2 * ctx->uv_stride, V - 2, ctx->uv_stride);
            } else if (j == h - 1) {
                memcpy(U - 2 + 1 * ctx->uv_stride, U - 2, ctx->uv_stride);
                memcpy(V - 2 + 1 * ctx->uv_stride, V - 2, ctx->uv_stride);
                memcpy(U - 2 + 2 * ctx->uv_stride, U - 2, ctx->uv_stride);
                memcpy(V - 2 + 2 * ctx->uv_stride, V - 2, ctx->uv_stride);
            }

            U += ctx->uv_stride;
            V += ctx->uv_stride;
        }
        dst += p->linesize[0];
    }

    return keyframe;
}

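/* The order in which the seven token streams appear in the frame payload. */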
static const int tm2_stream_order[TM2_NUM_STREAMS] = {
    TM2_C_HI, TM2_C_LO, TM2_L_HI, TM2_L_LO, TM2_UPD, TM2_MOT, TM2_TYPE
};

#define TM2_HEADER_SIZE 40

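/*
 * A TM2 frame consists of a 40-byte header followed by the seven streams in
 * the order given above.  The payload is stored as little-endian dwords, so
 * it is byte-swapped into a scratch buffer before the big-endian readers in
 * tm2_read_stream() are applied.
 */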
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *got_frame,
                        AVPacket *avpkt)
{
    TM2Context * const l = avctx->priv_data;
    const uint8_t *buf   = avpkt->data;
    int buf_size         = avpkt->size & ~3;
    AVFrame * const p    = l->pic;
    int offset           = TM2_HEADER_SIZE;
    int i, t, ret;

    av_fast_padded_malloc(&l->buffer, &l->buffer_size, buf_size);
    if (!l->buffer) {
        av_log(avctx, AV_LOG_ERROR, "Cannot allocate temporary buffer\n");
        return AVERROR(ENOMEM);
    }

    if ((ret = ff_reget_buffer(avctx, p)) < 0)
        return ret;

    l->bdsp.bswap_buf((uint32_t *) l->buffer, (const uint32_t *) buf,
                      buf_size >> 2);

    if ((ret = tm2_read_header(l, l->buffer)) < 0) {
        return ret;
    }

    for (i = 0; i < TM2_NUM_STREAMS; i++) {
        if (offset >= buf_size) {
            av_log(avctx, AV_LOG_ERROR, "no space for tm2_read_stream\n");
            return AVERROR_INVALIDDATA;
        }

        t = tm2_read_stream(l, l->buffer + offset, tm2_stream_order[i],
                            buf_size - offset);
        if (t < 0) {
            int j = tm2_stream_order[i];
            memset(l->tokens[j], 0, sizeof(**l->tokens) * l->tok_lens[j]);
            return t;
        }
        offset += t;
    }
    p->key_frame = tm2_decode_blocks(l, p);
    if (p->key_frame)
        p->pict_type = AV_PICTURE_TYPE_I;
    else
        p->pict_type = AV_PICTURE_TYPE_P;

    l->cur = !l->cur;
    *got_frame = 1;
    ret = av_frame_ref(data, l->pic);

    return (ret < 0) ? ret : buf_size;
}

static av_cold int decode_init(AVCodecContext *avctx)
{
    TM2Context * const l = avctx->priv_data;
    int i, w = avctx->width, h = avctx->height;

    if ((avctx->width & 3) || (avctx->height & 3)) {
        av_log(avctx, AV_LOG_ERROR, "Width and height must be multiple of 4\n");
        return AVERROR(EINVAL);
    }

    l->avctx       = avctx;
    avctx->pix_fmt = AV_PIX_FMT_BGR24;

    l->pic = av_frame_alloc();
    if (!l->pic)
        return AVERROR(ENOMEM);

    ff_bswapdsp_init(&l->bdsp);

    l->last  = av_malloc_array(w >> 2, 4 * sizeof(*l->last));
    l->clast = av_malloc_array(w >> 2, 4 * sizeof(*l->clast));

    for (i = 0; i < TM2_NUM_STREAMS; i++) {
        l->tokens[i]   = NULL;
        l->tok_lens[i] = 0;
    }

    w += 8;
    h += 8;
    l->Y1_base  = av_calloc(w * h, sizeof(*l->Y1_base));
    l->Y2_base  = av_calloc(w * h, sizeof(*l->Y2_base));
    l->y_stride = w;
    w = (w + 1) >> 1;
    h = (h + 1) >> 1;
    l->U1_base   = av_calloc(w * h, sizeof(*l->U1_base));
    l->V1_base   = av_calloc(w * h, sizeof(*l->V1_base));
    l->U2_base   = av_calloc(w * h, sizeof(*l->U2_base));
    l->V2_base   = av_calloc(w * h, sizeof(*l->V2_base));
    l->uv_stride = w;
    l->cur       = 0;
    if (!l->Y1_base || !l->Y2_base || !l->U1_base ||
        !l->V1_base || !l->U2_base || !l->V2_base ||
        !l->last    || !l->clast) {
        av_freep(&l->Y1_base);
        av_freep(&l->Y2_base);
        av_freep(&l->U1_base);
        av_freep(&l->U2_base);
        av_freep(&l->V1_base);
        av_freep(&l->V2_base);
        av_freep(&l->last);
        av_freep(&l->clast);
        av_frame_free(&l->pic);
        return AVERROR(ENOMEM);
    }
    l->Y1 = l->Y1_base + l->y_stride  * 4 + 4;
    l->Y2 = l->Y2_base + l->y_stride  * 4 + 4;
    l->U1 = l->U1_base + l->uv_stride * 2 + 2;
    l->U2 = l->U2_base + l->uv_stride * 2 + 2;
    l->V1 = l->V1_base + l->uv_stride * 2 + 2;
    l->V2 = l->V2_base + l->uv_stride * 2 + 2;

    return 0;
}

static av_cold int decode_end(AVCodecContext *avctx)
{
    TM2Context * const l = avctx->priv_data;
    int i;

    av_free(l->last);
    av_free(l->clast);
    for (i = 0; i < TM2_NUM_STREAMS; i++)
        av_freep(&l->tokens[i]);
    if (l->Y1) {
        av_freep(&l->Y1_base);
        av_freep(&l->U1_base);
        av_freep(&l->V1_base);
        av_freep(&l->Y2_base);
        av_freep(&l->U2_base);
        av_freep(&l->V2_base);
    }
    av_freep(&l->buffer);
    l->buffer_size = 0;

    av_frame_free(&l->pic);

    return 0;
}

AVCodec ff_truemotion2_decoder = {
    .name           = "truemotion2",
    .long_name      = NULL_IF_CONFIG_SMALL("Duck TrueMotion 2.0"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_TRUEMOTION2,
    .priv_data_size = sizeof(TM2Context),
    .init           = decode_init,
    .close          = decode_end,
    .decode         = decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
};
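
/*
 * Illustrative usage sketch (not part of the original file): decoding a TM2
 * packet through the public libavcodec API of this era, with error handling
 * omitted for brevity.  The packet `pkt` is a placeholder and is assumed to
 * come from a demuxed stream containing TrueMotion 2 video.
 *
 *     avcodec_register_all();
 *     AVCodec *codec        = avcodec_find_decoder(AV_CODEC_ID_TRUEMOTION2);
 *     AVCodecContext *avctx = avcodec_alloc_context3(codec);
 *     avctx->width  = 320;   // must be a multiple of 4 (see decode_init)
 *     avctx->height = 240;   // must be a multiple of 4
 *     avcodec_open2(avctx, codec, NULL);
 *
 *     AVFrame *frame = av_frame_alloc();
 *     int got_frame  = 0;
 *     avcodec_decode_video2(avctx, frame, &got_frame, &pkt);
 *     if (got_frame) {
 *         // frame->data[0] now holds packed BGR24 pixels,
 *         // frame->linesize[0] bytes per row
 *     }
 */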