mpeg12dec.c
1 /*
2  * MPEG-1/2 decoder
3  * Copyright (c) 2000, 2001 Fabrice Bellard
4  * Copyright (c) 2002-2013 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * MPEG-1/2 decoder
26  */
27 
28 #include "config_components.h"
29 
30 #define UNCHECKED_BITSTREAM_READER 1
31 #include <inttypes.h>
32 
33 #include "libavutil/attributes.h"
34 #include "libavutil/emms.h"
35 #include "libavutil/imgutils.h"
36 #include "libavutil/internal.h"
37 #include "libavutil/mem_internal.h"
38 #include "libavutil/reverse.h"
39 #include "libavutil/stereo3d.h"
40 #include "libavutil/timecode.h"
41 
42 #include "avcodec.h"
43 #include "codec_internal.h"
44 #include "decode.h"
45 #include "error_resilience.h"
46 #include "hwaccel_internal.h"
47 #include "hwconfig.h"
48 #include "idctdsp.h"
49 #include "internal.h"
50 #include "mpeg_er.h"
51 #include "mpeg12.h"
52 #include "mpeg12codecs.h"
53 #include "mpeg12data.h"
54 #include "mpeg12dec.h"
55 #include "mpegutils.h"
56 #include "mpegvideo.h"
57 #include "mpegvideodata.h"
58 #include "mpegvideodec.h"
59 #include "profiles.h"
60 #include "startcode.h"
61 #include "thread.h"
62 
63 #define A53_MAX_CC_COUNT 2000
64 
65 enum Mpeg2ClosedCaptionsFormat {
66  CC_FORMAT_AUTO,
67  CC_FORMAT_A53_PART4,
68  CC_FORMAT_SCTE20,
69  CC_FORMAT_DVD
70 };
71 
72 typedef struct Mpeg1Context {
73  MpegEncContext mpeg_enc_ctx;
74  int mpeg_enc_ctx_allocated; /* true if decoding context allocated */
75  int repeat_field; /* true if we must repeat the field */
76  AVPanScan pan_scan; /* some temporary storage for the panscan */
77  AVStereo3D stereo3d;
78  int has_stereo3d;
79  AVBufferRef *a53_buf_ref;
80  enum Mpeg2ClosedCaptionsFormat cc_format;
81  uint8_t afd;
82  int has_afd;
83  int slice_count;
84  unsigned aspect_ratio_info;
85  AVRational save_aspect;
86  int save_width, save_height, save_progressive_seq;
87  AVRational frame_rate_ext; /* MPEG-2 specific framerate modificator */
88  unsigned frame_rate_index;
89  int sync; /* Did we reach a sync point like a GOP/SEQ/KEYFrame? */
90  int closed_gop;
91  int tmpgexs;
92  int first_slice;
93  int extradata_decoded;
94  int64_t timecode_frame_start; /*< GOP timecode frame start number, in non drop frame format */
95 } Mpeg1Context;
96 
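/* Decoder-local mb_type flag: marks P-picture macroblock types that are coded
 * without a transmitted motion vector (the prediction is the zero MV). */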
97 #define MB_TYPE_ZERO_MV 0x20000000
98 
99 static const uint32_t ptype2mb_type[7] = {
100  MB_TYPE_INTRA,
101  MB_TYPE_L0 | MB_TYPE_CBP | MB_TYPE_ZERO_MV | MB_TYPE_16x16,
102  MB_TYPE_L0,
103  MB_TYPE_L0 | MB_TYPE_CBP,
104  MB_TYPE_QUANT | MB_TYPE_INTRA,
105  MB_TYPE_QUANT | MB_TYPE_L0 | MB_TYPE_CBP | MB_TYPE_ZERO_MV | MB_TYPE_16x16,
106  MB_TYPE_QUANT | MB_TYPE_L0 | MB_TYPE_CBP,
107 };
108 
109 static const uint32_t btype2mb_type[11] = {
110  MB_TYPE_INTRA,
111  MB_TYPE_L1,
112  MB_TYPE_L1 | MB_TYPE_CBP,
113  MB_TYPE_L0,
114  MB_TYPE_L0 | MB_TYPE_CBP,
115  MB_TYPE_L0L1,
116  MB_TYPE_L0L1 | MB_TYPE_CBP,
117  MB_TYPE_QUANT | MB_TYPE_INTRA,
118  MB_TYPE_QUANT | MB_TYPE_L1 | MB_TYPE_CBP,
119  MB_TYPE_QUANT | MB_TYPE_L0 | MB_TYPE_CBP,
120  MB_TYPE_QUANT | MB_TYPE_L0L1 | MB_TYPE_CBP,
121 };
122 
123 /* as H.263, but only 17 codes */
124 static int mpeg_decode_motion(MpegEncContext *s, int fcode, int pred)
125 {
126  int code, sign, val, shift;
127 
128  code = get_vlc2(&s->gb, ff_mv_vlc, MV_VLC_BITS, 2);
129  if (code == 0)
130  return pred;
131  if (code < 0)
132  return 0xffff;
133 
134  sign = get_bits1(&s->gb);
135  shift = fcode - 1;
136  val = code;
137  if (shift) {
138  val = (val - 1) << shift;
139  val |= get_bits(&s->gb, shift);
140  val++;
141  }
142  if (sign)
143  val = -val;
144  val += pred;
145 
146  /* modulo decoding */
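 /* The motion vector delta is coded modulo 32 << (f_code - 1); sign-extending
  * to (5 + shift) bits folds pred + delta back into [-16 << shift, (16 << shift) - 1]. */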
147  return sign_extend(val, 5 + shift);
148 }
149 
150 #define MAX_INDEX (64 - 1)
151 #define check_scantable_index(ctx, x) \
152  do { \
153  if ((x) > MAX_INDEX) { \
154  av_log(ctx->avctx, AV_LOG_ERROR, "ac-tex damaged at %d %d\n", \
155  ctx->mb_x, ctx->mb_y); \
156  return AVERROR_INVALIDDATA; \
157  } \
158  } while (0)
159 
160 static int mpeg1_decode_block_inter(MpegEncContext *s,
161  int16_t *block, int n)
162 {
163  int level, i, j, run;
164  uint8_t *const scantable = s->intra_scantable.permutated;
165  const uint16_t *quant_matrix = s->inter_matrix;
166  const int qscale = s->qscale;
167 
168  {
169  OPEN_READER(re, &s->gb);
170  i = -1;
171  // special case for first coefficient, no need to add second VLC table
172  UPDATE_CACHE(re, &s->gb);
173  if (((int32_t) GET_CACHE(re, &s->gb)) < 0) {
174  level = (3 * qscale * quant_matrix[0]) >> 5;
175  level = (level - 1) | 1;
176  if (GET_CACHE(re, &s->gb) & 0x40000000)
177  level = -level;
178  block[0] = level;
179  i++;
180  SKIP_BITS(re, &s->gb, 2);
181  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
182  goto end;
183  }
184  /* now quantify & encode AC coefficients */
185  for (;;) {
186  GET_RL_VLC(level, run, re, &s->gb, ff_mpeg1_rl_vlc,
187  TEX_VLC_BITS, 2, 0);
188 
189  if (level != 0) {
190  i += run;
191  if (i > MAX_INDEX)
192  break;
193  j = scantable[i];
194  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
195  level = (level - 1) | 1;
196  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
197  SHOW_SBITS(re, &s->gb, 1);
198  SKIP_BITS(re, &s->gb, 1);
199  } else {
200  /* escape */
201  run = SHOW_UBITS(re, &s->gb, 6) + 1;
202  LAST_SKIP_BITS(re, &s->gb, 6);
203  UPDATE_CACHE(re, &s->gb);
204  level = SHOW_SBITS(re, &s->gb, 8);
205  SKIP_BITS(re, &s->gb, 8);
206  if (level == -128) {
207  level = SHOW_UBITS(re, &s->gb, 8) - 256;
208  SKIP_BITS(re, &s->gb, 8);
209  } else if (level == 0) {
210  level = SHOW_UBITS(re, &s->gb, 8);
211  SKIP_BITS(re, &s->gb, 8);
212  }
213  i += run;
214  if (i > MAX_INDEX)
215  break;
216  j = scantable[i];
217  if (level < 0) {
218  level = -level;
219  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
220  level = (level - 1) | 1;
221  level = -level;
222  } else {
223  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
224  level = (level - 1) | 1;
225  }
226  }
227 
228  block[j] = level;
229  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
230  break;
231  UPDATE_CACHE(re, &s->gb);
232  }
233 end:
234  LAST_SKIP_BITS(re, &s->gb, 2);
235  CLOSE_READER(re, &s->gb);
236  }
237 
238  check_scantable_index(s, i);
239 
240  s->block_last_index[n] = i;
241  return 0;
242 }
243 
244 static int mpeg2_decode_block_non_intra(MpegEncContext *s,
245  int16_t *block, int n)
246 {
247  int level, i, j, run;
248  uint8_t *const scantable = s->intra_scantable.permutated;
249  const uint16_t *quant_matrix;
250  const int qscale = s->qscale;
251  int mismatch;
252 
253  mismatch = 1;
254 
255  {
256  OPEN_READER(re, &s->gb);
257  i = -1;
258  if (n < 4)
259  quant_matrix = s->inter_matrix;
260  else
261  quant_matrix = s->chroma_inter_matrix;
262 
263  // Special case for first coefficient, no need to add second VLC table.
264  UPDATE_CACHE(re, &s->gb);
265  if (((int32_t) GET_CACHE(re, &s->gb)) < 0) {
266  level = (3 * qscale * quant_matrix[0]) >> 5;
267  if (GET_CACHE(re, &s->gb) & 0x40000000)
268  level = -level;
269  block[0] = level;
270  mismatch ^= level;
271  i++;
272  SKIP_BITS(re, &s->gb, 2);
273  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
274  goto end;
275  }
276 
277  /* now quantify & encode AC coefficients */
278  for (;;) {
279  GET_RL_VLC(level, run, re, &s->gb, ff_mpeg1_rl_vlc,
280  TEX_VLC_BITS, 2, 0);
281 
282  if (level != 0) {
283  i += run;
284  if (i > MAX_INDEX)
285  break;
286  j = scantable[i];
287  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
288  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
289  SHOW_SBITS(re, &s->gb, 1);
290  SKIP_BITS(re, &s->gb, 1);
291  } else {
292  /* escape */
293  run = SHOW_UBITS(re, &s->gb, 6) + 1;
294  LAST_SKIP_BITS(re, &s->gb, 6);
295  UPDATE_CACHE(re, &s->gb);
296  level = SHOW_SBITS(re, &s->gb, 12);
297  SKIP_BITS(re, &s->gb, 12);
298 
299  i += run;
300  if (i > MAX_INDEX)
301  break;
302  j = scantable[i];
303  if (level < 0) {
304  level = ((-level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
305  level = -level;
306  } else {
307  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
308  }
309  }
310 
311  mismatch ^= level;
312  block[j] = level;
313  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
314  break;
315  UPDATE_CACHE(re, &s->gb);
316  }
317 end:
318  LAST_SKIP_BITS(re, &s->gb, 2);
319  CLOSE_READER(re, &s->gb);
320  }
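 /* MPEG-2 mismatch control: the dequantized coefficients must sum to an odd
  * value, so if the accumulated parity is even the LSB of the last coefficient
  * is toggled to keep encoder and decoder IDCTs from drifting apart. */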
321  block[63] ^= (mismatch & 1);
322 
323  check_scantable_index(s, i);
324 
325  s->block_last_index[n] = i;
326  return 0;
327 }
328 
329 static int mpeg2_decode_block_intra(MpegEncContext *s,
330  int16_t *block, int n)
331 {
332  int level, dc, diff, i, j, run;
333  int component;
334  const RL_VLC_ELEM *rl_vlc;
335  uint8_t *const scantable = s->intra_scantable.permutated;
336  const uint16_t *quant_matrix;
337  const int qscale = s->qscale;
338  int mismatch;
339 
340  /* DC coefficient */
341  if (n < 4) {
342  quant_matrix = s->intra_matrix;
343  component = 0;
344  } else {
345  quant_matrix = s->chroma_intra_matrix;
346  component = (n & 1) + 1;
347  }
348  diff = decode_dc(&s->gb, component);
349  dc = s->last_dc[component];
350  dc += diff;
351  s->last_dc[component] = dc;
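 /* intra_dc_precision selects 8- to 11-bit DC coding; the intra DC quantizer
  * step is 8 >> intra_dc_precision, which is what the scaling below applies. */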
352  block[0] = dc * (1 << (3 - s->intra_dc_precision));
353  ff_tlog(s->avctx, "dc=%d\n", block[0]);
354  mismatch = block[0] ^ 1;
355  i = 0;
356  if (s->intra_vlc_format)
357  rl_vlc = ff_mpeg2_rl_vlc;
358  else
359  rl_vlc = ff_mpeg1_rl_vlc;
360 
361  {
362  OPEN_READER(re, &s->gb);
363  /* now quantify & encode AC coefficients */
364  for (;;) {
365  UPDATE_CACHE(re, &s->gb);
366  GET_RL_VLC(level, run, re, &s->gb, rl_vlc,
367  TEX_VLC_BITS, 2, 0);
368 
369  if (level == 127) {
370  break;
371  } else if (level != 0) {
372  i += run;
373  if (i > MAX_INDEX)
374  break;
375  j = scantable[i];
376  level = (level * qscale * quant_matrix[j]) >> 4;
377  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
378  SHOW_SBITS(re, &s->gb, 1);
379  LAST_SKIP_BITS(re, &s->gb, 1);
380  } else {
381  /* escape */
382  run = SHOW_UBITS(re, &s->gb, 6) + 1;
383  SKIP_BITS(re, &s->gb, 6);
384  level = SHOW_SBITS(re, &s->gb, 12);
385  LAST_SKIP_BITS(re, &s->gb, 12);
386  i += run;
387  if (i > MAX_INDEX)
388  break;
389  j = scantable[i];
390  if (level < 0) {
391  level = (-level * qscale * quant_matrix[j]) >> 4;
392  level = -level;
393  } else {
394  level = (level * qscale * quant_matrix[j]) >> 4;
395  }
396  }
397 
398  mismatch ^= level;
399  block[j] = level;
400  }
401  CLOSE_READER(re, &s->gb);
402  }
403  block[63] ^= mismatch & 1;
404 
405  check_scantable_index(s, i);
406 
407  s->block_last_index[n] = i;
408  return 0;
409 }
410 
411 /******************************************/
412 /* decoding */
413 
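/* Read one dual-prime differential motion vector component; the MPEG-2 syntax
 * only allows the values -1, 0 and +1. */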
414 static inline int get_dmv(MpegEncContext *s)
415 {
416  if (get_bits1(&s->gb))
417  return 1 - (get_bits1(&s->gb) << 1);
418  else
419  return 0;
420 }
421 
422 /* motion type (for MPEG-2) */
423 #define MT_FIELD 1
424 #define MT_FRAME 2
425 #define MT_16X8 2
426 #define MT_DMV 3
427 
428 static int mpeg_decode_mb(MpegEncContext *s, int16_t block[12][64])
429 {
430  int i, j, k, cbp, val, mb_type, motion_type;
431  const int mb_block_count = 4 + (1 << s->chroma_format);
432  int ret;
433 
434  ff_tlog(s->avctx, "decode_mb: x=%d y=%d\n", s->mb_x, s->mb_y);
435 
436  av_assert2(s->mb_skipped == 0);
437 
438  if (s->mb_skip_run-- != 0) {
439  if (s->pict_type == AV_PICTURE_TYPE_P) {
440  s->mb_skipped = 1;
441  s->current_picture.mb_type[s->mb_x + s->mb_y * s->mb_stride] =
442  MB_TYPE_SKIP | MB_TYPE_L0 | MB_TYPE_16x16;
443  } else {
444  int mb_type;
445 
446  if (s->mb_x)
447  mb_type = s->current_picture.mb_type[s->mb_x + s->mb_y * s->mb_stride - 1];
448  else
449  // FIXME not sure if this is allowed in MPEG at all
450  mb_type = s->current_picture.mb_type[s->mb_width + (s->mb_y - 1) * s->mb_stride - 1];
451  if (IS_INTRA(mb_type)) {
452  av_log(s->avctx, AV_LOG_ERROR, "skip with previntra\n");
453  return AVERROR_INVALIDDATA;
454  }
455  s->current_picture.mb_type[s->mb_x + s->mb_y * s->mb_stride] =
456  mb_type | MB_TYPE_SKIP;
457 
458  if ((s->mv[0][0][0] | s->mv[0][0][1] | s->mv[1][0][0] | s->mv[1][0][1]) == 0)
459  s->mb_skipped = 1;
460  }
461 
462  return 0;
463  }
464 
465  switch (s->pict_type) {
466  default:
467  case AV_PICTURE_TYPE_I:
468  if (get_bits1(&s->gb) == 0) {
469  if (get_bits1(&s->gb) == 0) {
470  av_log(s->avctx, AV_LOG_ERROR,
471  "Invalid mb type in I-frame at %d %d\n",
472  s->mb_x, s->mb_y);
473  return AVERROR_INVALIDDATA;
474  }
475  mb_type = MB_TYPE_QUANT | MB_TYPE_INTRA;
476  } else {
477  mb_type = MB_TYPE_INTRA;
478  }
479  break;
480  case AV_PICTURE_TYPE_P:
481  mb_type = get_vlc2(&s->gb, ff_mb_ptype_vlc, MB_PTYPE_VLC_BITS, 1);
482  if (mb_type < 0) {
483  av_log(s->avctx, AV_LOG_ERROR,
484  "Invalid mb type in P-frame at %d %d\n", s->mb_x, s->mb_y);
485  return AVERROR_INVALIDDATA;
486  }
487  mb_type = ptype2mb_type[mb_type];
488  break;
489  case AV_PICTURE_TYPE_B:
490  mb_type = get_vlc2(&s->gb, ff_mb_btype_vlc, MB_BTYPE_VLC_BITS, 1);
491  if (mb_type < 0) {
492  av_log(s->avctx, AV_LOG_ERROR,
493  "Invalid mb type in B-frame at %d %d\n", s->mb_x, s->mb_y);
494  return AVERROR_INVALIDDATA;
495  }
496  mb_type = btype2mb_type[mb_type];
497  break;
498  }
499  ff_tlog(s->avctx, "mb_type=%x\n", mb_type);
500 // motion_type = 0; /* avoid warning */
501  if (IS_INTRA(mb_type)) {
502  s->bdsp.clear_blocks(s->block[0]);
503 
504  if (!s->chroma_y_shift)
505  s->bdsp.clear_blocks(s->block[6]);
506 
507  /* compute DCT type */
508  // FIXME: add an interlaced_dct coded var?
509  if (s->picture_structure == PICT_FRAME &&
510  !s->frame_pred_frame_dct)
511  s->interlaced_dct = get_bits1(&s->gb);
512 
513  if (IS_QUANT(mb_type))
514  s->qscale = mpeg_get_qscale(s);
515 
516  if (s->concealment_motion_vectors) {
517  /* just parse them */
518  if (s->picture_structure != PICT_FRAME)
519  skip_bits1(&s->gb); /* field select */
520 
521  s->mv[0][0][0] =
522  s->last_mv[0][0][0] =
523  s->last_mv[0][1][0] = mpeg_decode_motion(s, s->mpeg_f_code[0][0],
524  s->last_mv[0][0][0]);
525  s->mv[0][0][1] =
526  s->last_mv[0][0][1] =
527  s->last_mv[0][1][1] = mpeg_decode_motion(s, s->mpeg_f_code[0][1],
528  s->last_mv[0][0][1]);
529 
530  check_marker(s->avctx, &s->gb, "after concealment_motion_vectors");
531  } else {
532  /* reset mv prediction */
533  memset(s->last_mv, 0, sizeof(s->last_mv));
534  }
535  s->mb_intra = 1;
536 
537  if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
538  for (i = 0; i < mb_block_count; i++)
539  if ((ret = mpeg2_decode_block_intra(s, *s->pblocks[i], i)) < 0)
540  return ret;
541  } else {
542  for (i = 0; i < 6; i++) {
543  ret = ff_mpeg1_decode_block_intra(&s->gb,
544  s->intra_matrix,
545  s->intra_scantable.permutated,
546  s->last_dc, *s->pblocks[i],
547  i, s->qscale);
548  if (ret < 0) {
549  av_log(s->avctx, AV_LOG_ERROR, "ac-tex damaged at %d %d\n",
550  s->mb_x, s->mb_y);
551  return ret;
552  }
553 
554  s->block_last_index[i] = ret;
555  }
556  }
557  } else {
558  if (mb_type & MB_TYPE_ZERO_MV) {
559  av_assert2(mb_type & MB_TYPE_CBP);
560 
561  s->mv_dir = MV_DIR_FORWARD;
562  if (s->picture_structure == PICT_FRAME) {
563  if (s->picture_structure == PICT_FRAME
564  && !s->frame_pred_frame_dct)
565  s->interlaced_dct = get_bits1(&s->gb);
566  s->mv_type = MV_TYPE_16X16;
567  } else {
568  s->mv_type = MV_TYPE_FIELD;
569  mb_type |= MB_TYPE_INTERLACED;
570  s->field_select[0][0] = s->picture_structure - 1;
571  }
572 
573  if (IS_QUANT(mb_type))
574  s->qscale = mpeg_get_qscale(s);
575 
576  s->last_mv[0][0][0] = 0;
577  s->last_mv[0][0][1] = 0;
578  s->last_mv[0][1][0] = 0;
579  s->last_mv[0][1][1] = 0;
580  s->mv[0][0][0] = 0;
581  s->mv[0][0][1] = 0;
582  } else {
583  av_assert2(mb_type & MB_TYPE_L0L1);
584  // FIXME decide if MBs in field pictures are MB_TYPE_INTERLACED
585  /* get additional motion vector type */
586  if (s->picture_structure == PICT_FRAME && s->frame_pred_frame_dct) {
587  motion_type = MT_FRAME;
588  } else {
589  motion_type = get_bits(&s->gb, 2);
590  if (s->picture_structure == PICT_FRAME && HAS_CBP(mb_type))
591  s->interlaced_dct = get_bits1(&s->gb);
592  }
593 
594  if (IS_QUANT(mb_type))
595  s->qscale = mpeg_get_qscale(s);
596 
597  /* motion vectors */
598  s->mv_dir = (mb_type >> 13) & 3;
599  ff_tlog(s->avctx, "motion_type=%d\n", motion_type);
600  switch (motion_type) {
601  case MT_FRAME: /* or MT_16X8 */
602  if (s->picture_structure == PICT_FRAME) {
603  mb_type |= MB_TYPE_16x16;
604  s->mv_type = MV_TYPE_16X16;
605  for (i = 0; i < 2; i++) {
606  if (USES_LIST(mb_type, i)) {
607  /* MT_FRAME */
608  s->mv[i][0][0] =
609  s->last_mv[i][0][0] =
610  s->last_mv[i][1][0] =
611  mpeg_decode_motion(s, s->mpeg_f_code[i][0],
612  s->last_mv[i][0][0]);
613  s->mv[i][0][1] =
614  s->last_mv[i][0][1] =
615  s->last_mv[i][1][1] =
616  mpeg_decode_motion(s, s->mpeg_f_code[i][1],
617  s->last_mv[i][0][1]);
618  /* full_pel: only for MPEG-1 */
619  if (s->full_pel[i]) {
620  s->mv[i][0][0] *= 2;
621  s->mv[i][0][1] *= 2;
622  }
623  }
624  }
625  } else {
626  mb_type |= MB_TYPE_16x8 | MB_TYPE_INTERLACED;
627  s->mv_type = MV_TYPE_16X8;
628  for (i = 0; i < 2; i++) {
629  if (USES_LIST(mb_type, i)) {
630  /* MT_16X8 */
631  for (j = 0; j < 2; j++) {
632  s->field_select[i][j] = get_bits1(&s->gb);
633  for (k = 0; k < 2; k++) {
634  val = mpeg_decode_motion(s, s->mpeg_f_code[i][k],
635  s->last_mv[i][j][k]);
636  s->last_mv[i][j][k] = val;
637  s->mv[i][j][k] = val;
638  }
639  }
640  }
641  }
642  }
643  break;
644  case MT_FIELD:
645  s->mv_type = MV_TYPE_FIELD;
646  if (s->picture_structure == PICT_FRAME) {
647  mb_type |= MB_TYPE_16x8 | MB_TYPE_INTERLACED;
648  for (i = 0; i < 2; i++) {
649  if (USES_LIST(mb_type, i)) {
650  for (j = 0; j < 2; j++) {
651  s->field_select[i][j] = get_bits1(&s->gb);
652  val = mpeg_decode_motion(s, s->mpeg_f_code[i][0],
653  s->last_mv[i][j][0]);
654  s->last_mv[i][j][0] = val;
655  s->mv[i][j][0] = val;
656  ff_tlog(s->avctx, "fmx=%d\n", val);
657  val = mpeg_decode_motion(s, s->mpeg_f_code[i][1],
658  s->last_mv[i][j][1] >> 1);
659  s->last_mv[i][j][1] = 2 * val;
660  s->mv[i][j][1] = val;
661  ff_tlog(s->avctx, "fmy=%d\n", val);
662  }
663  }
664  }
665  } else {
666  av_assert0(!s->progressive_sequence);
667  mb_type |= MB_TYPE_16x16 | MB_TYPE_INTERLACED;
668  for (i = 0; i < 2; i++) {
669  if (USES_LIST(mb_type, i)) {
670  s->field_select[i][0] = get_bits1(&s->gb);
671  for (k = 0; k < 2; k++) {
672  val = mpeg_decode_motion(s, s->mpeg_f_code[i][k],
673  s->last_mv[i][0][k]);
674  s->last_mv[i][0][k] = val;
675  s->last_mv[i][1][k] = val;
676  s->mv[i][0][k] = val;
677  }
678  }
679  }
680  }
681  break;
682  case MT_DMV:
683  if (s->progressive_sequence){
684  av_log(s->avctx, AV_LOG_ERROR, "MT_DMV in progressive_sequence\n");
685  return AVERROR_INVALIDDATA;
686  }
687  s->mv_type = MV_TYPE_DMV;
688  for (i = 0; i < 2; i++) {
689  if (USES_LIST(mb_type, i)) {
690  int dmx, dmy, mx, my, m;
691  const int my_shift = s->picture_structure == PICT_FRAME;
692 
693  mx = mpeg_decode_motion(s, s->mpeg_f_code[i][0],
694  s->last_mv[i][0][0]);
695  s->last_mv[i][0][0] = mx;
696  s->last_mv[i][1][0] = mx;
697  dmx = get_dmv(s);
698  my = mpeg_decode_motion(s, s->mpeg_f_code[i][1],
699  s->last_mv[i][0][1] >> my_shift);
700  dmy = get_dmv(s);
701 
702 
703  s->last_mv[i][0][1] = my * (1 << my_shift);
704  s->last_mv[i][1][1] = my * (1 << my_shift);
705 
706  s->mv[i][0][0] = mx;
707  s->mv[i][0][1] = my;
708  s->mv[i][1][0] = mx; // not used
709  s->mv[i][1][1] = my; // not used
710 
711  if (s->picture_structure == PICT_FRAME) {
712  mb_type |= MB_TYPE_16x16 | MB_TYPE_INTERLACED;
713 
714  // m = 1 + 2 * s->top_field_first;
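 /* Dual prime, frame picture: the opposite-parity prediction scales the vector
  * by the field distance (1 or 3 field periods, depending on field order) and
  * halves it; the -1/+1 below corrects the vertical offset between fields. */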
715  m = s->top_field_first ? 1 : 3;
716 
717  /* top -> top pred */
718  s->mv[i][2][0] = ((mx * m + (mx > 0)) >> 1) + dmx;
719  s->mv[i][2][1] = ((my * m + (my > 0)) >> 1) + dmy - 1;
720  m = 4 - m;
721  s->mv[i][3][0] = ((mx * m + (mx > 0)) >> 1) + dmx;
722  s->mv[i][3][1] = ((my * m + (my > 0)) >> 1) + dmy + 1;
723  } else {
724  mb_type |= MB_TYPE_16x16;
725 
726  s->mv[i][2][0] = ((mx + (mx > 0)) >> 1) + dmx;
727  s->mv[i][2][1] = ((my + (my > 0)) >> 1) + dmy;
728  if (s->picture_structure == PICT_TOP_FIELD)
729  s->mv[i][2][1]--;
730  else
731  s->mv[i][2][1]++;
732  }
733  }
734  }
735  break;
736  default:
737  av_log(s->avctx, AV_LOG_ERROR,
738  "00 motion_type at %d %d\n", s->mb_x, s->mb_y);
739  return AVERROR_INVALIDDATA;
740  }
741  }
742 
743  s->mb_intra = 0;
744  if (HAS_CBP(mb_type)) {
745  s->bdsp.clear_blocks(s->block[0]);
746 
747  cbp = get_vlc2(&s->gb, ff_mb_pat_vlc, MB_PAT_VLC_BITS, 1);
748  if (mb_block_count > 6) {
749  cbp *= 1 << mb_block_count - 6;
750  cbp |= get_bits(&s->gb, mb_block_count - 6);
751  s->bdsp.clear_blocks(s->block[6]);
752  }
753  if (cbp <= 0) {
754  av_log(s->avctx, AV_LOG_ERROR,
755  "invalid cbp %d at %d %d\n", cbp, s->mb_x, s->mb_y);
756  return AVERROR_INVALIDDATA;
757  }
758 
759  if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
760  cbp <<= 12 - mb_block_count;
761 
762  for (i = 0; i < mb_block_count; i++) {
763  if (cbp & (1 << 11)) {
764  if ((ret = mpeg2_decode_block_non_intra(s, *s->pblocks[i], i)) < 0)
765  return ret;
766  } else {
767  s->block_last_index[i] = -1;
768  }
769  cbp += cbp;
770  }
771  } else {
772  for (i = 0; i < 6; i++) {
773  if (cbp & 32) {
774  if ((ret = mpeg1_decode_block_inter(s, *s->pblocks[i], i)) < 0)
775  return ret;
776  } else {
777  s->block_last_index[i] = -1;
778  }
779  cbp += cbp;
780  }
781  }
782  } else {
783  for (i = 0; i < 12; i++)
784  s->block_last_index[i] = -1;
785  }
786  }
787 
788  s->current_picture.mb_type[s->mb_x + s->mb_y * s->mb_stride] = mb_type;
789 
790  return 0;
791 }
792 
793 static av_cold int mpeg_decode_init(AVCodecContext *avctx)
794 {
795  Mpeg1Context *s = avctx->priv_data;
796  MpegEncContext *s2 = &s->mpeg_enc_ctx;
797 
798  if ( avctx->codec_tag != AV_RL32("VCR2")
799  && avctx->codec_tag != AV_RL32("BW10"))
800  avctx->coded_width = avctx->coded_height = 0; // do not trust dimensions from input
801  ff_mpv_decode_init(s2, avctx);
802 
803  ff_mpeg12_init_vlcs();
804 
805  s2->chroma_format = 1;
806  s->mpeg_enc_ctx_allocated = 0;
807  s->repeat_field = 0;
808  avctx->color_range = AVCOL_RANGE_MPEG;
809  return 0;
810 }
811 
812 #if HAVE_THREADS
813 static int mpeg_decode_update_thread_context(AVCodecContext *avctx,
814  const AVCodecContext *avctx_from)
815 {
816  Mpeg1Context *ctx = avctx->priv_data, *ctx_from = avctx_from->priv_data;
817  MpegEncContext *s = &ctx->mpeg_enc_ctx, *s1 = &ctx_from->mpeg_enc_ctx;
818  int err;
819 
820  if (avctx == avctx_from ||
821  !ctx_from->mpeg_enc_ctx_allocated ||
822  !s1->context_initialized)
823  return 0;
824 
825  err = ff_mpeg_update_thread_context(avctx, avctx_from);
826  if (err)
827  return err;
828 
829  if (!ctx->mpeg_enc_ctx_allocated)
830  memcpy(s + 1, s1 + 1, sizeof(Mpeg1Context) - sizeof(MpegEncContext));
831 
832  return 0;
833 }
834 #endif
835 
836 static const enum AVPixelFormat mpeg1_hwaccel_pixfmt_list_420[] = {
837 #if CONFIG_MPEG1_NVDEC_HWACCEL
838  AV_PIX_FMT_CUDA,
839 #endif
840 #if CONFIG_MPEG1_VDPAU_HWACCEL
841  AV_PIX_FMT_VDPAU,
842 #endif
843  AV_PIX_FMT_YUV420P,
844  AV_PIX_FMT_NONE
845 };
846 
847 static const enum AVPixelFormat mpeg2_hwaccel_pixfmt_list_420[] = {
848 #if CONFIG_MPEG2_NVDEC_HWACCEL
849  AV_PIX_FMT_CUDA,
850 #endif
851 #if CONFIG_MPEG2_VDPAU_HWACCEL
852  AV_PIX_FMT_VDPAU,
853 #endif
854 #if CONFIG_MPEG2_DXVA2_HWACCEL
855  AV_PIX_FMT_DXVA2_VLD,
856 #endif
857 #if CONFIG_MPEG2_D3D11VA_HWACCEL
858  AV_PIX_FMT_D3D11VA_VLD,
859  AV_PIX_FMT_D3D11,
860 #endif
861 #if CONFIG_MPEG2_D3D12VA_HWACCEL
862  AV_PIX_FMT_D3D12,
863 #endif
864 #if CONFIG_MPEG2_VAAPI_HWACCEL
865  AV_PIX_FMT_VAAPI,
866 #endif
867 #if CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL
868  AV_PIX_FMT_VIDEOTOOLBOX,
869 #endif
870  AV_PIX_FMT_YUV420P,
871  AV_PIX_FMT_NONE
872 };
873 
874 static const enum AVPixelFormat mpeg12_pixfmt_list_422[] = {
875  AV_PIX_FMT_YUV422P,
876  AV_PIX_FMT_NONE
877 };
878 
879 static const enum AVPixelFormat mpeg12_pixfmt_list_444[] = {
880  AV_PIX_FMT_YUV444P,
881  AV_PIX_FMT_NONE
882 };
883 
884 static enum AVPixelFormat mpeg_get_pixelformat(AVCodecContext *avctx)
885 {
886  Mpeg1Context *s1 = avctx->priv_data;
887  MpegEncContext *s = &s1->mpeg_enc_ctx;
888  const enum AVPixelFormat *pix_fmts;
889 
890  if (CONFIG_GRAY && (avctx->flags & AV_CODEC_FLAG_GRAY))
891  return AV_PIX_FMT_GRAY8;
892 
893  if (s->chroma_format < 2)
894  pix_fmts = avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO ?
895  mpeg1_hwaccel_pixfmt_list_420 :
896  mpeg2_hwaccel_pixfmt_list_420;
897  else if (s->chroma_format == 2)
898  pix_fmts = mpeg12_pixfmt_list_422;
899  else
900  pix_fmts = mpeg12_pixfmt_list_444;
901 
902  return ff_get_format(avctx, pix_fmts);
903 }
904 
905 /* Call this function when we know all parameters.
906  * It may be called in different places for MPEG-1 and MPEG-2. */
907 static int mpeg_decode_postinit(AVCodecContext *avctx)
908 {
909  Mpeg1Context *s1 = avctx->priv_data;
910  MpegEncContext *s = &s1->mpeg_enc_ctx;
911  int ret;
912 
913  if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
914  // MPEG-1 aspect
915  AVRational aspect_inv = av_d2q(ff_mpeg1_aspect[s1->aspect_ratio_info], 255);
916  avctx->sample_aspect_ratio = (AVRational) { aspect_inv.den, aspect_inv.num };
917  } else { // MPEG-2
918  // MPEG-2 aspect
919  if (s1->aspect_ratio_info > 1) {
920  AVRational dar =
921  av_mul_q(av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
922  (AVRational) { s1->pan_scan.width,
923  s1->pan_scan.height }),
924  (AVRational) { s->width, s->height });
925 
926  /* We ignore the spec here and guess a bit as reality does not
927  * match the spec, see for example res_change_ffmpeg_aspect.ts
928  * and sequence-display-aspect.mpg.
929  * issue1613, 621, 562 */
930  if ((s1->pan_scan.width == 0) || (s1->pan_scan.height == 0) ||
931  (av_cmp_q(dar, (AVRational) { 4, 3 }) &&
932  av_cmp_q(dar, (AVRational) { 16, 9 }))) {
933  s->avctx->sample_aspect_ratio =
934  av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
935  (AVRational) { s->width, s->height });
936  } else {
937  s->avctx->sample_aspect_ratio =
938  av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
939  (AVRational) { s1->pan_scan.width, s1->pan_scan.height });
940 // issue1613 4/3 16/9 -> 16/9
941 // res_change_ffmpeg_aspect.ts 4/3 225/44 ->4/3
942 // widescreen-issue562.mpg 4/3 16/9 -> 16/9
943 // s->avctx->sample_aspect_ratio = av_mul_q(s->avctx->sample_aspect_ratio, (AVRational) {s->width, s->height});
944  ff_dlog(avctx, "aspect A %d/%d\n",
945  ff_mpeg2_aspect[s1->aspect_ratio_info].num,
946  ff_mpeg2_aspect[s1->aspect_ratio_info].den);
947  ff_dlog(avctx, "aspect B %d/%d\n", s->avctx->sample_aspect_ratio.num,
948  s->avctx->sample_aspect_ratio.den);
949  }
950  } else {
951  s->avctx->sample_aspect_ratio =
952  ff_mpeg2_aspect[s1->aspect_ratio_info];
953  }
954  } // MPEG-2
955 
956  if (av_image_check_sar(s->width, s->height,
957  avctx->sample_aspect_ratio) < 0) {
958  av_log(avctx, AV_LOG_WARNING, "ignoring invalid SAR: %u/%u\n",
959  avctx->sample_aspect_ratio.num,
960  avctx->sample_aspect_ratio.den);
961  avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
962  }
963 
964  if ((s1->mpeg_enc_ctx_allocated == 0) ||
965  avctx->coded_width != s->width ||
966  avctx->coded_height != s->height ||
967  s1->save_width != s->width ||
968  s1->save_height != s->height ||
969  av_cmp_q(s1->save_aspect, s->avctx->sample_aspect_ratio) ||
970  (s1->save_progressive_seq != s->progressive_sequence && FFALIGN(s->height, 16) != FFALIGN(s->height, 32)) ||
971  0) {
972  if (s1->mpeg_enc_ctx_allocated) {
973  ff_mpv_common_end(s);
974  s1->mpeg_enc_ctx_allocated = 0;
975  }
976 
977  ret = ff_set_dimensions(avctx, s->width, s->height);
978  if (ret < 0)
979  return ret;
980 
981  if (avctx->codec_id == AV_CODEC_ID_MPEG2VIDEO && s->bit_rate &&
982  (s->bit_rate != 0x3FFFF*400)) {
983  avctx->rc_max_rate = s->bit_rate;
984  } else if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO && s->bit_rate &&
985  (s->bit_rate != 0x3FFFF*400 || s->vbv_delay != 0xFFFF)) {
986  avctx->bit_rate = s->bit_rate;
987  }
988  s1->save_aspect = s->avctx->sample_aspect_ratio;
989  s1->save_width = s->width;
990  s1->save_height = s->height;
991  s1->save_progressive_seq = s->progressive_sequence;
992 
993  /* low_delay may be forced, in this case we will have B-frames
994  * that behave like P-frames. */
995  avctx->has_b_frames = !s->low_delay;
996 
997  if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
998  // MPEG-1 fps
999  avctx->framerate = ff_mpeg12_frame_rate_tab[s1->frame_rate_index];
1000 #if FF_API_TICKS_PER_FRAME
1001 FF_DISABLE_DEPRECATION_WARNINGS
1002  avctx->ticks_per_frame = 1;
1003 FF_ENABLE_DEPRECATION_WARNINGS
1004 #endif
1005 
1006  avctx->chroma_sample_location = AVCHROMA_LOC_CENTER;
1007  } else { // MPEG-2
1008  // MPEG-2 fps
1009  av_reduce(&s->avctx->framerate.num,
1010  &s->avctx->framerate.den,
1011  ff_mpeg12_frame_rate_tab[s1->frame_rate_index].num * s1->frame_rate_ext.num,
1012  ff_mpeg12_frame_rate_tab[s1->frame_rate_index].den * s1->frame_rate_ext.den,
1013  1 << 30);
1014 #if FF_API_TICKS_PER_FRAME
1015 FF_DISABLE_DEPRECATION_WARNINGS
1016  avctx->ticks_per_frame = 2;
1017 FF_ENABLE_DEPRECATION_WARNINGS
1018 #endif
1019 
1020  switch (s->chroma_format) {
1021  case 1: avctx->chroma_sample_location = AVCHROMA_LOC_LEFT; break;
1022  case 2:
1023  case 3: avctx->chroma_sample_location = AVCHROMA_LOC_TOPLEFT; break;
1024  default: av_assert0(0);
1025  }
1026  } // MPEG-2
1027 
1028  avctx->pix_fmt = mpeg_get_pixelformat(avctx);
1029 
1030  if ((ret = ff_mpv_common_init(s)) < 0)
1031  return ret;
1032 
1033  s1->mpeg_enc_ctx_allocated = 1;
1034  }
1035  return 0;
1036 }
1037 
1038 static int mpeg1_decode_picture(AVCodecContext *avctx, const uint8_t *buf,
1039  int buf_size)
1040 {
1041  Mpeg1Context *s1 = avctx->priv_data;
1042  MpegEncContext *s = &s1->mpeg_enc_ctx;
1043  int ref, f_code, vbv_delay, ret;
1044 
1045  ret = init_get_bits8(&s->gb, buf, buf_size);
1046  if (ret < 0)
1047  return ret;
1048 
1049  ref = get_bits(&s->gb, 10); /* temporal ref */
1050  s->pict_type = get_bits(&s->gb, 3);
1051  if (s->pict_type == 0 || s->pict_type > 3)
1052  return AVERROR_INVALIDDATA;
1053 
1054  vbv_delay = get_bits(&s->gb, 16);
1055  s->vbv_delay = vbv_delay;
1056  if (s->pict_type == AV_PICTURE_TYPE_P ||
1057  s->pict_type == AV_PICTURE_TYPE_B) {
1058  s->full_pel[0] = get_bits1(&s->gb);
1059  f_code = get_bits(&s->gb, 3);
1060  if (f_code == 0 && (avctx->err_recognition & (AV_EF_BITSTREAM|AV_EF_COMPLIANT)))
1061  return AVERROR_INVALIDDATA;
1062  f_code += !f_code;
1063  s->mpeg_f_code[0][0] = f_code;
1064  s->mpeg_f_code[0][1] = f_code;
1065  }
1066  if (s->pict_type == AV_PICTURE_TYPE_B) {
1067  s->full_pel[1] = get_bits1(&s->gb);
1068  f_code = get_bits(&s->gb, 3);
1069  if (f_code == 0 && (avctx->err_recognition & (AV_EF_BITSTREAM|AV_EF_COMPLIANT)))
1070  return AVERROR_INVALIDDATA;
1071  f_code += !f_code;
1072  s->mpeg_f_code[1][0] = f_code;
1073  s->mpeg_f_code[1][1] = f_code;
1074  }
1075 
1076  if (avctx->debug & FF_DEBUG_PICT_INFO)
1077  av_log(avctx, AV_LOG_DEBUG,
1078  "vbv_delay %d, ref %d type:%d\n", vbv_delay, ref, s->pict_type);
1079 
1080  s->y_dc_scale = 8;
1081  s->c_dc_scale = 8;
1082  return 0;
1083 }
1084 
1085 static void mpeg_decode_sequence_extension(Mpeg1Context *s1)
1086 {
1087  MpegEncContext *s = &s1->mpeg_enc_ctx;
1088  int horiz_size_ext, vert_size_ext;
1089  int bit_rate_ext;
1090 
1091  skip_bits(&s->gb, 1); /* profile and level esc*/
1092  s->avctx->profile = get_bits(&s->gb, 3);
1093  s->avctx->level = get_bits(&s->gb, 4);
1094  s->progressive_sequence = get_bits1(&s->gb); /* progressive_sequence */
1095  s->chroma_format = get_bits(&s->gb, 2); /* chroma_format 1=420, 2=422, 3=444 */
1096 
1097  if (!s->chroma_format) {
1098  s->chroma_format = 1;
1099  av_log(s->avctx, AV_LOG_WARNING, "Chroma format invalid\n");
1100  }
1101 
1102  horiz_size_ext = get_bits(&s->gb, 2);
1103  vert_size_ext = get_bits(&s->gb, 2);
1104  s->width |= (horiz_size_ext << 12);
1105  s->height |= (vert_size_ext << 12);
1106  bit_rate_ext = get_bits(&s->gb, 12); /* XXX: handle it */
1107  s->bit_rate += (bit_rate_ext << 18) * 400LL;
1108  check_marker(s->avctx, &s->gb, "after bit rate extension");
1109  s->avctx->rc_buffer_size += get_bits(&s->gb, 8) * 1024 * 16 << 10;
1110 
1111  s->low_delay = get_bits1(&s->gb);
1112  if (s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
1113  s->low_delay = 1;
1114 
1115  s1->frame_rate_ext.num = get_bits(&s->gb, 2) + 1;
1116  s1->frame_rate_ext.den = get_bits(&s->gb, 5) + 1;
1117 
1118  ff_dlog(s->avctx, "sequence extension\n");
1119  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG2VIDEO;
1120 
1121  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1122  av_log(s->avctx, AV_LOG_DEBUG,
1123  "profile: %d, level: %d ps: %d cf:%d vbv buffer: %d, bitrate:%"PRId64"\n",
1124  s->avctx->profile, s->avctx->level, s->progressive_sequence, s->chroma_format,
1125  s->avctx->rc_buffer_size, s->bit_rate);
1126 }
1127 
1128 static void mpeg_decode_sequence_display_extension(Mpeg1Context *s1)
1129 {
1130  MpegEncContext *s = &s1->mpeg_enc_ctx;
1131  int color_description, w, h;
1132 
1133  skip_bits(&s->gb, 3); /* video format */
1134  color_description = get_bits1(&s->gb);
1135  if (color_description) {
1136  s->avctx->color_primaries = get_bits(&s->gb, 8);
1137  s->avctx->color_trc = get_bits(&s->gb, 8);
1138  s->avctx->colorspace = get_bits(&s->gb, 8);
1139  }
1140  w = get_bits(&s->gb, 14);
1141  skip_bits(&s->gb, 1); // marker
1142  h = get_bits(&s->gb, 14);
1143  // remaining 3 bits are zero padding
1144 
1145  s1->pan_scan.width = 16 * w;
1146  s1->pan_scan.height = 16 * h;
1147 
1148  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1149  av_log(s->avctx, AV_LOG_DEBUG, "sde w:%d, h:%d\n", w, h);
1150 }
1151 
1152 static void mpeg_decode_picture_display_extension(Mpeg1Context *s1)
1153 {
1154  MpegEncContext *s = &s1->mpeg_enc_ctx;
1155  int i, nofco;
1156 
1157  nofco = 1;
1158  if (s->progressive_sequence) {
1159  if (s->repeat_first_field) {
1160  nofco++;
1161  if (s->top_field_first)
1162  nofco++;
1163  }
1164  } else {
1165  if (s->picture_structure == PICT_FRAME) {
1166  nofco++;
1167  if (s->repeat_first_field)
1168  nofco++;
1169  }
1170  }
1171  for (i = 0; i < nofco; i++) {
1172  s1->pan_scan.position[i][0] = get_sbits(&s->gb, 16);
1173  skip_bits(&s->gb, 1); // marker
1174  s1->pan_scan.position[i][1] = get_sbits(&s->gb, 16);
1175  skip_bits(&s->gb, 1); // marker
1176  }
1177 
1178  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1179  av_log(s->avctx, AV_LOG_DEBUG,
1180  "pde (%"PRId16",%"PRId16") (%"PRId16",%"PRId16") (%"PRId16",%"PRId16")\n",
1181  s1->pan_scan.position[0][0], s1->pan_scan.position[0][1],
1182  s1->pan_scan.position[1][0], s1->pan_scan.position[1][1],
1183  s1->pan_scan.position[2][0], s1->pan_scan.position[2][1]);
1184 }
1185 
1186 static int load_matrix(MpegEncContext *s, uint16_t matrix0[64],
1187  uint16_t matrix1[64], int intra)
1188 {
1189  int i;
1190 
1191  for (i = 0; i < 64; i++) {
1192  int j = s->idsp.idct_permutation[ff_zigzag_direct[i]];
1193  int v = get_bits(&s->gb, 8);
1194  if (v == 0) {
1195  av_log(s->avctx, AV_LOG_ERROR, "matrix damaged\n");
1196  return AVERROR_INVALIDDATA;
1197  }
1198  if (intra && i == 0 && v != 8) {
1199  av_log(s->avctx, AV_LOG_DEBUG, "intra matrix specifies invalid DC quantizer %d, ignoring\n", v);
1200  v = 8; // needed by pink.mpg / issue1046
1201  }
1202  matrix0[j] = v;
1203  if (matrix1)
1204  matrix1[j] = v;
1205  }
1206  return 0;
1207 }
1208 
1209 static void mpeg_decode_quant_matrix_extension(MpegEncContext *s)
1210 {
1211  ff_dlog(s->avctx, "matrix extension\n");
1212 
1213  if (get_bits1(&s->gb))
1214  load_matrix(s, s->chroma_intra_matrix, s->intra_matrix, 1);
1215  if (get_bits1(&s->gb))
1216  load_matrix(s, s->chroma_inter_matrix, s->inter_matrix, 0);
1217  if (get_bits1(&s->gb))
1218  load_matrix(s, s->chroma_intra_matrix, NULL, 1);
1219  if (get_bits1(&s->gb))
1220  load_matrix(s, s->chroma_inter_matrix, NULL, 0);
1221 }
1222 
1223 static int mpeg_decode_picture_coding_extension(Mpeg1Context *s1)
1224 {
1225  MpegEncContext *s = &s1->mpeg_enc_ctx;
1226 
1227  s->full_pel[0] = s->full_pel[1] = 0;
1228  s->mpeg_f_code[0][0] = get_bits(&s->gb, 4);
1229  s->mpeg_f_code[0][1] = get_bits(&s->gb, 4);
1230  s->mpeg_f_code[1][0] = get_bits(&s->gb, 4);
1231  s->mpeg_f_code[1][1] = get_bits(&s->gb, 4);
1232  s->mpeg_f_code[0][0] += !s->mpeg_f_code[0][0];
1233  s->mpeg_f_code[0][1] += !s->mpeg_f_code[0][1];
1234  s->mpeg_f_code[1][0] += !s->mpeg_f_code[1][0];
1235  s->mpeg_f_code[1][1] += !s->mpeg_f_code[1][1];
1236  if (!s->pict_type && s1->mpeg_enc_ctx_allocated) {
1237  av_log(s->avctx, AV_LOG_ERROR, "Missing picture start code\n");
1238  if (s->avctx->err_recognition & AV_EF_EXPLODE)
1239  return AVERROR_INVALIDDATA;
1240  av_log(s->avctx, AV_LOG_WARNING, "Guessing pict_type from mpeg_f_code\n");
1241  if (s->mpeg_f_code[1][0] == 15 && s->mpeg_f_code[1][1] == 15) {
1242  if (s->mpeg_f_code[0][0] == 15 && s->mpeg_f_code[0][1] == 15)
1243  s->pict_type = AV_PICTURE_TYPE_I;
1244  else
1245  s->pict_type = AV_PICTURE_TYPE_P;
1246  } else
1247  s->pict_type = AV_PICTURE_TYPE_B;
1248  }
1249 
1250  s->intra_dc_precision = get_bits(&s->gb, 2);
1251  s->picture_structure = get_bits(&s->gb, 2);
1252  s->top_field_first = get_bits1(&s->gb);
1253  s->frame_pred_frame_dct = get_bits1(&s->gb);
1254  s->concealment_motion_vectors = get_bits1(&s->gb);
1255  s->q_scale_type = get_bits1(&s->gb);
1256  s->intra_vlc_format = get_bits1(&s->gb);
1257  s->alternate_scan = get_bits1(&s->gb);
1258  s->repeat_first_field = get_bits1(&s->gb);
1259  s->chroma_420_type = get_bits1(&s->gb);
1260  s->progressive_frame = get_bits1(&s->gb);
1261 
1262  if (s->alternate_scan) {
1263  ff_init_scantable(s->idsp.idct_permutation, &s->inter_scantable, ff_alternate_vertical_scan);
1264  ff_init_scantable(s->idsp.idct_permutation, &s->intra_scantable, ff_alternate_vertical_scan);
1265  } else {
1266  ff_init_scantable(s->idsp.idct_permutation, &s->inter_scantable, ff_zigzag_direct);
1267  ff_init_scantable(s->idsp.idct_permutation, &s->intra_scantable, ff_zigzag_direct);
1268  }
1269 
1270  /* composite display not parsed */
1271  ff_dlog(s->avctx, "intra_dc_precision=%d\n", s->intra_dc_precision);
1272  ff_dlog(s->avctx, "picture_structure=%d\n", s->picture_structure);
1273  ff_dlog(s->avctx, "top field first=%d\n", s->top_field_first);
1274  ff_dlog(s->avctx, "repeat first field=%d\n", s->repeat_first_field);
1275  ff_dlog(s->avctx, "conceal=%d\n", s->concealment_motion_vectors);
1276  ff_dlog(s->avctx, "intra_vlc_format=%d\n", s->intra_vlc_format);
1277  ff_dlog(s->avctx, "alternate_scan=%d\n", s->alternate_scan);
1278  ff_dlog(s->avctx, "frame_pred_frame_dct=%d\n", s->frame_pred_frame_dct);
1279  ff_dlog(s->avctx, "progressive_frame=%d\n", s->progressive_frame);
1280 
1281  return 0;
1282 }
1283 
1284 static int mpeg_field_start(MpegEncContext *s, const uint8_t *buf, int buf_size)
1285 {
1286  AVCodecContext *avctx = s->avctx;
1287  Mpeg1Context *s1 = (Mpeg1Context *) s;
1288  int ret;
1289 
1290  if (!(avctx->flags2 & AV_CODEC_FLAG2_CHUNKS)) {
1291  if (s->mb_width * s->mb_height * 11LL / (33 * 2 * 8) > buf_size)
1292  return AVERROR_INVALIDDATA;
1293  }
1294 
1295  /* start frame decoding */
1296  if (s->first_field || s->picture_structure == PICT_FRAME) {
1297  AVFrameSideData *pan_scan;
1298 
1299  if ((ret = ff_mpv_frame_start(s, avctx)) < 0)
1300  return ret;
1301 
1302  ff_mpeg_er_frame_start(s);
1303 
1304  /* first check if we must repeat the frame */
1305  s->current_picture_ptr->f->repeat_pict = 0;
1306  if (s->repeat_first_field) {
1307  if (s->progressive_sequence) {
1308  if (s->top_field_first)
1309  s->current_picture_ptr->f->repeat_pict = 4;
1310  else
1311  s->current_picture_ptr->f->repeat_pict = 2;
1312  } else if (s->progressive_frame) {
1313  s->current_picture_ptr->f->repeat_pict = 1;
1314  }
1315  }
1316 
1317  ret = ff_frame_new_side_data(s->avctx, s->current_picture_ptr->f,
1318  AV_FRAME_DATA_PANSCAN, sizeof(s1->pan_scan),
1319  &pan_scan);
1320  if (ret < 0)
1321  return ret;
1322  if (pan_scan)
1323  memcpy(pan_scan->data, &s1->pan_scan, sizeof(s1->pan_scan));
1324 
1325  if (s1->a53_buf_ref) {
1326  ret = ff_frame_new_side_data_from_buf(
1327  s->avctx, s->current_picture_ptr->f, AV_FRAME_DATA_A53_CC,
1328  &s1->a53_buf_ref, NULL);
1329  if (ret < 0)
1330  return ret;
1331  }
1332 
1333  if (s1->has_stereo3d) {
1334  AVStereo3D *stereo = av_stereo3d_create_side_data(s->current_picture_ptr->f);
1335  if (!stereo)
1336  return AVERROR(ENOMEM);
1337 
1338  *stereo = s1->stereo3d;
1339  s1->has_stereo3d = 0;
1340  }
1341 
1342  if (s1->has_afd) {
1343  AVFrameSideData *sd;
1344  ret = ff_frame_new_side_data(s->avctx, s->current_picture_ptr->f,
1345  AV_FRAME_DATA_AFD, 1, &sd);
1346  if (ret < 0)
1347  return ret;
1348  if (sd)
1349  *sd->data = s1->afd;
1350  s1->has_afd = 0;
1351  }
1352 
1353  if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_FRAME))
1354  ff_thread_finish_setup(avctx);
1355  } else { // second field
1356  int i;
1357 
1358  if (!s->current_picture_ptr) {
1359  av_log(s->avctx, AV_LOG_ERROR, "first field missing\n");
1360  return AVERROR_INVALIDDATA;
1361  }
1362 
1363  if (s->avctx->hwaccel) {
1364  if ((ret = FF_HW_SIMPLE_CALL(s->avctx, end_frame)) < 0) {
1365  av_log(avctx, AV_LOG_ERROR,
1366  "hardware accelerator failed to decode first field\n");
1367  return ret;
1368  }
1369  }
1370 
1371  for (i = 0; i < 4; i++) {
1372  s->current_picture.f->data[i] = s->current_picture_ptr->f->data[i];
1373  if (s->picture_structure == PICT_BOTTOM_FIELD)
1374  s->current_picture.f->data[i] +=
1375  s->current_picture_ptr->f->linesize[i];
1376  }
1377  }
1378 
1379  if (avctx->hwaccel) {
1380  if ((ret = FF_HW_CALL(avctx, start_frame, buf, buf_size)) < 0)
1381  return ret;
1382  }
1383 
1384  return 0;
1385 }
1386 
1387 #define DECODE_SLICE_ERROR -1
1388 #define DECODE_SLICE_OK 0
1389 
1390 /**
1391  * Decode a slice.
1392  * MpegEncContext.mb_y must be set to the MB row from the startcode.
1393  * @return DECODE_SLICE_ERROR if the slice is damaged,
1394  * DECODE_SLICE_OK if this slice is OK
1395  */
1396 static int mpeg_decode_slice(MpegEncContext *s, int mb_y,
1397  const uint8_t **buf, int buf_size)
1398 {
1399  AVCodecContext *avctx = s->avctx;
1400  const int lowres = s->avctx->lowres;
1401  const int field_pic = s->picture_structure != PICT_FRAME;
1402  int ret;
1403 
1404  s->resync_mb_x =
1405  s->resync_mb_y = -1;
1406 
1407  av_assert0(mb_y < s->mb_height);
1408 
1409  ret = init_get_bits8(&s->gb, *buf, buf_size);
1410  if (ret < 0)
1411  return ret;
1412 
1413  if (s->codec_id != AV_CODEC_ID_MPEG1VIDEO && s->mb_height > 2800/16)
1414  skip_bits(&s->gb, 3);
1415 
1417  s->interlaced_dct = 0;
1418 
1419  s->qscale = mpeg_get_qscale(s);
1420 
1421  if (s->qscale == 0) {
1422  av_log(s->avctx, AV_LOG_ERROR, "qscale == 0\n");
1423  return AVERROR_INVALIDDATA;
1424  }
1425 
1426  /* extra slice info */
1427  if (skip_1stop_8data_bits(&s->gb) < 0)
1428  return AVERROR_INVALIDDATA;
1429 
1430  s->mb_x = 0;
1431 
1432  if (mb_y == 0 && s->codec_tag == AV_RL32("SLIF")) {
1433  skip_bits1(&s->gb);
1434  } else {
1435  while (get_bits_left(&s->gb) > 0) {
1436  int code = get_vlc2(&s->gb, ff_mbincr_vlc,
1437  MBINCR_VLC_BITS, 2);
1438  if (code < 0) {
1439  av_log(s->avctx, AV_LOG_ERROR, "first mb_incr damaged\n");
1440  return AVERROR_INVALIDDATA;
1441  }
1442  if (code >= 33) {
1443  if (code == 33)
1444  s->mb_x += 33;
1445  /* otherwise, stuffing, nothing to do */
1446  } else {
1447  s->mb_x += code;
1448  break;
1449  }
1450  }
1451  }
1452 
1453  if (s->mb_x >= (unsigned) s->mb_width) {
1454  av_log(s->avctx, AV_LOG_ERROR, "initial skip overflow\n");
1455  return AVERROR_INVALIDDATA;
1456  }
1457 
1458  if (avctx->hwaccel) {
1459  const uint8_t *buf_end, *buf_start = *buf - 4; /* include start_code */
1460  int start_code = -1;
1461  buf_end = avpriv_find_start_code(buf_start + 2, *buf + buf_size, &start_code);
1462  if (buf_end < *buf + buf_size)
1463  buf_end -= 4;
1464  s->mb_y = mb_y;
1465  if (FF_HW_CALL(avctx, decode_slice, buf_start, buf_end - buf_start) < 0)
1466  return DECODE_SLICE_ERROR;
1467  *buf = buf_end;
1468  return DECODE_SLICE_OK;
1469  }
1470 
1471  s->resync_mb_x = s->mb_x;
1472  s->resync_mb_y = s->mb_y = mb_y;
1473  s->mb_skip_run = 0;
1474  ff_init_block_index(s);
1475 
1476  if (s->mb_y == 0 && s->mb_x == 0 && (s->first_field || s->picture_structure == PICT_FRAME)) {
1477  if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
1478  av_log(s->avctx, AV_LOG_DEBUG,
1479  "qp:%d fc:%2d%2d%2d%2d %c %s %s %s %s dc:%d pstruct:%d fdct:%d cmv:%d qtype:%d ivlc:%d rff:%d %s\n",
1480  s->qscale,
1481  s->mpeg_f_code[0][0], s->mpeg_f_code[0][1],
1482  s->mpeg_f_code[1][0], s->mpeg_f_code[1][1],
1483  s->pict_type == AV_PICTURE_TYPE_I ? 'I' :
1484  (s->pict_type == AV_PICTURE_TYPE_P ? 'P' :
1485  (s->pict_type == AV_PICTURE_TYPE_B ? 'B' : 'S')),
1486  s->progressive_sequence ? "ps" : "",
1487  s->progressive_frame ? "pf" : "",
1488  s->alternate_scan ? "alt" : "",
1489  s->top_field_first ? "top" : "",
1490  s->intra_dc_precision, s->picture_structure,
1491  s->frame_pred_frame_dct, s->concealment_motion_vectors,
1492  s->q_scale_type, s->intra_vlc_format,
1493  s->repeat_first_field, s->chroma_420_type ? "420" : "");
1494  }
1495  }
1496 
1497  for (;;) {
1498  if ((ret = mpeg_decode_mb(s, s->block)) < 0)
1499  return ret;
1500 
1501  // Note motion_val is normally NULL unless we want to extract the MVs.
1502  if (s->current_picture.motion_val[0]) {
1503  const int wrap = s->b8_stride;
1504  int xy = s->mb_x * 2 + s->mb_y * 2 * wrap;
1505  int b8_xy = 4 * (s->mb_x + s->mb_y * s->mb_stride);
1506  int motion_x, motion_y, dir, i;
1507 
1508  for (i = 0; i < 2; i++) {
1509  for (dir = 0; dir < 2; dir++) {
1510  if (s->mb_intra ||
1511  (dir == 1 && s->pict_type != AV_PICTURE_TYPE_B)) {
1512  motion_x = motion_y = 0;
1513  } else if (s->mv_type == MV_TYPE_16X16 ||
1514  (s->mv_type == MV_TYPE_FIELD && field_pic)) {
1515  motion_x = s->mv[dir][0][0];
1516  motion_y = s->mv[dir][0][1];
1517  } else { /* if ((s->mv_type == MV_TYPE_FIELD) || (s->mv_type == MV_TYPE_16X8)) */
1518  motion_x = s->mv[dir][i][0];
1519  motion_y = s->mv[dir][i][1];
1520  }
1521 
1522  s->current_picture.motion_val[dir][xy][0] = motion_x;
1523  s->current_picture.motion_val[dir][xy][1] = motion_y;
1524  s->current_picture.motion_val[dir][xy + 1][0] = motion_x;
1525  s->current_picture.motion_val[dir][xy + 1][1] = motion_y;
1526  s->current_picture.ref_index [dir][b8_xy] =
1527  s->current_picture.ref_index [dir][b8_xy + 1] = s->field_select[dir][i];
1528  av_assert2(s->field_select[dir][i] == 0 ||
1529  s->field_select[dir][i] == 1);
1530  }
1531  xy += wrap;
1532  b8_xy += 2;
1533  }
1534  }
1535 
1536  s->dest[0] += 16 >> lowres;
1537  s->dest[1] +=(16 >> lowres) >> s->chroma_x_shift;
1538  s->dest[2] +=(16 >> lowres) >> s->chroma_x_shift;
1539 
1540  ff_mpv_reconstruct_mb(s, s->block);
1541 
1542  if (++s->mb_x >= s->mb_width) {
1543  const int mb_size = 16 >> s->avctx->lowres;
1544  int left;
1545 
1546  ff_mpeg_draw_horiz_band(s, mb_size * (s->mb_y >> field_pic), mb_size);
1547  ff_mpv_report_decode_progress(s);
1548 
1549  s->mb_x = 0;
1550  s->mb_y += 1 << field_pic;
1551 
1552  if (s->mb_y >= s->mb_height) {
1553  int left = get_bits_left(&s->gb);
1554  int is_d10 = s->chroma_format == 2 &&
1555  s->pict_type == AV_PICTURE_TYPE_I &&
1556  avctx->profile == 0 && avctx->level == 5 &&
1557  s->intra_dc_precision == 2 &&
1558  s->q_scale_type == 1 && s->alternate_scan == 0 &&
1559  s->progressive_frame == 0
1560  /* vbv_delay == 0xBBB || 0xE10 */;
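 /* These parameters match Sony D-10/IMX-style intra-only 4:2:2 streams, which
  * pad every slice to a fixed size; leftover bits there are expected, so the
  * end-of-slice check below is relaxed for them. */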
1561 
1562  if (left >= 32 && !is_d10) {
1563  GetBitContext gb = s->gb;
1564  align_get_bits(&gb);
1565  if (show_bits(&gb, 24) == 0x060E2B) {
1566  av_log(avctx, AV_LOG_DEBUG, "Invalid MXF data found in video stream\n");
1567  is_d10 = 1;
1568  }
1569  if (left > 32 && show_bits_long(&gb, 32) == 0x201) {
1570  av_log(avctx, AV_LOG_DEBUG, "skipping m704 alpha (unsupported)\n");
1571  goto eos;
1572  }
1573  }
1574 
1575  if (left < 0 ||
1576  (left && show_bits(&s->gb, FFMIN(left, 23)) && !is_d10) ||
1577  ((avctx->err_recognition & (AV_EF_BITSTREAM | AV_EF_AGGRESSIVE)) && left > 8)) {
1578  av_log(avctx, AV_LOG_ERROR, "end mismatch left=%d %0X at %d %d\n",
1579  left, left>0 ? show_bits(&s->gb, FFMIN(left, 23)) : 0, s->mb_x, s->mb_y);
1580  return AVERROR_INVALIDDATA;
1581  } else
1582  goto eos;
1583  }
1584  // There are some files out there which are missing the last slice
1585  // in cases where the slice is completely outside the visible
1586  // area, we detect this here instead of running into the end expecting
1587  // more data
1588  left = get_bits_left(&s->gb);
1589  if (s->mb_y >= ((s->height + 15) >> 4) &&
1590  !s->progressive_sequence &&
1591  left <= 25 &&
1592  left >= 0 &&
1593  s->mb_skip_run == -1 &&
1594  (!left || show_bits(&s->gb, left) == 0))
1595  goto eos;
1596 
1597  ff_init_block_index(s);
1598  }
1599 
1600  /* skip mb handling */
1601  if (s->mb_skip_run == -1) {
1602  /* read increment again */
1603  s->mb_skip_run = 0;
1604  for (;;) {
1605  int code = get_vlc2(&s->gb, ff_mbincr_vlc,
1606  MBINCR_VLC_BITS, 2);
1607  if (code < 0) {
1608  av_log(s->avctx, AV_LOG_ERROR, "mb incr damaged\n");
1609  return AVERROR_INVALIDDATA;
1610  }
1611  if (code >= 33) {
1612  if (code == 33) {
1613  s->mb_skip_run += 33;
1614  } else if (code == 35) {
1615  if (s->mb_skip_run != 0 || show_bits(&s->gb, 15) != 0) {
1616  av_log(s->avctx, AV_LOG_ERROR, "slice mismatch\n");
1617  return AVERROR_INVALIDDATA;
1618  }
1619  goto eos; /* end of slice */
1620  }
1621  /* otherwise, stuffing, nothing to do */
1622  } else {
1623  s->mb_skip_run += code;
1624  break;
1625  }
1626  }
1627  if (s->mb_skip_run) {
1628  int i;
1629  if (s->pict_type == AV_PICTURE_TYPE_I) {
1630  av_log(s->avctx, AV_LOG_ERROR,
1631  "skipped MB in I-frame at %d %d\n", s->mb_x, s->mb_y);
1632  return AVERROR_INVALIDDATA;
1633  }
1634 
1635  /* skip mb */
1636  s->mb_intra = 0;
1637  for (i = 0; i < 12; i++)
1638  s->block_last_index[i] = -1;
1639  if (s->picture_structure == PICT_FRAME)
1640  s->mv_type = MV_TYPE_16X16;
1641  else
1642  s->mv_type = MV_TYPE_FIELD;
1643  if (s->pict_type == AV_PICTURE_TYPE_P) {
1644  /* if P type, zero motion vector is implied */
1645  s->mv_dir = MV_DIR_FORWARD;
1646  s->mv[0][0][0] = s->mv[0][0][1] = 0;
1647  s->last_mv[0][0][0] = s->last_mv[0][0][1] = 0;
1648  s->last_mv[0][1][0] = s->last_mv[0][1][1] = 0;
1649  s->field_select[0][0] = (s->picture_structure - 1) & 1;
1650  } else {
1651  /* if B type, reuse previous vectors and directions */
1652  s->mv[0][0][0] = s->last_mv[0][0][0];
1653  s->mv[0][0][1] = s->last_mv[0][0][1];
1654  s->mv[1][0][0] = s->last_mv[1][0][0];
1655  s->mv[1][0][1] = s->last_mv[1][0][1];
1656  s->field_select[0][0] = (s->picture_structure - 1) & 1;
1657  s->field_select[1][0] = (s->picture_structure - 1) & 1;
1658  }
1659  }
1660  }
1661  }
1662 eos: // end of slice
1663  if (get_bits_left(&s->gb) < 0) {
1664  av_log(s, AV_LOG_ERROR, "overread %d\n", -get_bits_left(&s->gb));
1665  return AVERROR_INVALIDDATA;
1666  }
1667  *buf += (get_bits_count(&s->gb) - 1) / 8;
1668  ff_dlog(s, "Slice start:%d %d end:%d %d\n", s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y);
1669  return 0;
1670 }
1671 
1672 static int slice_decode_thread(AVCodecContext *c, void *arg)
1673 {
1674  MpegEncContext *s = *(void **) arg;
1675  const uint8_t *buf = s->gb.buffer;
1676  int mb_y = s->start_mb_y;
1677  const int field_pic = s->picture_structure != PICT_FRAME;
1678 
1679  s->er.error_count = (3 * (s->end_mb_y - s->start_mb_y) * s->mb_width) >> field_pic;
1680 
1681  for (;;) {
1682  uint32_t start_code;
1683  int ret;
1684 
1685  ret = mpeg_decode_slice(s, mb_y, &buf, s->gb.buffer_end - buf);
1686  emms_c();
1687  ff_dlog(c, "ret:%d resync:%d/%d mb:%d/%d ts:%d/%d ec:%d\n",
1688  ret, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y,
1689  s->start_mb_y, s->end_mb_y, s->er.error_count);
1690  if (ret < 0) {
1691  if (c->err_recognition & AV_EF_EXPLODE)
1692  return ret;
1693  if (s->resync_mb_x >= 0 && s->resync_mb_y >= 0)
1694  ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y,
1695  s->mb_x, s->mb_y,
1696  ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
1697  } else {
1698  ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y,
1699  s->mb_x - 1, s->mb_y,
1700  ER_AC_END | ER_DC_END | ER_MV_END);
1701  }
1702 
1703  if (s->mb_y == s->end_mb_y)
1704  return 0;
1705 
1706  start_code = -1;
1707  buf = avpriv_find_start_code(buf, s->gb.buffer_end, &start_code);
1708  if (start_code < SLICE_MIN_START_CODE || start_code > SLICE_MAX_START_CODE)
1709  return AVERROR_INVALIDDATA;
1710  mb_y = start_code - SLICE_MIN_START_CODE;
1711  if (s->codec_id != AV_CODEC_ID_MPEG1VIDEO && s->mb_height > 2800/16)
1712  mb_y += (*buf&0xE0)<<2;
1713  mb_y <<= field_pic;
1714  if (s->picture_structure == PICT_BOTTOM_FIELD)
1715  mb_y++;
1716  if (mb_y >= s->end_mb_y)
1717  return AVERROR_INVALIDDATA;
1718  }
1719 }
1720 
1721 /**
1722  * Handle slice ends.
1723  * @return 1 if it seems to be the last slice
1724  */
1725 static int slice_end(AVCodecContext *avctx, AVFrame *pict)
1726 {
1727  Mpeg1Context *s1 = avctx->priv_data;
1728  MpegEncContext *s = &s1->mpeg_enc_ctx;
1729 
1730  if (!s1->mpeg_enc_ctx_allocated || !s->current_picture_ptr)
1731  return 0;
1732 
1733  if (s->avctx->hwaccel) {
1734  int ret = FF_HW_SIMPLE_CALL(s->avctx, end_frame);
1735  if (ret < 0) {
1736  av_log(avctx, AV_LOG_ERROR,
1737  "hardware accelerator failed to decode picture\n");
1738  return ret;
1739  }
1740  }
1741 
1742  /* end of slice reached */
1743  if (/* s->mb_y << field_pic == s->mb_height && */ !s->first_field && !s1->first_slice) {
1744  /* end of image */
1745 
1746  ff_er_frame_end(&s->er, NULL);
1747 
1748  ff_mpv_frame_end(s);
1749 
1750  if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
1751  int ret = av_frame_ref(pict, s->current_picture_ptr->f);
1752  if (ret < 0)
1753  return ret;
1754  ff_print_debug_info(s, s->current_picture_ptr, pict);
1755  ff_mpv_export_qp_table(s, pict, s->current_picture_ptr, FF_MPV_QSCALE_TYPE_MPEG2);
1756  } else {
1757  /* latency of 1 frame for I- and P-frames */
1758  if (s->last_picture_ptr) {
1759  int ret = av_frame_ref(pict, s->last_picture_ptr->f);
1760  if (ret < 0)
1761  return ret;
1762  ff_print_debug_info(s, s->last_picture_ptr, pict);
1763  ff_mpv_export_qp_table(s, pict, s->last_picture_ptr, FF_MPV_QSCALE_TYPE_MPEG2);
1764  }
1765  }
1766 
1767  return 1;
1768  } else {
1769  return 0;
1770  }
1771 }
1772 
1773 static int mpeg1_decode_sequence(AVCodecContext *avctx,
1774  const uint8_t *buf, int buf_size)
1775 {
1776  Mpeg1Context *s1 = avctx->priv_data;
1777  MpegEncContext *s = &s1->mpeg_enc_ctx;
1778  int width, height;
1779  int i, v, j;
1780 
1781  int ret = init_get_bits8(&s->gb, buf, buf_size);
1782  if (ret < 0)
1783  return ret;
1784 
1785  width = get_bits(&s->gb, 12);
1786  height = get_bits(&s->gb, 12);
1787  if (width == 0 || height == 0) {
1788  av_log(avctx, AV_LOG_WARNING,
1789  "Invalid horizontal or vertical size value.\n");
1790  if (avctx->err_recognition & (AV_EF_BITSTREAM | AV_EF_COMPLIANT))
1791  return AVERROR_INVALIDDATA;
1792  }
1793  s1->aspect_ratio_info = get_bits(&s->gb, 4);
1794  if (s1->aspect_ratio_info == 0) {
1795  av_log(avctx, AV_LOG_ERROR, "aspect ratio has forbidden 0 value\n");
1796  if (avctx->err_recognition & (AV_EF_BITSTREAM | AV_EF_COMPLIANT))
1797  return AVERROR_INVALIDDATA;
1798  }
1799  s1->frame_rate_index = get_bits(&s->gb, 4);
1800  if (s1->frame_rate_index == 0 || s1->frame_rate_index > 13) {
1801  av_log(avctx, AV_LOG_WARNING,
1802  "frame_rate_index %d is invalid\n", s1->frame_rate_index);
1803  s1->frame_rate_index = 1;
1804  }
1805  s->bit_rate = get_bits(&s->gb, 18) * 400LL;
1806  if (check_marker(s->avctx, &s->gb, "in sequence header") == 0) {
1807  return AVERROR_INVALIDDATA;
1808  }
1809 
1810  s->avctx->rc_buffer_size = get_bits(&s->gb, 10) * 1024 * 16;
1811  skip_bits(&s->gb, 1);
1812 
1813  /* get matrix */
1814  if (get_bits1(&s->gb)) {
1815  load_matrix(s, s->chroma_intra_matrix, s->intra_matrix, 1);
1816  } else {
1817  for (i = 0; i < 64; i++) {
1818  j = s->idsp.idct_permutation[i];
1819  v = ff_mpeg1_default_intra_matrix[i];
1820  s->intra_matrix[j] = v;
1821  s->chroma_intra_matrix[j] = v;
1822  }
1823  }
1824  if (get_bits1(&s->gb)) {
1825  load_matrix(s, s->chroma_inter_matrix, s->inter_matrix, 0);
1826  } else {
1827  for (i = 0; i < 64; i++) {
1828  int j = s->idsp.idct_permutation[i];
1829  v = ff_mpeg1_default_non_intra_matrix[i];
1830  s->inter_matrix[j] = v;
1831  s->chroma_inter_matrix[j] = v;
1832  }
1833  }
1834 
1835  if (show_bits(&s->gb, 23) != 0) {
1836  av_log(s->avctx, AV_LOG_ERROR, "sequence header damaged\n");
1837  return AVERROR_INVALIDDATA;
1838  }
1839 
1840  s->width = width;
1841  s->height = height;
1842 
1843  /* We set MPEG-2 parameters so that it emulates MPEG-1. */
1844  s->progressive_sequence = 1;
1845  s->progressive_frame = 1;
1846  s->picture_structure = PICT_FRAME;
1847  s->first_field = 0;
1848  s->frame_pred_frame_dct = 1;
1849  s->chroma_format = 1;
1850  s->codec_id =
1851  s->avctx->codec_id = AV_CODEC_ID_MPEG1VIDEO;
1852  s->out_format = FMT_MPEG1;
1853  if (s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
1854  s->low_delay = 1;
1855 
1856  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1857  av_log(s->avctx, AV_LOG_DEBUG, "vbv buffer: %d, bitrate:%"PRId64", aspect_ratio_info: %d \n",
1858  s->avctx->rc_buffer_size, s->bit_rate, s1->aspect_ratio_info);
1859 
1860  return 0;
1861 }
1862 
1863 static int vcr2_init_sequence(AVCodecContext *avctx)
1864 {
1865  Mpeg1Context *s1 = avctx->priv_data;
1866  MpegEncContext *s = &s1->mpeg_enc_ctx;
1867  int i, v, ret;
1868 
1869  /* start new MPEG-1 context decoding */
1870  s->out_format = FMT_MPEG1;
1871  if (s1->mpeg_enc_ctx_allocated) {
1872  ff_mpv_common_end(s);
1873  s1->mpeg_enc_ctx_allocated = 0;
1874  }
1875  s->width = avctx->coded_width;
1876  s->height = avctx->coded_height;
1877  avctx->has_b_frames = 0; // true?
1878  s->low_delay = 1;
1879 
1880  avctx->pix_fmt = mpeg_get_pixelformat(avctx);
1881 
1882  if ((ret = ff_mpv_common_init(s)) < 0)
1883  return ret;
1884  s1->mpeg_enc_ctx_allocated = 1;
1885 
1886  for (i = 0; i < 64; i++) {
1887  int j = s->idsp.idct_permutation[i];
1888  v = ff_mpeg1_default_intra_matrix[i];
1889  s->intra_matrix[j] = v;
1890  s->chroma_intra_matrix[j] = v;
1891 
1892  v = ff_mpeg1_default_non_intra_matrix[i];
1893  s->inter_matrix[j] = v;
1894  s->chroma_inter_matrix[j] = v;
1895  }
1896 
1897  s->progressive_sequence = 1;
1898  s->progressive_frame = 1;
1899  s->picture_structure = PICT_FRAME;
1900  s->first_field = 0;
1901  s->frame_pred_frame_dct = 1;
1902  s->chroma_format = 1;
1903  if (s->codec_tag == AV_RL32("BW10")) {
1904  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG1VIDEO;
1905  } else {
1906  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG2VIDEO;
1907  }
1908  s1->save_width = s->width;
1909  s1->save_height = s->height;
1910  s1->save_progressive_seq = s->progressive_sequence;
1911  return 0;
1912 }
1913 
1914 static void mpeg_set_cc_format(AVCodecContext *avctx, enum Mpeg2ClosedCaptionsFormat format,
1915  const char *label)
1916 {
1917  Mpeg1Context *s1 = avctx->priv_data;
1918 
1920 
1921  if (!s1->cc_format) {
1922  s1->cc_format = format;
1923 
1924  av_log(avctx, AV_LOG_DEBUG, "CC: first seen substream is %s format\n", label);
1925  }
1926 }
1927 
1928 static int mpeg_decode_a53_cc(AVCodecContext *avctx,
1929  const uint8_t *p, int buf_size)
1930 {
1931  Mpeg1Context *s1 = avctx->priv_data;
1932 
1933  if ((!s1->cc_format || s1->cc_format == CC_FORMAT_A53_PART4) &&
1934  buf_size >= 6 &&
1935  p[0] == 'G' && p[1] == 'A' && p[2] == '9' && p[3] == '4' &&
1936  p[4] == 3 && (p[5] & 0x40)) {
1937  /* extract A53 Part 4 CC data */
1938  int cc_count = p[5] & 0x1f;
1939  if (cc_count > 0 && buf_size >= 7 + cc_count * 3) {
1940  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
1941  const uint64_t new_size = (old_size + cc_count
1942  * UINT64_C(3));
1943  int ret;
1944 
1945  if (new_size > 3*A53_MAX_CC_COUNT)
1946  return AVERROR(EINVAL);
1947 
1948  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
1949  if (ret >= 0)
1950  memcpy(s1->a53_buf_ref->data + old_size, p + 7, cc_count * UINT64_C(3));
1951 
1952  avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
1953  mpeg_set_cc_format(avctx, CC_FORMAT_A53_PART4, "A/53 Part 4");
1954  }
1955  return 1;
1956  } else if ((!s1->cc_format || s1->cc_format == CC_FORMAT_SCTE20) &&
1957  buf_size >= 2 &&
1958  p[0] == 0x03 && (p[1]&0x7f) == 0x01) {
1959  /* extract SCTE-20 CC data */
1960  GetBitContext gb;
1961  int cc_count = 0;
1962  int i, ret;
1963 
1964  ret = init_get_bits8(&gb, p + 2, buf_size - 2);
1965  if (ret < 0)
1966  return ret;
1967  cc_count = get_bits(&gb, 5);
1968  if (cc_count > 0) {
1969  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
1970  const uint64_t new_size = (old_size + cc_count
1971  * UINT64_C(3));
1972  if (new_size > 3*A53_MAX_CC_COUNT)
1973  return AVERROR(EINVAL);
1974 
1975  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
1976  if (ret >= 0) {
1977  uint8_t field, cc1, cc2;
1978  uint8_t *cap = s1->a53_buf_ref->data;
1979 
1980  memset(s1->a53_buf_ref->data + old_size, 0, cc_count * 3);
1981  for (i = 0; i < cc_count && get_bits_left(&gb) >= 26; i++) {
1982  skip_bits(&gb, 2); // priority
1983  field = get_bits(&gb, 2);
1984  skip_bits(&gb, 5); // line_offset
1985  cc1 = get_bits(&gb, 8);
1986  cc2 = get_bits(&gb, 8);
1987  skip_bits(&gb, 1); // marker
1988 
1989  if (!field) { // forbidden
1990  cap[0] = cap[1] = cap[2] = 0x00;
1991  } else {
1992  field = (field == 2 ? 1 : 0);
1993  if (!s1->mpeg_enc_ctx.top_field_first) field = !field;
1994  cap[0] = 0x04 | field;
1995  cap[1] = ff_reverse[cc1];
1996  cap[2] = ff_reverse[cc2];
1997  }
1998  cap += 3;
1999  }
2000  }
2001 
2002  avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
2003  mpeg_set_cc_format(avctx, CC_FORMAT_SCTE20, "SCTE-20");
2004  }
2005  return 1;
2006  } else if ((!s1->cc_format || s1->cc_format == CC_FORMAT_DVD) &&
2007  buf_size >= 11 &&
2008  p[0] == 'C' && p[1] == 'C' && p[2] == 0x01 && p[3] == 0xf8) {
2009  /* extract DVD CC data
2010  *
2011  * uint32_t user_data_start_code 0x000001B2 (big endian)
2012  * uint16_t user_identifier 0x4343 "CC"
2013  * uint8_t user_data_type_code 0x01
2014  * uint8_t caption_block_size 0xF8
2015  * uint8_t
2016  * bit 7 caption_odd_field_first 1=odd field (CC1/CC2) first 0=even field (CC3/CC4) first
2017  * bit 6 caption_filler 0
2018  * bit 5:1 caption_block_count number of caption blocks (pairs of caption words = frames). Most DVDs use 15 per start of GOP.
2019  * bit 0 caption_extra_field_added 1=one additional caption word
2020  *
2021  * struct caption_field_block {
2022  * uint8_t
2023  * bit 7:1 caption_filler 0x7F (all 1s)
2024  * bit 0 caption_field_odd 1=odd field (this is CC1/CC2) 0=even field (this is CC3/CC4)
2025  * uint8_t caption_first_byte
2026  * uint8_t caption_second_byte
2027  * } caption_block[(caption_block_count * 2) + caption_extra_field_added];
2028  *
2029  * Some DVDs encode caption data for both fields with caption_field_odd=1. The only way to decode the fields
2030  * correctly is to start on the field indicated by caption_odd_field_first and count between odd/even fields.
2031  * Don't assume that the first caption word is the odd field. There do exist MPEG files in the wild that start
2032  * on the even field. There also exist DVDs in the wild that encode an odd field count and the
2033  * caption_extra_field_added/caption_odd_field_first bits change per packet to allow that. */
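      /* Worked example (illustrative values only): a header byte p[4] = 0x9E gives
       * caption_odd_field_first = 1, caption_filler = 0, caption_block_count = 15 and
       * caption_extra_field_added = 0, i.e. 15 * 2 + 0 = 30 caption field blocks
       * (90 bytes) follow. The loop below ignores the transmitted count and instead
       * counts the 0xFE/0xFF field-block markers actually present. */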
2034  int cc_count = 0;
2035  int i, ret;
2036  // There is a caption count field in the data, but it is often
2037  // incorrect. So count the number of captions present.
2038  for (i = 5; i + 6 <= buf_size && ((p[i] & 0xfe) == 0xfe); i += 6)
2039  cc_count++;
2040  // Transform the DVD format into A53 Part 4 format
2041  if (cc_count > 0) {
2042  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
2043  const uint64_t new_size = (old_size + cc_count
2044  * UINT64_C(6));
2045  if (new_size > 3*A53_MAX_CC_COUNT)
2046  return AVERROR(EINVAL);
2047 
2048  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
2049  if (ret >= 0) {
2050  uint8_t field1 = !!(p[4] & 0x80);
2051  uint8_t *cap = s1->a53_buf_ref->data;
2052  p += 5;
2053  for (i = 0; i < cc_count; i++) {
2054  cap[0] = (p[0] == 0xff && field1) ? 0xfc : 0xfd;
2055  cap[1] = p[1];
2056  cap[2] = p[2];
2057  cap[3] = (p[3] == 0xff && !field1) ? 0xfc : 0xfd;
2058  cap[4] = p[4];
2059  cap[5] = p[5];
2060  cap += 6;
2061  p += 6;
2062  }
2063  }
2064 
2065  avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
2066  mpeg_set_cc_format(avctx, CC_FORMAT_DVD, "DVD");
2067  }
2068  return 1;
2069  }
2070  return 0;
2071 }
2072 
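      /* Parse MPEG user data: a "TMPGEXS" marker (later used to adjust the intra DC
       * precision for such streams), DTG1 active format descriptions, JP3D stereo 3D
       * signalling, and closed caption payloads, which are delegated to
       * mpeg_decode_a53_cc(). */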
2073 static void mpeg_decode_user_data(AVCodecContext *avctx,
2074  const uint8_t *p, int buf_size)
2075 {
2076  Mpeg1Context *s = avctx->priv_data;
2077  const uint8_t *buf_end = p + buf_size;
2078  Mpeg1Context *s1 = avctx->priv_data;
2079 
2080 #if 0
2081  int i;
2082  for(i=0; !(!p[i-2] && !p[i-1] && p[i]==1) && i<buf_size; i++){
2083  av_log(avctx, AV_LOG_ERROR, "%c", p[i]);
2084  }
2085  av_log(avctx, AV_LOG_ERROR, "\n");
2086 #endif
2087 
2088  if (buf_size > 29){
2089  int i;
2090  for(i=0; i<20; i++)
2091  if (!memcmp(p+i, "\0TMPGEXS\0", 9)){
2092  s->tmpgexs= 1;
2093  }
2094  }
2095  /* we parse the DTG active format information */
2096  if (buf_end - p >= 5 &&
2097  p[0] == 'D' && p[1] == 'T' && p[2] == 'G' && p[3] == '1') {
2098  int flags = p[4];
2099  p += 5;
2100  if (flags & 0x80) {
2101  /* skip event id */
2102  p += 2;
2103  }
2104  if (flags & 0x40) {
2105  if (buf_end - p < 1)
2106  return;
2107  s1->has_afd = 1;
2108  s1->afd = p[0] & 0x0f;
2109  }
2110  } else if (buf_end - p >= 6 &&
2111  p[0] == 'J' && p[1] == 'P' && p[2] == '3' && p[3] == 'D' &&
2112  p[4] == 0x03) { // S3D_video_format_length
2113  // the 0x7F mask ignores the reserved_bit value
2114  const uint8_t S3D_video_format_type = p[5] & 0x7F;
2115 
2116  if (S3D_video_format_type == 0x03 ||
2117  S3D_video_format_type == 0x04 ||
2118  S3D_video_format_type == 0x08 ||
2119  S3D_video_format_type == 0x23) {
2120 
2121  s1->has_stereo3d = 1;
2122 
2123  switch (S3D_video_format_type) {
2124  case 0x03:
2125  s1->stereo3d.type = AV_STEREO3D_SIDEBYSIDE;
2126  break;
2127  case 0x04:
2128  s1->stereo3d.type = AV_STEREO3D_TOPBOTTOM;
2129  break;
2130  case 0x08:
2131  s1->stereo3d.type = AV_STEREO3D_2D;
2132  break;
2133  case 0x23:
2134  s1->stereo3d.type = AV_STEREO3D_SIDEBYSIDE_QUINCUNX;
2135  break;
2136  }
2137  }
2138  } else if (mpeg_decode_a53_cc(avctx, p, buf_size)) {
2139  return;
2140  }
2141 }
2142 
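      /* Parse a GOP header: a 25-bit timecode (stored in timecode_frame_start and
       * exported later as frame side data), the closed_gop flag and the broken_link
       * flag. */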
2143 static int mpeg_decode_gop(AVCodecContext *avctx,
2144  const uint8_t *buf, int buf_size)
2145 {
2146  Mpeg1Context *s1 = avctx->priv_data;
2147  MpegEncContext *s = &s1->mpeg_enc_ctx;
2148  int broken_link;
2149  int64_t tc;
2150 
2151  int ret = init_get_bits8(&s->gb, buf, buf_size);
2152  if (ret < 0)
2153  return ret;
2154 
2155  tc = s1->timecode_frame_start = get_bits(&s->gb, 25);
2156 
2157  s1->closed_gop = get_bits1(&s->gb);
2158  /* broken_link indicates that, after editing, the
2159  * reference frames of the first B-frames following the
2160  * GOP's I-frame are missing (open GOP). */
2161  broken_link = get_bits1(&s->gb);
2162 
2163  if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
2164  char tcbuf[AV_TIMECODE_STR_SIZE];
2165  av_timecode_make_mpeg_tc_string(tcbuf, tc);
2166  av_log(s->avctx, AV_LOG_DEBUG,
2167  "GOP (%s) closed_gop=%d broken_link=%d\n",
2168  tcbuf, s1->closed_gop, broken_link);
2169  }
2170 
2171  return 0;
2172 }
2173 
2174 static int decode_chunks(AVCodecContext *avctx, AVFrame *picture,
2175  int *got_output, const uint8_t *buf, int buf_size)
2176 {
2177  Mpeg1Context *s = avctx->priv_data;
2178  MpegEncContext *s2 = &s->mpeg_enc_ctx;
2179  const uint8_t *buf_ptr = buf;
2180  const uint8_t *buf_end = buf + buf_size;
2181  int ret, input_size;
2182  int last_code = 0, skip_frame = 0;
2183  int picture_start_code_seen = 0;
2184 
2185  for (;;) {
2186  /* find next start code */
2187  uint32_t start_code = -1;
2188  buf_ptr = avpriv_find_start_code(buf_ptr, buf_end, &start_code);
2189  if (start_code > 0x1ff) {
2190  if (!skip_frame) {
2191  if (HAVE_THREADS &&
2192  (avctx->active_thread_type & FF_THREAD_SLICE) &&
2193  !avctx->hwaccel) {
2194  int i;
2195  av_assert0(avctx->thread_count > 1);
2196 
2197  avctx->execute(avctx, slice_decode_thread,
2198  &s2->thread_context[0], NULL,
2199  s->slice_count, sizeof(void *));
2200  for (i = 0; i < s->slice_count; i++)
2201  s2->er.error_count += s2->thread_context[i]->er.error_count;
2202  }
2203 
2204  ret = slice_end(avctx, picture);
2205  if (ret < 0)
2206  return ret;
2207  else if (ret) {
2208  // FIXME: merge with the stuff in mpeg_decode_slice
2209  if (s2->last_picture_ptr || s2->low_delay || s2->pict_type == AV_PICTURE_TYPE_B)
2210  *got_output = 1;
2211  }
2212  }
2213  s2->pict_type = 0;
2214 
2215  if (avctx->err_recognition & AV_EF_EXPLODE && s2->er.error_count)
2216  return AVERROR_INVALIDDATA;
2217 
2218  return FFMAX(0, buf_ptr - buf);
2219  }
2220 
2221  input_size = buf_end - buf_ptr;
2222 
2223  if (avctx->debug & FF_DEBUG_STARTCODE)
2224  av_log(avctx, AV_LOG_DEBUG, "%3"PRIX32" at %"PTRDIFF_SPECIFIER" left %d\n",
2225  start_code, buf_ptr - buf, input_size);
2226 
2227  /* prepare data for next start code */
2228  switch (start_code) {
2229  case SEQ_START_CODE:
2230  if (last_code == 0) {
2231  mpeg1_decode_sequence(avctx, buf_ptr, input_size);
2232  if (buf != avctx->extradata)
2233  s->sync = 1;
2234  } else {
2235  av_log(avctx, AV_LOG_ERROR,
2236  "ignoring SEQ_START_CODE after %X\n", last_code);
2237  if (avctx->err_recognition & AV_EF_EXPLODE)
2238  return AVERROR_INVALIDDATA;
2239  }
2240  break;
2241 
2242  case PICTURE_START_CODE:
2243  if (picture_start_code_seen && s2->picture_structure == PICT_FRAME) {
2244  /* If it's a frame picture, there can't be more than one picture header.
2245  Yet, it does happen and we need to handle it. */
2246  av_log(avctx, AV_LOG_WARNING, "ignoring extra picture following a frame-picture\n");
2247  break;
2248  }
2249  picture_start_code_seen = 1;
2250 
2251  if (buf == avctx->extradata && avctx->codec_tag == AV_RL32("AVmp")) {
2252  av_log(avctx, AV_LOG_WARNING, "ignoring picture start code in AVmp extradata\n");
2253  break;
2254  }
2255 
2256  if (s2->width <= 0 || s2->height <= 0) {
2257  av_log(avctx, AV_LOG_ERROR, "Invalid frame dimensions %dx%d.\n",
2258  s2->width, s2->height);
2259  return AVERROR_INVALIDDATA;
2260  }
2261 
2262  if (s->tmpgexs){
2263  s2->intra_dc_precision= 3;
2264  s2->intra_matrix[0]= 1;
2265  }
2266  if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE) &&
2267  !avctx->hwaccel && s->slice_count) {
2268  int i;
2269 
2270  avctx->execute(avctx, slice_decode_thread,
2271  s2->thread_context, NULL,
2272  s->slice_count, sizeof(void *));
2273  for (i = 0; i < s->slice_count; i++)
2274  s2->er.error_count += s2->thread_context[i]->er.error_count;
2275  s->slice_count = 0;
2276  }
2277  if (last_code == 0 || last_code == SLICE_MIN_START_CODE) {
2278  ret = mpeg_decode_postinit(avctx);
2279  if (ret < 0) {
2280  av_log(avctx, AV_LOG_ERROR,
2281  "mpeg_decode_postinit() failure\n");
2282  return ret;
2283  }
2284 
2285  /* We have a complete image: we try to decompress it. */
2286  if (mpeg1_decode_picture(avctx, buf_ptr, input_size) < 0)
2287  s2->pict_type = 0;
2288  s->first_slice = 1;
2289  last_code = PICTURE_START_CODE;
2290  } else {
2291  av_log(avctx, AV_LOG_ERROR,
2292  "ignoring pic after %X\n", last_code);
2293  if (avctx->err_recognition & AV_EF_EXPLODE)
2294  return AVERROR_INVALIDDATA;
2295  }
2296  break;
2297  case EXT_START_CODE:
2298  ret = init_get_bits8(&s2->gb, buf_ptr, input_size);
2299  if (ret < 0)
2300  return ret;
2301 
2302  switch (get_bits(&s2->gb, 4)) {
2303  case 0x1:
2304  if (last_code == 0) {
2305  mpeg_decode_sequence_extension(s);
2306  } else {
2307  av_log(avctx, AV_LOG_ERROR,
2308  "ignoring seq ext after %X\n", last_code);
2309  if (avctx->err_recognition & AV_EF_EXPLODE)
2310  return AVERROR_INVALIDDATA;
2311  }
2312  break;
2313  case 0x2:
2314  mpeg_decode_sequence_display_extension(s);
2315  break;
2316  case 0x3:
2317  mpeg_decode_quant_matrix_extension(s2);
2318  break;
2319  case 0x7:
2320  mpeg_decode_picture_display_extension(s);
2321  break;
2322  case 0x8:
2323  if (last_code == PICTURE_START_CODE) {
2324  ret = mpeg_decode_picture_coding_extension(s);
2325  if (ret < 0)
2326  return ret;
2327  } else {
2328  av_log(avctx, AV_LOG_ERROR,
2329  "ignoring pic cod ext after %X\n", last_code);
2330  if (avctx->err_recognition & AV_EF_EXPLODE)
2331  return AVERROR_INVALIDDATA;
2332  }
2333  break;
2334  }
2335  break;
2336  case USER_START_CODE:
2337  mpeg_decode_user_data(avctx, buf_ptr, input_size);
2338  break;
2339  case GOP_START_CODE:
2340  if (last_code == 0) {
2341  s2->first_field = 0;
2342  ret = mpeg_decode_gop(avctx, buf_ptr, input_size);
2343  if (ret < 0)
2344  return ret;
2345  s->sync = 1;
2346  } else {
2347  av_log(avctx, AV_LOG_ERROR,
2348  "ignoring GOP_START_CODE after %X\n", last_code);
2349  if (avctx->err_recognition & AV_EF_EXPLODE)
2350  return AVERROR_INVALIDDATA;
2351  }
2352  break;
2353  default:
2354  if (start_code >= SLICE_MIN_START_CODE &&
2355  start_code <= SLICE_MAX_START_CODE && last_code == PICTURE_START_CODE) {
2356  if (s2->progressive_sequence && !s2->progressive_frame) {
2357  s2->progressive_frame = 1;
2358  av_log(s2->avctx, AV_LOG_ERROR,
2359  "interlaced frame in progressive sequence, ignoring\n");
2360  }
2361 
2362  if (s2->picture_structure == 0 ||
2363  (s2->progressive_frame && s2->picture_structure != PICT_FRAME)) {
2364  av_log(s2->avctx, AV_LOG_ERROR,
2365  "picture_structure %d invalid, ignoring\n",
2366  s2->picture_structure);
2367  s2->picture_structure = PICT_FRAME;
2368  }
2369 
2370  if (s2->progressive_sequence && !s2->frame_pred_frame_dct)
2371  av_log(s2->avctx, AV_LOG_WARNING, "invalid frame_pred_frame_dct\n");
2372 
2373  if (s2->picture_structure == PICT_FRAME) {
2374  s2->first_field = 0;
2375  s2->v_edge_pos = 16 * s2->mb_height;
2376  } else {
2377  s2->first_field ^= 1;
2378  s2->v_edge_pos = 8 * s2->mb_height;
2379  memset(s2->mbskip_table, 0, s2->mb_stride * s2->mb_height);
2380  }
2381  }
2382  if (start_code >= SLICE_MIN_START_CODE &&
2383  start_code <= SLICE_MAX_START_CODE && last_code != 0) {
2384  const int field_pic = s2->picture_structure != PICT_FRAME;
2385  int mb_y = start_code - SLICE_MIN_START_CODE;
2386  last_code = SLICE_MIN_START_CODE;
2387  if (s2->codec_id != AV_CODEC_ID_MPEG1VIDEO && s2->mb_height > 2800/16)
2388  mb_y += (*buf_ptr&0xE0)<<2;
2389 
2390  mb_y <<= field_pic;
2391  if (s2->picture_structure == PICT_BOTTOM_FIELD)
2392  mb_y++;
2393 
2394  if (buf_end - buf_ptr < 2) {
2395  av_log(s2->avctx, AV_LOG_ERROR, "slice too small\n");
2396  return AVERROR_INVALIDDATA;
2397  }
2398 
2399  if (mb_y >= s2->mb_height) {
2400  av_log(s2->avctx, AV_LOG_ERROR,
2401  "slice below image (%d >= %d)\n", mb_y, s2->mb_height);
2402  return AVERROR_INVALIDDATA;
2403  }
2404 
2405  if (!s2->last_picture_ptr) {
2406  /* Skip B-frames if we do not have reference frames and
2407  * GOP is not closed. */
2408  if (s2->pict_type == AV_PICTURE_TYPE_B) {
2409  if (!s->closed_gop) {
2410  skip_frame = 1;
2411  av_log(s2->avctx, AV_LOG_DEBUG,
2412  "Skipping B slice due to open GOP\n");
2413  break;
2414  }
2415  }
2416  }
2417  if (s2->pict_type == AV_PICTURE_TYPE_I || (s2->avctx->flags2 & AV_CODEC_FLAG2_SHOW_ALL))
2418  s->sync = 1;
2419  if (!s2->next_picture_ptr) {
2420  /* Skip P-frames if we do not have a reference frame or
2421  * we have an invalid header. */
2422  if (s2->pict_type == AV_PICTURE_TYPE_P && !s->sync) {
2423  skip_frame = 1;
2424  av_log(s2->avctx, AV_LOG_DEBUG,
2425  "Skipping P slice due to !sync\n");
2426  break;
2427  }
2428  }
2429  if ((avctx->skip_frame >= AVDISCARD_NONREF &&
2430  s2->pict_type == AV_PICTURE_TYPE_B) ||
2431  (avctx->skip_frame >= AVDISCARD_NONKEY &&
2432  s2->pict_type != AV_PICTURE_TYPE_I) ||
2433  avctx->skip_frame >= AVDISCARD_ALL) {
2434  skip_frame = 1;
2435  break;
2436  }
2437 
2438  if (!s->mpeg_enc_ctx_allocated)
2439  break;
2440 
2441  if (s2->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
2442  if (mb_y < avctx->skip_top ||
2443  mb_y >= s2->mb_height - avctx->skip_bottom)
2444  break;
2445  }
2446 
2447  if (!s2->pict_type) {
2448  av_log(avctx, AV_LOG_ERROR, "Missing picture start code\n");
2449  if (avctx->err_recognition & AV_EF_EXPLODE)
2450  return AVERROR_INVALIDDATA;
2451  break;
2452  }
2453 
2454  if (s->first_slice) {
2455  skip_frame = 0;
2456  s->first_slice = 0;
2457  if ((ret = mpeg_field_start(s2, buf, buf_size)) < 0)
2458  return ret;
2459  }
2460  if (!s2->current_picture_ptr) {
2461  av_log(avctx, AV_LOG_ERROR,
2462  "current_picture not initialized\n");
2463  return AVERROR_INVALIDDATA;
2464  }
2465 
2466  if (HAVE_THREADS &&
2467  (avctx->active_thread_type & FF_THREAD_SLICE) &&
2468  !avctx->hwaccel) {
2469  int threshold = (s2->mb_height * s->slice_count +
2470  s2->slice_context_count / 2) /
2471  s2->slice_context_count;
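      /* Rounded division: a new slice context is opened roughly every
       * mb_height / slice_context_count macroblock rows. Illustrative numbers:
       * with mb_height = 68 and slice_context_count = 4, contexts begin near
       * mb rows 0, 17, 34 and 51. */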
2472  av_assert0(avctx->thread_count > 1);
2473  if (threshold <= mb_y) {
2474  MpegEncContext *thread_context = s2->thread_context[s->slice_count];
2475 
2476  thread_context->start_mb_y = mb_y;
2477  thread_context->end_mb_y = s2->mb_height;
2478  if (s->slice_count) {
2479  s2->thread_context[s->slice_count - 1]->end_mb_y = mb_y;
2480  ret = ff_update_duplicate_context(thread_context, s2);
2481  if (ret < 0)
2482  return ret;
2483  }
2484  ret = init_get_bits8(&thread_context->gb, buf_ptr, input_size);
2485  if (ret < 0)
2486  return ret;
2487  s->slice_count++;
2488  }
2489  buf_ptr += 2; // FIXME add minimum number of bytes per slice
2490  } else {
2491  ret = mpeg_decode_slice(s2, mb_y, &buf_ptr, input_size);
2492  emms_c();
2493 
2494  if (ret < 0) {
2495  if (avctx->err_recognition & AV_EF_EXPLODE)
2496  return ret;
2497  if (s2->resync_mb_x >= 0 && s2->resync_mb_y >= 0)
2498  ff_er_add_slice(&s2->er, s2->resync_mb_x,
2499  s2->resync_mb_y, s2->mb_x, s2->mb_y,
2500  ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
2501  } else {
2502  ff_er_add_slice(&s2->er, s2->resync_mb_x,
2503  s2->resync_mb_y, s2->mb_x - 1, s2->mb_y,
2504  ER_AC_END | ER_DC_END | ER_MV_END);
2505  }
2506  }
2507  }
2508  break;
2509  }
2510  }
2511 }
2512 
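      /* Top-level decode callback: flush the delayed picture on an empty packet or a
       * lone SEQ_END_CODE, bootstrap VCR2/BW10 streams, decode extradata once, then
       * hand the packet to decode_chunks() and attach GOP timecode metadata to any
       * returned frame. */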
2513 static int mpeg_decode_frame(AVCodecContext *avctx, AVFrame *picture,
2514  int *got_output, AVPacket *avpkt)
2515 {
2516  const uint8_t *buf = avpkt->data;
2517  int ret;
2518  int buf_size = avpkt->size;
2519  Mpeg1Context *s = avctx->priv_data;
2520  MpegEncContext *s2 = &s->mpeg_enc_ctx;
2521 
2522  if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == SEQ_END_CODE)) {
2523  /* special case for last picture */
2524  if (s2->low_delay == 0 && s2->next_picture_ptr) {
2525  int ret = av_frame_ref(picture, s2->next_picture_ptr->f);
2526  if (ret < 0)
2527  return ret;
2528 
2529  s2->next_picture_ptr = NULL;
2530 
2531  *got_output = 1;
2532  }
2533  return buf_size;
2534  }
2535 
2536  if (s->mpeg_enc_ctx_allocated == 0 && ( s2->codec_tag == AV_RL32("VCR2")
2537  || s2->codec_tag == AV_RL32("BW10")
2538  ))
2539  vcr2_init_sequence(avctx);
2540 
2541  s->slice_count = 0;
2542 
2543  if (avctx->extradata && !s->extradata_decoded) {
2544  ret = decode_chunks(avctx, picture, got_output,
2545  avctx->extradata, avctx->extradata_size);
2546  if (*got_output) {
2547  av_log(avctx, AV_LOG_ERROR, "picture in extradata\n");
2548  av_frame_unref(picture);
2549  *got_output = 0;
2550  }
2551  s->extradata_decoded = 1;
2552  if (ret < 0 && (avctx->err_recognition & AV_EF_EXPLODE)) {
2553  s2->current_picture_ptr = NULL;
2554  return ret;
2555  }
2556  }
2557 
2558  ret = decode_chunks(avctx, picture, got_output, buf, buf_size);
2559  if (ret<0 || *got_output) {
2560  s2->current_picture_ptr = NULL;
2561 
2562  if (s->timecode_frame_start != -1 && *got_output) {
2563  char tcbuf[AV_TIMECODE_STR_SIZE];
2564  AVFrameSideData *tcside = av_frame_new_side_data(picture,
2565  AV_FRAME_DATA_GOP_TIMECODE,
2566  sizeof(int64_t));
2567  if (!tcside)
2568  return AVERROR(ENOMEM);
2569  memcpy(tcside->data, &s->timecode_frame_start, sizeof(int64_t));
2570 
2571  av_timecode_make_mpeg_tc_string(tcbuf, s->timecode_frame_start);
2572  av_dict_set(&picture->metadata, "timecode", tcbuf, 0);
2573 
2574  s->timecode_frame_start = -1;
2575  }
2576  }
2577 
2578  return ret;
2579 }
2580 
2581 static void flush(AVCodecContext *avctx)
2582 {
2583  Mpeg1Context *s = avctx->priv_data;
2584 
2585  s->sync = 0;
2586  s->closed_gop = 0;
2587 
2588  av_buffer_unref(&s->a53_buf_ref);
2589  ff_mpeg_flush(avctx);
2590 }
2591 
2592 static av_cold int mpeg_decode_end(AVCodecContext *avctx)
2593 {
2594  Mpeg1Context *s = avctx->priv_data;
2595 
2596  if (s->mpeg_enc_ctx_allocated)
2597  ff_mpv_common_end(&s->mpeg_enc_ctx);
2598  av_buffer_unref(&s->a53_buf_ref);
2599  return 0;
2600 }
2601 
2602 const FFCodec ff_mpeg1video_decoder = {
2603  .p.name = "mpeg1video",
2604  CODEC_LONG_NAME("MPEG-1 video"),
2605  .p.type = AVMEDIA_TYPE_VIDEO,
2606  .p.id = AV_CODEC_ID_MPEG1VIDEO,
2607  .priv_data_size = sizeof(Mpeg1Context),
2608  .init = mpeg_decode_init,
2609  .close = mpeg_decode_end,
2610  FF_CODEC_DECODE_CB(mpeg_decode_frame),
2611  .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2612  AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS | AV_CODEC_CAP_FRAME_THREADS,
2613  .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2614  .flush = flush,
2615  .p.max_lowres = 3,
2616  UPDATE_THREAD_CONTEXT(mpeg_decode_update_thread_context),
2617  .hw_configs = (const AVCodecHWConfigInternal *const []) {
2618 #if CONFIG_MPEG1_NVDEC_HWACCEL
2619  HWACCEL_NVDEC(mpeg1),
2620 #endif
2621 #if CONFIG_MPEG1_VDPAU_HWACCEL
2622  HWACCEL_VDPAU(mpeg1),
2623 #endif
2624 #if CONFIG_MPEG1_VIDEOTOOLBOX_HWACCEL
2625  HWACCEL_VIDEOTOOLBOX(mpeg1),
2626 #endif
2627  NULL
2628  },
2629 };
2630 
2631 #define M2V_OFFSET(x) offsetof(Mpeg1Context, x)
2632 #define M2V_PARAM AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM
2633 
2634 static const AVOption mpeg2video_options[] = {
2635  { "cc_format", "extract a specific Closed Captions format",
2636  M2V_OFFSET(cc_format), AV_OPT_TYPE_INT, { .i64 = CC_FORMAT_AUTO },
2637  CC_FORMAT_AUTO, CC_FORMAT_DVD, M2V_PARAM, .unit = "cc_format" },
2638 
2639  { "auto", "pick first seen CC substream", 0, AV_OPT_TYPE_CONST,
2640  { .i64 = CC_FORMAT_AUTO }, .flags = M2V_PARAM, .unit = "cc_format" },
2641  { "a53", "pick A/53 Part 4 CC substream", 0, AV_OPT_TYPE_CONST,
2642  { .i64 = CC_FORMAT_A53_PART4 }, .flags = M2V_PARAM, .unit = "cc_format" },
2643  { "scte20", "pick SCTE-20 CC substream", 0, AV_OPT_TYPE_CONST,
2644  { .i64 = CC_FORMAT_SCTE20 }, .flags = M2V_PARAM, .unit = "cc_format" },
2645  { "dvd", "pick DVD CC substream", 0, AV_OPT_TYPE_CONST,
2646  { .i64 = CC_FORMAT_DVD }, .flags = M2V_PARAM, .unit = "cc_format" },
2647  { NULL }
2648 };
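 /* The cc_format choice is exposed as a decoder private option; with the ffmpeg CLI
  * it would typically be selected as an input option, e.g. "-cc_format dvd" placed
  * before "-i" (illustrative invocation, not taken from this file). */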
2649 
2650 static const AVClass mpeg2video_class = {
2651  .class_name = "MPEG-2 video",
2652  .item_name = av_default_item_name,
2653  .option = mpeg2video_options,
2654  .version = LIBAVUTIL_VERSION_INT,
2655  .category = AV_CLASS_CATEGORY_DECODER,
2656 };
2657 
2658 const FFCodec ff_mpeg2video_decoder = {
2659  .p.name = "mpeg2video",
2660  CODEC_LONG_NAME("MPEG-2 video"),
2661  .p.type = AVMEDIA_TYPE_VIDEO,
2662  .p.id = AV_CODEC_ID_MPEG2VIDEO,
2663  .p.priv_class = &mpeg2video_class,
2664  .priv_data_size = sizeof(Mpeg1Context),
2665  .init = mpeg_decode_init,
2666  .close = mpeg_decode_end,
2667  FF_CODEC_DECODE_CB(mpeg_decode_frame),
2668  .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2669  AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS | AV_CODEC_CAP_FRAME_THREADS,
2670  .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2671  .flush = flush,
2672  .p.max_lowres = 3,
2673  UPDATE_THREAD_CONTEXT(mpeg_decode_update_thread_context),
2674  .hw_configs = (const AVCodecHWConfigInternal *const []) {
2675 #if CONFIG_MPEG2_DXVA2_HWACCEL
2676  HWACCEL_DXVA2(mpeg2),
2677 #endif
2678 #if CONFIG_MPEG2_D3D11VA_HWACCEL
2679  HWACCEL_D3D11VA(mpeg2),
2680 #endif
2681 #if CONFIG_MPEG2_D3D11VA2_HWACCEL
2682  HWACCEL_D3D11VA2(mpeg2),
2683 #endif
2684 #if CONFIG_MPEG2_D3D12VA_HWACCEL
2685  HWACCEL_D3D12VA(mpeg2),
2686 #endif
2687 #if CONFIG_MPEG2_NVDEC_HWACCEL
2688  HWACCEL_NVDEC(mpeg2),
2689 #endif
2690 #if CONFIG_MPEG2_VAAPI_HWACCEL
2691  HWACCEL_VAAPI(mpeg2),
2692 #endif
2693 #if CONFIG_MPEG2_VDPAU_HWACCEL
2694  HWACCEL_VDPAU(mpeg2),
2695 #endif
2696 #if CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL
2697  HWACCEL_VIDEOTOOLBOX(mpeg2),
2698 #endif
2699  NULL
2700  },
2701 };
2702 
2703 // legacy decoder
2704 const FFCodec ff_mpegvideo_decoder = {
2705  .p.name = "mpegvideo",
2706  CODEC_LONG_NAME("MPEG-1 video"),
2707  .p.type = AVMEDIA_TYPE_VIDEO,
2708  .p.id = AV_CODEC_ID_MPEG2VIDEO,
2709  .priv_data_size = sizeof(Mpeg1Context),
2710  .init = mpeg_decode_init,
2711  .close = mpeg_decode_end,
2712  FF_CODEC_DECODE_CB(mpeg_decode_frame),
2713  .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2714  AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS,
2715  .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2716  .flush = flush,
2717  .p.max_lowres = 3,
2718 };
2719 
2720 typedef struct IPUContext {
2721  MpegEncContext m;
2722 
2723  int flags;
2724  DECLARE_ALIGNED(32, int16_t, block)[6][64];
2725 } IPUContext;
2726 
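 /* Decoder for IPU video bitstreams: a one-byte flags header selects the intra DC
  * precision, quantizer scale type, VLC table and scan order; every 16x16 macroblock
  * is intra coded as four luma and two chroma blocks, and the stream must end on a
  * 32-bit trailer after byte alignment. */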
2727 static int ipu_decode_frame(AVCodecContext *avctx, AVFrame *frame,
2728  int *got_frame, AVPacket *avpkt)
2729 {
2730  IPUContext *s = avctx->priv_data;
2731  MpegEncContext *m = &s->m;
2732  GetBitContext *gb = &m->gb;
2733  int ret;
2734 
2735  // Check for minimal intra MB size (considering mb header, luma & chroma dc VLC, ac EOB VLC)
2736  if (avpkt->size*8LL < (avctx->width+15)/16 * ((avctx->height+15)/16) * (2LL + 3*4 + 2*2 + 2*6))
2737  return AVERROR_INVALIDDATA;
2738 
2739  ret = ff_get_buffer(avctx, frame, 0);
2740  if (ret < 0)
2741  return ret;
2742 
2743  ret = init_get_bits8(gb, avpkt->data, avpkt->size);
2744  if (ret < 0)
2745  return ret;
2746 
2747  s->flags = get_bits(gb, 8);
2748  m->intra_dc_precision = s->flags & 3;
2749  m->q_scale_type = !!(s->flags & 0x40);
2750  m->intra_vlc_format = !!(s->flags & 0x20);
2751  m->alternate_scan = !!(s->flags & 0x10);
2752 
2753  if (s->flags & 0x10) {
2756  } else {
2759  }
2760 
2761  m->last_dc[0] = m->last_dc[1] = m->last_dc[2] = 1 << (7 + (s->flags & 3));
2762  m->qscale = 1;
2763 
2764  for (int y = 0; y < avctx->height; y += 16) {
2765  int intraquant;
2766 
2767  for (int x = 0; x < avctx->width; x += 16) {
2768  if (x || y) {
2769  if (!get_bits1(gb))
2770  return AVERROR_INVALIDDATA;
2771  }
2772  if (get_bits1(gb)) {
2773  intraquant = 0;
2774  } else {
2775  if (!get_bits1(gb))
2776  return AVERROR_INVALIDDATA;
2777  intraquant = 1;
2778  }
2779 
2780  if (s->flags & 4)
2781  skip_bits1(gb);
2782 
2783  if (intraquant)
2784  m->qscale = mpeg_get_qscale(m);
2785 
2786  memset(s->block, 0, sizeof(s->block));
2787 
2788  for (int n = 0; n < 6; n++) {
2789  if (s->flags & 0x80) {
2790  ret = ff_mpeg1_decode_block_intra(gb,
2791  m->intra_matrix,
2792  m->intra_scantable.permutated,
2793  m->last_dc, s->block[n],
2794  n, m->qscale);
2795  if (ret >= 0)
2796  m->block_last_index[n] = ret;
2797  } else {
2798  ret = mpeg2_decode_block_intra(m, s->block[n], n);
2799  }
2800 
2801  if (ret < 0)
2802  return ret;
2803  }
2804 
2805  m->idsp.idct_put(frame->data[0] + y * frame->linesize[0] + x,
2806  frame->linesize[0], s->block[0]);
2807  m->idsp.idct_put(frame->data[0] + y * frame->linesize[0] + x + 8,
2808  frame->linesize[0], s->block[1]);
2809  m->idsp.idct_put(frame->data[0] + (y + 8) * frame->linesize[0] + x,
2810  frame->linesize[0], s->block[2]);
2811  m->idsp.idct_put(frame->data[0] + (y + 8) * frame->linesize[0] + x + 8,
2812  frame->linesize[0], s->block[3]);
2813  m->idsp.idct_put(frame->data[1] + (y >> 1) * frame->linesize[1] + (x >> 1),
2814  frame->linesize[1], s->block[4]);
2815  m->idsp.idct_put(frame->data[2] + (y >> 1) * frame->linesize[2] + (x >> 1),
2816  frame->linesize[2], s->block[5]);
2817  }
2818  }
2819 
2820  align_get_bits(gb);
2821  if (get_bits_left(gb) != 32)
2822  return AVERROR_INVALIDDATA;
2823 
2824  frame->pict_type = AV_PICTURE_TYPE_I;
2825  frame->flags |= AV_FRAME_FLAG_KEY;
2826  *got_frame = 1;
2827 
2828  return avpkt->size;
2829 }
2830 
2831 static av_cold int ipu_decode_init(AVCodecContext *avctx)
2832 {
2833  IPUContext *s = avctx->priv_data;
2834  MpegEncContext *m = &s->m;
2835 
2836  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
2837 
2838  ff_mpv_decode_init(m, avctx);
2840 
2841  for (int i = 0; i < 64; i++) {
2842  int j = m->idsp.idct_permutation[i];
2843  int v = ff_mpeg1_default_intra_matrix[i];
2844  m->intra_matrix[j] = v;
2845  m->chroma_intra_matrix[j] = v;
2846  }
2847 
2848  for (int i = 0; i < 64; i++) {
2849  int j = m->idsp.idct_permutation[i];
2850  int v = ff_mpeg1_default_non_intra_matrix[i];
2851  m->inter_matrix[j] = v;
2852  m->chroma_inter_matrix[j] = v;
2853  }
2854 
2855  return 0;
2856 }
2857 
2858 static av_cold int ipu_decode_end(AVCodecContext *avctx)
2859 {
2860  IPUContext *s = avctx->priv_data;
2861 
2862  ff_mpv_common_end(&s->m);
2863 
2864  return 0;
2865 }
2866 
2867 const FFCodec ff_ipu_decoder = {
2868  .p.name = "ipu",
2869  CODEC_LONG_NAME("IPU Video"),
2870  .p.type = AVMEDIA_TYPE_VIDEO,
2871  .p.id = AV_CODEC_ID_IPU,
2872  .priv_data_size = sizeof(IPUContext),
2873  .init = ipu_decode_init,
2874  FF_CODEC_DECODE_CB(ipu_decode_frame),
2875  .close = ipu_decode_end,
2876  .p.capabilities = AV_CODEC_CAP_DR1,
2877  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
2878 };
PICT_FRAME
#define PICT_FRAME
Definition: mpegutils.h:38
vcr2_init_sequence
static int vcr2_init_sequence(AVCodecContext *avctx)
Definition: mpeg12dec.c:1863
HWACCEL_D3D12VA
#define HWACCEL_D3D12VA(codec)
Definition: hwconfig.h:80
ff_mpv_common_init
av_cold int ff_mpv_common_init(MpegEncContext *s)
init common structure for both encoder and decoder.
Definition: mpegvideo.c:681
hwconfig.h
AVCodecContext::hwaccel
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:1427
FF_ENABLE_DEPRECATION_WARNINGS
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:73
MB_TYPE_L0
#define MB_TYPE_L0
Definition: mpegutils.h:60
MV_TYPE_16X16
#define MV_TYPE_16X16
1 vector for the whole mb
Definition: mpegvideo.h:261
Mpeg1Context::has_afd
int has_afd
Definition: mpeg12dec.c:82
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AV_TIMECODE_STR_SIZE
#define AV_TIMECODE_STR_SIZE
Definition: timecode.h:33
AV_PIX_FMT_CUDA
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
Definition: pixfmt.h:260
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
M2V_OFFSET
#define M2V_OFFSET(x)
Definition: mpeg12dec.c:2631
ff_mb_pat_vlc
VLCElem ff_mb_pat_vlc[512]
Definition: mpeg12.c:126
level
uint8_t level
Definition: svq3.c:204
AV_EF_EXPLODE
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: defs.h:51
Mpeg1Context::a53_buf_ref
AVBufferRef * a53_buf_ref
Definition: mpeg12dec.c:79
ff_mpeg2_aspect
const AVRational ff_mpeg2_aspect[16]
Definition: mpeg12data.c:380
FF_CODEC_CAP_INIT_CLEANUP
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a fai...
Definition: codec_internal.h:42
show_bits_long
static unsigned int show_bits_long(GetBitContext *s, int n)
Show 0-32 bits.
Definition: get_bits.h:495
mpeg_decode_a53_cc
static int mpeg_decode_a53_cc(AVCodecContext *avctx, const uint8_t *p, int buf_size)
Definition: mpeg12dec.c:1928
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:695
FMT_MPEG1
@ FMT_MPEG1
Definition: mpegutils.h:117
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
decode_slice
static int decode_slice(AVCodecContext *c, void *arg)
Definition: ffv1dec.c:255
ff_mpv_export_qp_table
int ff_mpv_export_qp_table(const MpegEncContext *s, AVFrame *f, const Picture *p, int qp_type)
Definition: mpegvideo_dec.c:505
AV_CLASS_CATEGORY_DECODER
@ AV_CLASS_CATEGORY_DECODER
Definition: log.h:35
AV_STEREO3D_SIDEBYSIDE_QUINCUNX
@ AV_STEREO3D_SIDEBYSIDE_QUINCUNX
Views are next to each other, but when upscaling apply a checkerboard pattern.
Definition: stereo3d.h:114
FF_MPV_QSCALE_TYPE_MPEG2
#define FF_MPV_QSCALE_TYPE_MPEG2
Definition: mpegvideodec.h:41
ff_frame_new_side_data_from_buf
int ff_frame_new_side_data_from_buf(const AVCodecContext *avctx, AVFrame *frame, enum AVFrameSideDataType type, AVBufferRef **buf, AVFrameSideData **psd)
Similar to ff_frame_new_side_data, but using an existing buffer ref.
Definition: decode.c:1838
mem_internal.h
ff_get_format
int ff_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Select the (possibly hardware accelerated) pixel format.
Definition: decode.c:1220
mpeg_decode_frame
static int mpeg_decode_frame(AVCodecContext *avctx, AVFrame *picture, int *got_output, AVPacket *avpkt)
Definition: mpeg12dec.c:2513
MpegEncContext::gb
GetBitContext gb
Definition: mpegvideo.h:425
AV_EF_COMPLIANT
#define AV_EF_COMPLIANT
consider all spec non compliances as errors
Definition: defs.h:55
SEQ_END_CODE
#define SEQ_END_CODE
Definition: mpeg12.h:28
av_frame_new_side_data
AVFrameSideData * av_frame_new_side_data(AVFrame *frame, enum AVFrameSideDataType type, size_t size)
Add a new side data to a frame.
Definition: frame.c:765
check_scantable_index
#define check_scantable_index(ctx, x)
Definition: mpeg12dec.c:151
AV_FRAME_DATA_A53_CC
@ AV_FRAME_DATA_A53_CC
ATSC A53 Part 4 Closed Captions.
Definition: frame.h:59
MT_FIELD
#define MT_FIELD
Definition: mpeg12dec.c:423
EXT_START_CODE
#define EXT_START_CODE
Definition: cavs.h:39
MV_TYPE_16X8
#define MV_TYPE_16X8
2 vectors, one per 16x8 block
Definition: mpegvideo.h:263
av_div_q
AVRational av_div_q(AVRational b, AVRational c)
Divide one rational by another.
Definition: rational.c:88
AVPanScan
Pan Scan area.
Definition: defs.h:240
AVCodecContext::err_recognition
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:1420
SLICE_MAX_START_CODE
#define SLICE_MAX_START_CODE
Definition: cavs.h:38
int64_t
long long int64_t
Definition: coverity.c:34
MB_TYPE_16x8
#define MB_TYPE_16x8
Definition: mpegutils.h:48
get_bits_count
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:266
ipu_decode_init
static av_cold int ipu_decode_init(AVCodecContext *avctx)
Definition: mpeg12dec.c:2831
ff_update_duplicate_context
int ff_update_duplicate_context(MpegEncContext *dst, const MpegEncContext *src)
Definition: mpegvideo.c:490
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:344
start_code
static const uint8_t start_code[]
Definition: videotoolboxenc.c:220
ff_mpv_report_decode_progress
void ff_mpv_report_decode_progress(MpegEncContext *s)
Definition: mpegvideo_dec.c:564
w
uint8_t w
Definition: llviddspenc.c:38
HWACCEL_DXVA2
#define HWACCEL_DXVA2(codec)
Definition: hwconfig.h:64
ff_mpegvideo_decoder
const FFCodec ff_mpegvideo_decoder
Definition: mpeg12dec.c:2704
internal.h
AVPacket::data
uint8_t * data
Definition: packet.h:522
ipu_decode_end
static av_cold int ipu_decode_end(AVCodecContext *avctx)
Definition: mpeg12dec.c:2858
mpeg_decode_mb
static int mpeg_decode_mb(MpegEncContext *s, int16_t block[12][64])
Definition: mpeg12dec.c:428
Mpeg1Context::closed_gop
int closed_gop
Definition: mpeg12dec.c:90
mpeg2_decode_block_intra
static int mpeg2_decode_block_intra(MpegEncContext *s, int16_t *block, int n)
Definition: mpeg12dec.c:329
AVOption
AVOption.
Definition: opt.h:346
HWACCEL_D3D11VA2
#define HWACCEL_D3D11VA2(codec)
Definition: hwconfig.h:66
ff_reverse
const uint8_t ff_reverse[256]
Definition: reverse.c:23
MpegEncContext::last_dc
int last_dc[3]
last DC values for MPEG-1
Definition: mpegvideo.h:175
MB_TYPE_16x16
#define MB_TYPE_16x16
Definition: mpegutils.h:47
AV_PIX_FMT_D3D11VA_VLD
@ AV_PIX_FMT_D3D11VA_VLD
HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView p...
Definition: pixfmt.h:254
FFCodec
Definition: codec_internal.h:127
PICT_BOTTOM_FIELD
#define PICT_BOTTOM_FIELD
Definition: mpegutils.h:37
FF_HW_SIMPLE_CALL
#define FF_HW_SIMPLE_CALL(avctx, function)
Definition: hwaccel_internal.h:174
ff_er_add_slice
void ff_er_add_slice(ERContext *s, int startx, int starty, int endx, int endy, int status)
Add a slice.
Definition: error_resilience.c:822
ff_init_block_index
void ff_init_block_index(MpegEncContext *s)
Definition: mpegvideo.c:845
reverse.h
mpegvideo.h
MB_TYPE_L1
#define MB_TYPE_L1
Definition: mpegutils.h:61
UPDATE_CACHE
#define UPDATE_CACHE(name, gb)
Definition: get_bits.h:225
AVFrame::flags
int flags
Frame flags, a combination of AV_FRAME_FLAGS.
Definition: frame.h:616
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
Mpeg1Context::first_slice
int first_slice
Definition: mpeg12dec.c:92
ER_DC_END
#define ER_DC_END
Definition: error_resilience.h:34
mpeg_decode_postinit
static int mpeg_decode_postinit(AVCodecContext *avctx)
Definition: mpeg12dec.c:907
mpegutils.h
ff_set_dimensions
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:94
ER_MV_ERROR
#define ER_MV_ERROR
Definition: error_resilience.h:32
thread.h
SEQ_START_CODE
#define SEQ_START_CODE
Definition: mpeg12.h:29
FF_DEBUG_PICT_INFO
#define FF_DEBUG_PICT_INFO
Definition: avcodec.h:1397
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:365
MV_TYPE_DMV
#define MV_TYPE_DMV
2 vectors, special mpeg2 Dual Prime Vectors
Definition: mpegvideo.h:265
GET_CACHE
#define GET_CACHE(name, gb)
Definition: get_bits.h:263
skip_bits
static void skip_bits(GetBitContext *s, int n)
Definition: get_bits.h:381
ff_mpeg2_rl_vlc
RL_VLC_ELEM ff_mpeg2_rl_vlc[674]
Definition: mpeg12.c:129
Mpeg1Context::save_aspect
AVRational save_aspect
Definition: mpeg12dec.c:85
MpegEncContext::intra_scantable
ScanTable intra_scantable
Definition: mpegvideo.h:81
AVCodecContext::framerate
AVRational framerate
Definition: avcodec.h:560
AV_STEREO3D_SIDEBYSIDE
@ AV_STEREO3D_SIDEBYSIDE
Views are next to each other.
Definition: stereo3d.h:64
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:335
MB_TYPE_ZERO_MV
#define MB_TYPE_ZERO_MV
Definition: mpeg12dec.c:97
MT_DMV
#define MT_DMV
Definition: mpeg12dec.c:426
ff_mpv_reconstruct_mb
void ff_mpv_reconstruct_mb(MpegEncContext *s, int16_t block[12][64])
Definition: mpegvideo_dec.c:1008
ff_mbincr_vlc
VLCElem ff_mbincr_vlc[538]
Definition: mpeg12.c:123
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:131
decode_chunks
static int decode_chunks(AVCodecContext *avctx, AVFrame *picture, int *got_output, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:2174
AVCodecContext::skip_frame
enum AVDiscard skip_frame
Skip decoding for selected frames.
Definition: avcodec.h:1819
mpeg_decode_quant_matrix_extension
static void mpeg_decode_quant_matrix_extension(MpegEncContext *s)
Definition: mpeg12dec.c:1209
AVCodecContext::thread_count
int thread_count
thread count is used to decide how many independent tasks should be passed to execute()
Definition: avcodec.h:1582
AV_STEREO3D_2D
@ AV_STEREO3D_2D
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:52
wrap
#define wrap(func)
Definition: neontest.h:65
timecode.h
GetBitContext
Definition: get_bits.h:108
AV_EF_BITSTREAM
#define AV_EF_BITSTREAM
detect bitstream specification deviations
Definition: defs.h:49
USES_LIST
#define USES_LIST(a, list)
Definition: mpegutils.h:92
slice_decode_thread
static int slice_decode_thread(AVCodecContext *c, void *arg)
Definition: mpeg12dec.c:1672
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:502
IDCTDSPContext::idct_put
void(* idct_put)(uint8_t *dest, ptrdiff_t line_size, int16_t *block)
block -> idct -> clip to unsigned 8 bit -> dest.
Definition: idctdsp.h:62
MB_TYPE_CBP
#define MB_TYPE_CBP
Definition: mpegutils.h:64
val
static double val(void *priv, double ch)
Definition: aeval.c:78
Mpeg1Context::tmpgexs
int tmpgexs
Definition: mpeg12dec.c:91
HWACCEL_VDPAU
#define HWACCEL_VDPAU(codec)
Definition: hwconfig.h:72
AV_CODEC_FLAG_LOW_DELAY
#define AV_CODEC_FLAG_LOW_DELAY
Force low delay.
Definition: avcodec.h:334
mpeg12_pixfmt_list_444
static enum AVPixelFormat mpeg12_pixfmt_list_444[]
Definition: mpeg12dec.c:879
AVCodecContext::coded_height
int coded_height
Definition: avcodec.h:633
ff_print_debug_info
void ff_print_debug_info(const MpegEncContext *s, const Picture *p, AVFrame *pict)
Definition: mpegvideo_dec.c:498
mpeg1_decode_sequence
static int mpeg1_decode_sequence(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:1773
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
HAS_CBP
#define HAS_CBP(a)
Definition: mpegutils.h:94
AVRational::num
int num
Numerator.
Definition: rational.h:59
GOP_START_CODE
#define GOP_START_CODE
Definition: mpeg12.h:30
IPUContext
Definition: mpeg12dec.c:2720
mpeg1_hwaccel_pixfmt_list_420
static enum AVPixelFormat mpeg1_hwaccel_pixfmt_list_420[]
Definition: mpeg12dec.c:836
ff_mpv_common_end
void ff_mpv_common_end(MpegEncContext *s)
Definition: mpegvideo.c:782
mpeg12.h
mpegvideodec.h
ff_mpeg2video_decoder
const FFCodec ff_mpeg2video_decoder
Definition: mpeg12dec.c:2658
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
Mpeg1Context::frame_rate_index
unsigned frame_rate_index
Definition: mpeg12dec.c:88
ipu_decode_frame
static int ipu_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt)
Definition: mpeg12dec.c:2727
ER_DC_ERROR
#define ER_DC_ERROR
Definition: error_resilience.h:31
av_cold
#define av_cold
Definition: attributes.h:90
mpeg2_hwaccel_pixfmt_list_420
static enum AVPixelFormat mpeg2_hwaccel_pixfmt_list_420[]
Definition: mpeg12dec.c:847
init_get_bits8
static int init_get_bits8(GetBitContext *s, const uint8_t *buffer, int byte_size)
Initialize GetBitContext.
Definition: get_bits.h:545
AV_FRAME_FLAG_KEY
#define AV_FRAME_FLAG_KEY
A flag to mark frames that are keyframes.
Definition: frame.h:595
mpeg1_decode_picture
static int mpeg1_decode_picture(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:1038
flush
static void flush(AVCodecContext *avctx)
Definition: mpeg12dec.c:2581
Mpeg1Context::save_progressive_seq
int save_progressive_seq
Definition: mpeg12dec.c:86
emms_c
#define emms_c()
Definition: emms.h:63
CLOSE_READER
#define CLOSE_READER(name, gb)
Definition: get_bits.h:188
AVCodecContext::extradata_size
int extradata_size
Definition: avcodec.h:524
AVCodecContext::has_b_frames
int has_b_frames
Size of the frame reordering buffer in the decoder.
Definition: avcodec.h:723
A53_MAX_CC_COUNT
#define A53_MAX_CC_COUNT
Definition: mpeg12dec.c:63
ff_er_frame_end
void ff_er_frame_end(ERContext *s, int *decode_error_flags)
Indicate that a frame has finished decoding and perform error concealment in case it has been enabled...
Definition: error_resilience.c:892
Mpeg1Context::repeat_field
int repeat_field
Definition: mpeg12dec.c:75
width
#define width
FF_CODEC_DECODE_CB
#define FF_CODEC_DECODE_CB(func)
Definition: codec_internal.h:287
stereo3d.h
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
s
#define s(width, name)
Definition: cbs_vp9.c:198
ff_mv_vlc
VLCElem ff_mv_vlc[266]
Definition: mpeg12.c:118
format
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample format(the sample packing is implied by the sample format) and sample rate. The lists are not just lists
ff_mpeg1_aspect
const float ff_mpeg1_aspect[16]
Definition: mpeg12data.c:359
s1
#define s1
Definition: regdef.h:38
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict)
Handle slice ends.
Definition: mpeg12dec.c:1725
Mpeg1Context::mpeg_enc_ctx_allocated
int mpeg_enc_ctx_allocated
Definition: mpeg12dec.c:74
SHOW_SBITS
#define SHOW_SBITS(name, gb, num)
Definition: get_bits.h:260
ff_mpeg_er_frame_start
void ff_mpeg_er_frame_start(MpegEncContext *s)
Definition: mpeg_er.c:47
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
Mpeg1Context::aspect_ratio_info
unsigned aspect_ratio_info
Definition: mpeg12dec.c:84
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:304
mpeg_decode_sequence_display_extension
static void mpeg_decode_sequence_display_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1128
Mpeg1Context::pan_scan
AVPanScan pan_scan
Definition: mpeg12dec.c:76
get_sbits
static int get_sbits(GetBitContext *s, int n)
Definition: get_bits.h:320
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
ctx
AVFormatContext * ctx
Definition: movenc.c:48
PICT_TOP_FIELD
#define PICT_TOP_FIELD
Definition: mpegutils.h:36
decode.h
mpeg12_pixfmt_list_422
static enum AVPixelFormat mpeg12_pixfmt_list_422[]
Definition: mpeg12dec.c:874
SKIP_BITS
#define SKIP_BITS(name, gb, num)
Definition: get_bits.h:241
IS_INTRA
#define IS_INTRA(x, y)
field
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
Definition: writing_filters.txt:78
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
AVCodecContext::rc_max_rate
int64_t rc_max_rate
maximum bitrate
Definition: avcodec.h:1292
CODEC_LONG_NAME
#define CODEC_LONG_NAME(str)
Definition: codec_internal.h:272
frame
static AVFrame * frame
Definition: demux_decode.c:54
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:455
arg
const char * arg
Definition: jacosubdec.c:67
rl_vlc
static const VLCElem * rl_vlc[2]
Definition: mobiclip.c:277
if
if(ret)
Definition: filter_design.txt:179
AVDISCARD_ALL
@ AVDISCARD_ALL
discard all
Definition: defs.h:219
MB_PTYPE_VLC_BITS
#define MB_PTYPE_VLC_BITS
Definition: mpeg12vlc.h:39
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
Mpeg1Context::save_width
int save_width
Definition: mpeg12dec.c:86
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
PTRDIFF_SPECIFIER
#define PTRDIFF_SPECIFIER
Definition: internal.h:140
NULL
#define NULL
Definition: coverity.c:32
run
uint8_t run
Definition: svq3.c:203
AVCodecContext::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:695
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
ER_AC_ERROR
#define ER_AC_ERROR
Definition: error_resilience.h:30
SLICE_MIN_START_CODE
#define SLICE_MIN_START_CODE
Definition: mpeg12.h:32
hwaccel_internal.h
Mpeg1Context::sync
int sync
Definition: mpeg12dec.c:89
AVCHROMA_LOC_LEFT
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Definition: pixfmt.h:704
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVCHROMA_LOC_TOPLEFT
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
Definition: pixfmt.h:706
AVCodecContext::bit_rate
int64_t bit_rate
the average bitrate
Definition: avcodec.h:495
mpeg_decode_picture_display_extension
static void mpeg_decode_picture_display_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1152
M2V_PARAM
#define M2V_PARAM
Definition: mpeg12dec.c:2632
MpegEncContext::inter_matrix
uint16_t inter_matrix[64]
Definition: mpegvideo.h:297
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:237
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:279
get_bits1
static unsigned int get_bits1(GetBitContext *s)
Definition: get_bits.h:388
profiles.h
CC_FORMAT_A53_PART4
@ CC_FORMAT_A53_PART4
Definition: mpeg12dec.c:67
LAST_SKIP_BITS
#define LAST_SKIP_BITS(name, gb, num)
Definition: get_bits.h:247
MB_TYPE_QUANT
#define MB_TYPE_QUANT
Definition: mpegutils.h:63
avpriv_find_start_code
const uint8_t * avpriv_find_start_code(const uint8_t *p, const uint8_t *end, uint32_t *state)
lowres
static int lowres
Definition: ffplay.c:333
ff_mpeg1_rl_vlc
RL_VLC_ELEM ff_mpeg1_rl_vlc[680]
Definition: mpeg12.c:128
MB_BTYPE_VLC_BITS
#define MB_BTYPE_VLC_BITS
Definition: mpeg12vlc.h:40
UPDATE_THREAD_CONTEXT
#define UPDATE_THREAD_CONTEXT(func)
Definition: codec_internal.h:281
CC_FORMAT_AUTO
@ CC_FORMAT_AUTO
Definition: mpeg12dec.c:66
AV_PIX_FMT_D3D12
@ AV_PIX_FMT_D3D12
Hardware surfaces for Direct3D 12.
Definition: pixfmt.h:440
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
mpeg12codecs.h
get_vlc2
static av_always_inline int get_vlc2(GetBitContext *s, const VLCElem *table, int bits, int max_depth)
Parse a vlc code.
Definition: get_bits.h:652
AV_FRAME_DATA_AFD
@ AV_FRAME_DATA_AFD
Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVAc...
Definition: frame.h:90
AVCodecContext::level
int level
Encoding level descriptor.
Definition: avcodec.h:1783
Mpeg1Context::save_height
int save_height
Definition: mpeg12dec.c:86
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
AV_CODEC_ID_MPEG1VIDEO
@ AV_CODEC_ID_MPEG1VIDEO
Definition: codec_id.h:53
MpegEncContext::idsp
IDCTDSPContext idsp
Definition: mpegvideo.h:217
ff_dlog
#define ff_dlog(a,...)
Definition: tableprint_vlc.h:28
startcode.h
s2
#define s2
Definition: regdef.h:39
CC_FORMAT_DVD
@ CC_FORMAT_DVD
Definition: mpeg12dec.c:69
AVDISCARD_NONKEY
@ AVDISCARD_NONKEY
discard all frames except keyframes
Definition: defs.h:218
check_marker
static int check_marker(void *logctx, GetBitContext *s, const char *msg)
Definition: mpegvideodec.h:73
AVCodecContext::flags2
int flags2
AV_CODEC_FLAG2_*.
Definition: avcodec.h:509
AVFrame::pict_type
enum AVPictureType pict_type
Picture type of the frame.
Definition: frame.h:446
ff_get_buffer
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1569
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:365
mpeg2video_options
static const AVOption mpeg2video_options[]
Definition: mpeg12dec.c:2634
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
AV_CODEC_FLAG_GRAY
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:322
AVPacket::size
int size
Definition: packet.h:523
dc
Tag MUST be and< 10hcoeff half pel interpolation filter coefficients, hcoeff[0] are the 2 middle coefficients[1] are the next outer ones and so on, resulting in a filter like:...eff[2], hcoeff[1], hcoeff[0], hcoeff[0], hcoeff[1], hcoeff[2] ... the sign of the coefficients is not explicitly stored but alternates after each coeff and coeff[0] is positive, so ...,+,-,+,-,+,+,-,+,-,+,... hcoeff[0] is not explicitly stored but found by subtracting the sum of all stored coefficients with signs from 32 hcoeff[0]=32 - hcoeff[1] - hcoeff[2] - ... a good choice for hcoeff and htaps is htaps=6 hcoeff={40,-10, 2} an alternative which requires more computations at both encoder and decoder side and may or may not be better is htaps=8 hcoeff={42,-14, 6,-2}ref_frames minimum of the number of available reference frames and max_ref_frames for example the first frame after a key frame always has ref_frames=1spatial_decomposition_type wavelet type 0 is a 9/7 symmetric compact integer wavelet 1 is a 5/3 symmetric compact integer wavelet others are reserved stored as delta from last, last is reset to 0 if always_reset||keyframeqlog quality(logarithmic quantizer scale) stored as delta from last, last is reset to 0 if always_reset||keyframemv_scale stored as delta from last, last is reset to 0 if always_reset||keyframe FIXME check that everything works fine if this changes between framesqbias dequantization bias stored as delta from last, last is reset to 0 if always_reset||keyframeblock_max_depth maximum depth of the block tree stored as delta from last, last is reset to 0 if always_reset||keyframequant_table quantization tableHighlevel bitstream structure:==============================--------------------------------------------|Header|--------------------------------------------|------------------------------------|||Block0||||split?||||yes no||||......... intra?||||:Block01 :yes no||||:Block02 :....... ..........||||:Block03 ::y DC ::ref index:||||:Block04 ::cb DC ::motion x :||||......... :cr DC ::motion y :||||....... ..........|||------------------------------------||------------------------------------|||Block1|||...|--------------------------------------------|------------ ------------ ------------|||Y subbands||Cb subbands||Cr subbands||||--- ---||--- ---||--- ---|||||LL0||HL0||||LL0||HL0||||LL0||HL0|||||--- ---||--- ---||--- ---||||--- ---||--- ---||--- ---|||||LH0||HH0||||LH0||HH0||||LH0||HH0|||||--- ---||--- ---||--- ---||||--- ---||--- ---||--- ---|||||HL1||LH1||||HL1||LH1||||HL1||LH1|||||--- ---||--- ---||--- ---||||--- ---||--- ---||--- ---|||||HH1||HL2||||HH1||HL2||||HH1||HL2|||||...||...||...|||------------ ------------ ------------|--------------------------------------------Decoding process:=================------------|||Subbands|------------||||------------|Intra DC||||LL0 subband prediction ------------|\ Dequantization ------------------- \||Reference frames|\ IDWT|------- -------|Motion \|||Frame 0||Frame 1||Compensation . OBMC v -------|------- -------|--------------. \------> Frame n output Frame Frame<----------------------------------/|...|------------------- Range Coder:============Binary Range Coder:------------------- The implemented range coder is an adapted version based upon "Range encoding: an algorithm for removing redundancy from a digitised message." by G. N. N. Martin. The symbols encoded by the Snow range coder are bits(0|1). The associated probabilities are not fix but change depending on the symbol mix seen so far. 
bit seen|new state ---------+----------------------------------------------- 0|256 - state_transition_table[256 - old_state];1|state_transition_table[old_state];state_transition_table={ 0, 0, 0, 0, 0, 0, 0, 0, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 190, 191, 192, 194, 194, 195, 196, 197, 198, 199, 200, 201, 202, 202, 204, 205, 206, 207, 208, 209, 209, 210, 211, 212, 213, 215, 215, 216, 217, 218, 219, 220, 220, 222, 223, 224, 225, 226, 227, 227, 229, 229, 230, 231, 232, 234, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 248, 0, 0, 0, 0, 0, 0, 0};FIXME Range Coding of integers:------------------------- FIXME Neighboring Blocks:===================left and top are set to the respective blocks unless they are outside of the image in which case they are set to the Null block top-left is set to the top left block unless it is outside of the image in which case it is set to the left block if this block has no larger parent block or it is at the left side of its parent block and the top right block is not outside of the image then the top right block is used for top-right else the top-left block is used Null block y, cb, cr are 128 level, ref, mx and my are 0 Motion Vector Prediction:=========================1. the motion vectors of all the neighboring blocks are scaled to compensate for the difference of reference frames scaled_mv=(mv *(256 *(current_reference+1)/(mv.reference+1))+128)> the median of the scaled top and top right vectors is used as motion vector prediction the used motion vector is the sum of the predictor and(mvx_diff, mvy_diff) *mv_scale Intra DC Prediction block[y][x] dc[1]
Definition: snow.txt:400
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:106
MpegEncContext::qscale
int qscale
QP.
Definition: mpegvideo.h:194
AV_CODEC_ID_IPU
@ AV_CODEC_ID_IPU
Definition: codec_id.h:306
AV_FRAME_DATA_PANSCAN
@ AV_FRAME_DATA_PANSCAN
The data is the AVPanScan struct defined in libavcodec.
Definition: frame.h:53
CC_FORMAT_SCTE20
@ CC_FORMAT_SCTE20
Definition: mpeg12dec.c:68
RL_VLC_ELEM
Definition: vlc.h:53
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:354
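As a hedged illustration of how av_frame_ref() is typically paired with av_frame_unref(), a minimal sketch (the borrow_frame() helper is hypothetical, not part of FFmpeg):

#include <libavutil/error.h>
#include <libavutil/frame.h>

/* Hypothetical helper: take a second reference to src, use it, then drop it. */
static int borrow_frame(const AVFrame *src)
{
    AVFrame *dst = av_frame_alloc();
    int ret;

    if (!dst)
        return AVERROR(ENOMEM);
    ret = av_frame_ref(dst, src);      /* dst now shares src's buffers */
    if (ret < 0) {
        av_frame_free(&dst);
        return ret;
    }
    /* ... read from dst ... */
    av_frame_unref(dst);               /* release the extra reference */
    av_frame_free(&dst);
    return 0;
}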
MT_FRAME
#define MT_FRAME
Definition: mpeg12dec.c:424
codec_internal.h
DECLARE_ALIGNED
#define DECLARE_ALIGNED(n, t, v)
Definition: mem_internal.h:109
shift
static int shift(int a, int b)
Definition: bonk.c:262
IPUContext::flags
int flags
Definition: mpeg12dec.c:2723
MpegEncContext::intra_matrix
uint16_t intra_matrix[64]
matrix transmitted in the bitstream
Definition: mpegvideo.h:295
ff_mpeg1_clean_buffers
void ff_mpeg1_clean_buffers(MpegEncContext *s)
Definition: mpeg12.c:106
ff_mpeg1video_decoder
const FFCodec ff_mpeg1video_decoder
Definition: mpeg12dec.c:2602
ff_frame_new_side_data
int ff_frame_new_side_data(const AVCodecContext *avctx, AVFrame *frame, enum AVFrameSideDataType type, size_t size, AVFrameSideData **psd)
Wrapper around av_frame_new_side_data, which rejects side data overridden by the demuxer.
Definition: decode.c:1819
AV_RB32
#define AV_RB32(x)
Definition: bytestream.h:96
FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM
#define FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM
The decoder extracts and fills its parameters even if the frame is skipped due to the skip_frame sett...
Definition: codec_internal.h:54
AVFrameSideData::data
uint8_t * data
Definition: frame.h:252
MB_TYPE_SKIP
#define MB_TYPE_SKIP
Definition: mpegutils.h:55
FF_THREAD_SLICE
#define FF_THREAD_SLICE
Decode more than one part of a single frame at once.
Definition: avcodec.h:1594
ff_mpeg_draw_horiz_band
void ff_mpeg_draw_horiz_band(MpegEncContext *s, int y, int h)
Definition: mpegvideo_dec.c:535
PICTURE_START_CODE
#define PICTURE_START_CODE
Definition: mpeg12.h:31
USER_START_CODE
#define USER_START_CODE
Definition: cavs.h:40
AVCodecContext::skip_bottom
int skip_bottom
Number of macroblock rows at the bottom which are skipped.
Definition: avcodec.h:1847
AVCodecHWConfigInternal
Definition: hwconfig.h:25
ff_mpeg1_default_intra_matrix
const uint16_t ff_mpeg1_default_intra_matrix[256]
Definition: mpeg12data.c:31
diff
static av_always_inline int diff(const struct color_info *a, const struct color_info *b, const int trans_thresh)
Definition: vf_paletteuse.c:164
ff_mpv_frame_start
int ff_mpv_frame_start(MpegEncContext *s, AVCodecContext *avctx)
generic function called after decoding the header and before a frame is decoded.
Definition: mpegvideo_dec.c:295
MB_TYPE_INTERLACED
#define MB_TYPE_INTERLACED
Definition: mpegutils.h:51
OPEN_READER
#define OPEN_READER(name, gb)
Definition: get_bits.h:177
ff_mpeg_flush
void ff_mpeg_flush(AVCodecContext *avctx)
Definition: mpegvideo_dec.c:543
height
#define height
Mpeg1Context::has_stereo3d
int has_stereo3d
Definition: mpeg12dec.c:78
mpeg_decode_init
static av_cold int mpeg_decode_init(AVCodecContext *avctx)
Definition: mpeg12dec.c:793
AV_CODEC_CAP_SLICE_THREADS
#define AV_CODEC_CAP_SLICE_THREADS
Codec supports slice-based (or partition-based) multithreading.
Definition: codec.h:114
HWACCEL_D3D11VA
#define HWACCEL_D3D11VA(codec)
Definition: hwconfig.h:78
mpegvideodata.h
attributes.h
ff_mpeg1_decode_block_intra
int ff_mpeg1_decode_block_intra(GetBitContext *gb, const uint16_t *quant_matrix, const uint8_t *scantable, int last_dc[3], int16_t *block, int index, int qscale)
Definition: mpeg12.c:172
MV_TYPE_FIELD
#define MV_TYPE_FIELD
2 vectors, one per field
Definition: mpegvideo.h:264
skip_bits1
static void skip_bits1(GetBitContext *s)
Definition: get_bits.h:413
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
HWACCEL_NVDEC
#define HWACCEL_NVDEC(codec)
Definition: hwconfig.h:68
mpeg2video_class
static const AVClass mpeg2video_class
Definition: mpeg12dec.c:2650
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:126
FF_THREAD_FRAME
#define FF_THREAD_FRAME
Decode more than one frame at once.
Definition: avcodec.h:1593
ff_mpeg2_video_profiles
const AVProfile ff_mpeg2_video_profiles[]
Definition: profiles.c:113
AV_PIX_FMT_VDPAU
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:194
emms.h
MB_TYPE_L0L1
#define MB_TYPE_L0L1
Definition: mpegutils.h:62
AV_PIX_FMT_VIDEOTOOLBOX
@ AV_PIX_FMT_VIDEOTOOLBOX
hardware decoding through Videotoolbox
Definition: pixfmt.h:305
ff_init_scantable
av_cold void ff_init_scantable(const uint8_t *permutation, ScanTable *st, const uint8_t *src_scantable)
Definition: mpegvideo.c:321
MpegEncContext::block_last_index
int block_last_index[12]
last non zero coefficient in block
Definition: mpegvideo.h:72
av_assert2
#define av_assert2(cond)
assert() equivalent, that does lie in speed critical code.
Definition: avassert.h:67
MpegEncContext::chroma_inter_matrix
uint16_t chroma_inter_matrix[64]
Definition: mpegvideo.h:298
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:255
code
Definition: filter_design.txt:178
AV_CODEC_FLAG2_SHOW_ALL
#define AV_CODEC_FLAG2_SHOW_ALL
Show all frames before the first keyframe.
Definition: avcodec.h:380
AVCodecContext::properties
unsigned properties
Properties of the stream that gets decoded.
Definition: avcodec.h:1795
ff_alternate_vertical_scan
const uint8_t ff_alternate_vertical_scan[64]
Definition: mpegvideodata.c:63
btype2mb_type
static const uint32_t btype2mb_type[11]
Definition: mpeg12dec.c:109
AVCodecContext::extradata
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
Definition: avcodec.h:523
show_bits
static unsigned int show_bits(GetBitContext *s, int n)
Show 1-25 bits.
Definition: get_bits.h:371
internal.h
ff_mpv_decode_init
void ff_mpv_decode_init(MpegEncContext *s, AVCodecContext *avctx)
Initialize the given MpegEncContext for decoding.
Definition: mpegvideo_dec.c:45
mpeg_set_cc_format
static void mpeg_set_cc_format(AVCodecContext *avctx, enum Mpeg2ClosedCaptionsFormat format, const char *label)
Definition: mpeg12dec.c:1914
AV_STEREO3D_TOPBOTTOM
@ AV_STEREO3D_TOPBOTTOM
Views are on top of each other.
Definition: stereo3d.h:76
IS_QUANT
#define IS_QUANT(a)
Definition: mpegutils.h:88
ff_mpeg12_init_vlcs
av_cold void ff_mpeg12_init_vlcs(void)
Definition: mpeg12.c:164
FF_DEBUG_STARTCODE
#define FF_DEBUG_STARTCODE
Definition: avcodec.h:1404
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
av_d2q
AVRational av_d2q(double d, int max)
Convert a double precision floating point number to a rational.
Definition: rational.c:106
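A small sketch of a typical use: approximating a floating-point frame rate as an AVRational (the helper name and the 100000 bound are illustrative choices, not FFmpeg code):

#include <libavutil/rational.h>

/* Hypothetical helper: approximate fps with numerator and denominator
 * bounded by 100000. */
static AVRational approx_fps(double fps)
{
    return av_d2q(fps, 100000);
}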
MB_PAT_VLC_BITS
#define MB_PAT_VLC_BITS
Definition: mpeg12vlc.h:38
mpeg1_decode_block_inter
static int mpeg1_decode_block_inter(MpegEncContext *s, int16_t *block, int n)
Definition: mpeg12dec.c:160
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:576
ptype2mb_type
static const uint32_t ptype2mb_type[7]
Definition: mpeg12dec.c:99
IPUContext::m
MpegEncContext m
Definition: mpeg12dec.c:2721
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:194
MpegEncContext::intra_vlc_format
int intra_vlc_format
Definition: mpegvideo.h:444
AVCodecContext::chroma_sample_location
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
Definition: avcodec.h:702
MAX_INDEX
#define MAX_INDEX
Definition: mpeg12dec.c:150
AVCodecContext::height
int height
Definition: avcodec.h:618
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:657
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:666
HWACCEL_VIDEOTOOLBOX
#define HWACCEL_VIDEOTOOLBOX(codec)
Definition: hwconfig.h:74
Mpeg1Context::stereo3d
AVStereo3D stereo3d
Definition: mpeg12dec.c:77
idctdsp.h
avcodec.h
av_cmp_q
static int av_cmp_q(AVRational a, AVRational b)
Compare two rationals.
Definition: rational.h:89
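A hedged usage sketch (the is_widescreen() helper is hypothetical); av_cmp_q() returns -1, 0 or 1 for less, equal or greater:

#include <libavutil/rational.h>

/* Hypothetical helper: true if the display aspect ratio is at least 16:9. */
static int is_widescreen(AVRational dar)
{
    const AVRational w = { 16, 9 };
    return av_cmp_q(dar, w) >= 0;      /* 32/18 compares equal to 16/9 */
}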
GET_RL_VLC
#define GET_RL_VLC(level, run, name, gb, table, bits, max_depth, need_update)
Definition: get_bits.h:606
ff_zigzag_direct
const uint8_t ff_zigzag_direct[64]
Definition: mathtables.c:98
ff_mpeg12_frame_rate_tab
const AVRational ff_mpeg12_frame_rate_tab[]
Definition: mpeg12framerate.c:24
mpeg_decode_gop
static int mpeg_decode_gop(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:2143
ret
ret
Definition: filter_design.txt:187
AV_EF_AGGRESSIVE
#define AV_EF_AGGRESSIVE
consider things that a sane encoder/muxer should not do as an error
Definition: defs.h:56
pred
static const float pred[4]
Definition: siprdata.h:259
AV_FRAME_DATA_GOP_TIMECODE
@ AV_FRAME_DATA_GOP_TIMECODE
The GOP timecode in 25 bit timecode format.
Definition: frame.h:125
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:71
ff_mpeg1_default_non_intra_matrix
const uint16_t ff_mpeg1_default_non_intra_matrix[64]
Definition: mpeg12data.c:42
align_get_bits
static const uint8_t * align_get_bits(GetBitContext *s)
Definition: get_bits.h:561
TEX_VLC_BITS
#define TEX_VLC_BITS
Definition: dvdec.c:147
ff_thread_finish_setup
The pkt_dts and pkt_pts fields in AVFrame will work as usual. Restriction: codecs whose streams don't reset across frames will not work, because their bitstreams cannot be decoded in parallel. The contents of buffers must not be read before the decode process starts; move such reads, as well as the code calling them, up to before the decode process starts and call ff_thread_finish_setup() afterwards. If some code can't be moved
left
Definition: snow.txt:386
mpeg_get_pixelformat
static enum AVPixelFormat mpeg_get_pixelformat(AVCodecContext *avctx)
Definition: mpeg12dec.c:884
AV_CODEC_FLAG2_CHUNKS
#define AV_CODEC_FLAG2_CHUNKS
Input bitstream might be truncated at packet boundaries instead of only at frame boundaries.
Definition: avcodec.h:371
AV_RL32
#define AV_RL32(x)
Definition: bytestream.h:92
mpeg12data.h
mpeg_field_start
static int mpeg_field_start(MpegEncContext *s, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:1284
ff_mpeg_update_thread_context
int ff_mpeg_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
Definition: mpegvideo_dec.c:62
skip_1stop_8data_bits
static int skip_1stop_8data_bits(GetBitContext *gb)
Definition: get_bits.h:700
AVCodecContext
main external API structure.
Definition: avcodec.h:445
AVCodecContext::active_thread_type
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:1601
av_timecode_make_mpeg_tc_string
char * av_timecode_make_mpeg_tc_string(char *buf, uint32_t tc25bit)
Get the timecode string from the 25-bit timecode format (MPEG GOP format).
Definition: timecode.c:168
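A hedged sketch of formatting the GOP header timecode for logging (the log_gop_timecode() helper is hypothetical); the string comes out as HH:MM:SS:FF, with ';' between seconds and frames when the drop-frame flag is set:

#include <stdint.h>
#include <libavutil/log.h>
#include <libavutil/timecode.h>

/* Hypothetical helper: print the 25-bit MPEG GOP timecode. */
static void log_gop_timecode(void *log_ctx, uint32_t tc25bit)
{
    char tcbuf[AV_TIMECODE_STR_SIZE];

    av_timecode_make_mpeg_tc_string(tcbuf, tc25bit);
    av_log(log_ctx, AV_LOG_DEBUG, "GOP timecode %s\n", tcbuf);
}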
MpegEncContext::intra_dc_precision
int intra_dc_precision
Definition: mpegvideo.h:438
AVCodecContext::execute
int(* execute)(struct AVCodecContext *c, int(*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size)
The codec may call this to execute several independent things.
Definition: avcodec.h:1612
SHOW_UBITS
#define SHOW_UBITS(name, gb, num)
Definition: get_bits.h:259
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:281
mpeg12dec.h
AVCHROMA_LOC_CENTER
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
Definition: pixfmt.h:705
AVRational::den
int den
Denominator.
Definition: rational.h:60
error_resilience.h
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
FF_HW_CALL
#define FF_HW_CALL(avctx, function,...)
Definition: hwaccel_internal.h:171
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:235
AVCodecContext::profile
int profile
profile
Definition: avcodec.h:1639
AVFrame::metadata
AVDictionary * metadata
metadata.
Definition: frame.h:662
Mpeg1Context::cc_format
enum Mpeg2ClosedCaptionsFormat cc_format
Definition: mpeg12dec.c:80
sign_extend
static av_const int sign_extend(int val, unsigned bits)
Definition: mathops.h:133
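The idea behind this helper can be shown with a stand-alone sketch; this is an illustrative reimplementation (not FFmpeg's code) of interpreting the low `bits` bits of a value as a two's-complement number:

/* Illustrative only: sign-extend the low `bits` bits of val. */
static int sign_extend_demo(int val, unsigned bits)
{
    unsigned shift = 8 * (unsigned)sizeof(int) - bits;
    union { unsigned u; int s; } v = { (unsigned)val << shift };
    return v.s >> shift;               /* arithmetic shift restores the sign */
}
/* sign_extend_demo(0x1f, 5) == -1, sign_extend_demo(0x0f, 5) == 15 */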
ff_mpv_frame_end
void ff_mpv_frame_end(MpegEncContext *s)
Definition: mpegvideo_dec.c:490
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:112
Mpeg1Context::slice_count
int slice_count
Definition: mpeg12dec.c:83
AVCodecContext::ticks_per_frame
attribute_deprecated int ticks_per_frame
For some codecs, the time base is closer to the field rate than the frame rate.
Definition: avcodec.h:576
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: codec.h:76
FF_CODEC_PROPERTY_CLOSED_CAPTIONS
#define FF_CODEC_PROPERTY_CLOSED_CAPTIONS
Definition: avcodec.h:1797
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
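A hedged sketch of a typical use, scaling a nominal rate by a rational factor (the scale_frame_rate() helper is hypothetical); the result is reduced to lowest terms:

#include <libavutil/rational.h>

/* Hypothetical helper: apply a rational scale factor to a base rate. */
static AVRational scale_frame_rate(AVRational base, AVRational ext)
{
    return av_mul_q(base, ext);
}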
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
av_buffer_realloc
int av_buffer_realloc(AVBufferRef **pbuf, size_t size)
Reallocate a given buffer.
Definition: buffer.c:183
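A minimal sketch of growing a reusable buffer (the ensure_capacity() helper is hypothetical); a NULL *buf is allowed, in which case a new buffer is allocated:

#include <libavutil/buffer.h>

/* Hypothetical helper: make sure *buf holds at least `needed` bytes. */
static int ensure_capacity(AVBufferRef **buf, size_t needed)
{
    if (*buf && (*buf)->size >= needed)
        return 0;
    return av_buffer_realloc(buf, needed);
}

When the buffer is no longer needed it would be released with av_buffer_unref().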
ff_mb_ptype_vlc
VLCElem ff_mb_ptype_vlc[64]
Definition: mpeg12.c:124
AVCodecContext::debug
int debug
debug
Definition: avcodec.h:1396
FF_DISABLE_DEPRECATION_WARNINGS
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:72
AVCodecContext::coded_width
int coded_width
Bitstream width / height, may be different from width/height e.g.
Definition: avcodec.h:633
get_dmv
static int get_dmv(MpegEncContext *s)
Definition: mpeg12dec.c:414
tc
#define tc
Definition: regdef.h:69
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:280
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
mpeg_decode_end
static av_cold int mpeg_decode_end(AVCodecContext *avctx)
Definition: mpeg12dec.c:2592
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
MpegEncContext::inter_scantable
ScanTable inter_scantable
if inter == intra then intra should be used to reduce the cache usage
Definition: mpegvideo.h:76
IDCTDSPContext::idct_permutation
uint8_t idct_permutation[64]
IDCT input permutation.
Definition: idctdsp.h:86
ff_ipu_decoder
const FFCodec ff_ipu_decoder
Definition: mpeg12dec.c:2867
av_stereo3d_create_side_data
AVStereo3D * av_stereo3d_create_side_data(AVFrame *frame)
Allocate a complete AVFrameSideData and add it to the frame.
Definition: stereo3d.c:34
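A hedged sketch of attaching stereo 3D side data to a frame (the tag_top_bottom() helper and the chosen packing are illustrative):

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/stereo3d.h>

/* Hypothetical helper: tag a frame as top-bottom packed stereo. */
static int tag_top_bottom(AVFrame *frame)
{
    AVStereo3D *stereo = av_stereo3d_create_side_data(frame);

    if (!stereo)
        return AVERROR(ENOMEM);
    stereo->type = AV_STEREO3D_TOPBOTTOM;
    return 0;
}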
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:250
ER_MV_END
#define ER_MV_END
Definition: error_resilience.h:35
MpegEncContext::q_scale_type
int q_scale_type
Definition: mpegvideo.h:442
AVCodecContext::codec_tag
unsigned int codec_tag
fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A').
Definition: avcodec.h:470
Mpeg1Context::mpeg_enc_ctx
MpegEncContext mpeg_enc_ctx
Definition: mpeg12dec.c:73
ff_tlog
#define ff_tlog(ctx,...)
Definition: internal.h:153
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
MV_DIR_FORWARD
#define MV_DIR_FORWARD
Definition: mpegvideo.h:257
AVPacket
This structure stores compressed data.
Definition: packet.h:499
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:472
ScanTable::permutated
uint8_t permutated[64]
Definition: mpegvideo.h:60
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:88
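A small sketch of storing a string entry in a frame's metadata dictionary, for example a timecode string (the set_timecode_meta() helper is hypothetical); with flags == 0 both key and value are duplicated by the dictionary:

#include <libavutil/dict.h>
#include <libavutil/frame.h>

/* Hypothetical helper: attach a timecode string to a frame's metadata. */
static int set_timecode_meta(AVFrame *frame, const char *tc)
{
    return av_dict_set(&frame->metadata, "timecode", tc, 0);
}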
mpeg_get_qscale
static int mpeg_get_qscale(MpegEncContext *s)
Definition: mpegvideodec.h:64
mpeg_decode_sequence_extension
static void mpeg_decode_sequence_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1085
HWACCEL_VAAPI
#define HWACCEL_VAAPI(codec)
Definition: hwconfig.h:70
mpeg_er.h
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:618
int32_t
int32_t
Definition: audioconvert.c:56
imgutils.h
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:482
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:389
AV_CODEC_CAP_DRAW_HORIZ_BAND
#define AV_CODEC_CAP_DRAW_HORIZ_BAND
Decoder can use draw_horiz_band callback.
Definition: codec.h:44
block
The exact code depends on how similar the blocks are and how related they are to the block
Definition: filter_design.txt:207
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
Mpeg1Context::frame_rate_ext
AVRational frame_rate_ext
Definition: mpeg12dec.c:87
mpeg_decode_motion
static int mpeg_decode_motion(MpegEncContext *s, int fcode, int pred)
Definition: mpeg12dec.c:124
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
IPUContext::block
int16_t block[6][64]
Definition: mpeg12dec.c:2724
ff_mb_btype_vlc
VLCElem ff_mb_btype_vlc[64]
Definition: mpeg12.c:125
mpeg_decode_user_data
static void mpeg_decode_user_data(AVCodecContext *avctx, const uint8_t *p, int buf_size)
Definition: mpeg12dec.c:2073
h
h
Definition: vp9dsp_template.c:2038
MpegEncContext::end_mb_y
int end_mb_y
end mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y)
Definition: mpegvideo.h:143
Mpeg2ClosedCaptionsFormat
Mpeg2ClosedCaptionsFormat
Definition: mpeg12dec.c:65
ER_AC_END
#define ER_AC_END
Definition: error_resilience.h:33
AVStereo3D
Stereo 3D type: this structure describes how two videos are packed within a single video surface,...
Definition: stereo3d.h:173
av_image_check_sar
int av_image_check_sar(unsigned int w, unsigned int h, AVRational sar)
Check if the given sample aspect ratio of an image is valid.
Definition: imgutils.c:323
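A hedged sketch of validating a sample aspect ratio before accepting it (the set_sar_checked() helper is hypothetical); av_image_check_sar() returns 0 when the SAR is usable for the given picture size and a negative error code otherwise:

#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>

/* Hypothetical helper: keep the SAR only if it is valid, else mark unknown. */
static void set_sar_checked(AVCodecContext *avctx, AVRational sar)
{
    if (av_image_check_sar(avctx->width, avctx->height, sar) >= 0)
        avctx->sample_aspect_ratio = sar;
    else
        avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
}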
MV_VLC_BITS
#define MV_VLC_BITS
Definition: mpeg12vlc.h:34
Mpeg1Context::timecode_frame_start
int64_t timecode_frame_start
Definition: mpeg12dec.c:94
MpegEncContext::start_mb_y
int start_mb_y
start mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y)
Definition: mpegvideo.h:142
AVDISCARD_NONREF
@ AVDISCARD_NONREF
discard all non reference
Definition: defs.h:215
MpegEncContext::alternate_scan
int alternate_scan
Definition: mpegvideo.h:445
DECODE_SLICE_OK
#define DECODE_SLICE_OK
Definition: mpeg12dec.c:1388
AV_CODEC_ID_MPEG2VIDEO
@ AV_CODEC_ID_MPEG2VIDEO
preferred ID for MPEG-1/2 video decoding
Definition: codec_id.h:54
DECODE_SLICE_ERROR
#define DECODE_SLICE_ERROR
Definition: mpeg12dec.c:1387
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:244
MpegEncContext
MpegEncContext.
Definition: mpegvideo.h:67
load_matrix
static int load_matrix(MpegEncContext *s, uint16_t matrix0[64], uint16_t matrix1[64], int intra)
Definition: mpeg12dec.c:1186
AVCodecContext::sample_aspect_ratio
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:642
decode_dc
static int decode_dc(GetBitContext *gb, int component)
Definition: mpeg12dec.h:28
Mpeg1Context::afd
uint8_t afd
Definition: mpeg12dec.c:81
Mpeg1Context
Definition: mpeg12dec.c:72
MpegEncContext::chroma_intra_matrix
uint16_t chroma_intra_matrix[64]
Definition: mpegvideo.h:296
mpeg_decode_picture_coding_extension
static int mpeg_decode_picture_coding_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1223
Mpeg1Context::extradata_decoded
int extradata_decoded
Definition: mpeg12dec.c:93
mpeg2_decode_block_non_intra
static int mpeg2_decode_block_non_intra(MpegEncContext *s, int16_t *block, int n)
Definition: mpeg12dec.c:244
MB_TYPE_INTRA
#define MB_TYPE_INTRA
Definition: mpegutils.h:66
MBINCR_VLC_BITS
#define MBINCR_VLC_BITS
Definition: mpeg12vlc.h:37
mpeg_decode_slice
static int mpeg_decode_slice(MpegEncContext *s, int mb_y, const uint8_t **buf, int buf_size)
Decode a slice.
Definition: mpeg12dec.c:1396