/* FFmpeg — libavcodec/h264_slice.c (doxygen page navigation text removed) */
/*
 * H.26L/H.264/AVC/JVT/14496-10/... decoder
 * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * H.264 / AVC / MPEG-4 part10 codec.
 * @author Michael Niedermayer <michaelni@gmx.at>
 */
27 
#include "libavutil/avassert.h"
#include "libavutil/display.h"
#include "libavutil/imgutils.h"
#include "libavutil/stereo3d.h"
#include "internal.h"
#include "cabac.h"
#include "cabac_functions.h"
#include "error_resilience.h"
#include "avcodec.h"
#include "h264.h"
#include "h264dec.h"
#include "h264data.h"
#include "h264chroma.h"
#include "h264_mvpred.h"
#include "h264_ps.h"
#include "golomb.h"
#include "mathops.h"
#include "mpegutils.h"
#include "mpegvideo.h"
#include "rectangle.h"
#include "thread.h"
49 
/* 4x4 coefficient scan order for field (interlaced) macroblocks.
 * Each entry encodes a (column + row * 4) position in the 4x4 block.
 * Declared [16+1]: the extra trailing element is implicitly
 * zero-initialized padding (over-read guard). */
static const uint8_t field_scan[16+1] = {
    0 + 0 * 4, 0 + 1 * 4, 1 + 0 * 4, 0 + 2 * 4,
    0 + 3 * 4, 1 + 1 * 4, 1 + 2 * 4, 1 + 3 * 4,
    2 + 0 * 4, 2 + 1 * 4, 2 + 2 * 4, 2 + 3 * 4,
    3 + 0 * 4, 3 + 1 * 4, 3 + 2 * 4, 3 + 3 * 4,
};
56 
/* 8x8 coefficient scan order for field (interlaced) macroblocks.
 * Each entry encodes a (column + row * 8) position in the 8x8 block.
 * Declared [64+1]: trailing element is zero padding. */
static const uint8_t field_scan8x8[64+1] = {
    0 + 0 * 8, 0 + 1 * 8, 0 + 2 * 8, 1 + 0 * 8,
    1 + 1 * 8, 0 + 3 * 8, 0 + 4 * 8, 1 + 2 * 8,
    2 + 0 * 8, 1 + 3 * 8, 0 + 5 * 8, 0 + 6 * 8,
    0 + 7 * 8, 1 + 4 * 8, 2 + 1 * 8, 3 + 0 * 8,
    2 + 2 * 8, 1 + 5 * 8, 1 + 6 * 8, 1 + 7 * 8,
    2 + 3 * 8, 3 + 1 * 8, 4 + 0 * 8, 3 + 2 * 8,
    2 + 4 * 8, 2 + 5 * 8, 2 + 6 * 8, 2 + 7 * 8,
    3 + 3 * 8, 4 + 1 * 8, 5 + 0 * 8, 4 + 2 * 8,
    3 + 4 * 8, 3 + 5 * 8, 3 + 6 * 8, 3 + 7 * 8,
    4 + 3 * 8, 5 + 1 * 8, 6 + 0 * 8, 5 + 2 * 8,
    4 + 4 * 8, 4 + 5 * 8, 4 + 6 * 8, 4 + 7 * 8,
    5 + 3 * 8, 6 + 1 * 8, 6 + 2 * 8, 5 + 4 * 8,
    5 + 5 * 8, 5 + 6 * 8, 5 + 7 * 8, 6 + 3 * 8,
    7 + 0 * 8, 7 + 1 * 8, 6 + 4 * 8, 6 + 5 * 8,
    6 + 6 * 8, 6 + 7 * 8, 7 + 2 * 8, 7 + 3 * 8,
    7 + 4 * 8, 7 + 5 * 8, 7 + 6 * 8, 7 + 7 * 8,
};
75 
/* Field 8x8 scan reordered for CAVLC decoding: the CAVLC decoder walks
 * coefficients in a column-interleaved order relative to field_scan8x8.
 * Entries encode (column + row * 8); trailing element is zero padding. */
static const uint8_t field_scan8x8_cavlc[64+1] = {
    0 + 0 * 8, 1 + 1 * 8, 2 + 0 * 8, 0 + 7 * 8,
    2 + 2 * 8, 2 + 3 * 8, 2 + 4 * 8, 3 + 3 * 8,
    3 + 4 * 8, 4 + 3 * 8, 4 + 4 * 8, 5 + 3 * 8,
    5 + 5 * 8, 7 + 0 * 8, 6 + 6 * 8, 7 + 4 * 8,
    0 + 1 * 8, 0 + 3 * 8, 1 + 3 * 8, 1 + 4 * 8,
    1 + 5 * 8, 3 + 1 * 8, 2 + 5 * 8, 4 + 1 * 8,
    3 + 5 * 8, 5 + 1 * 8, 4 + 5 * 8, 6 + 1 * 8,
    5 + 6 * 8, 7 + 1 * 8, 6 + 7 * 8, 7 + 5 * 8,
    0 + 2 * 8, 0 + 4 * 8, 0 + 5 * 8, 2 + 1 * 8,
    1 + 6 * 8, 4 + 0 * 8, 2 + 6 * 8, 5 + 0 * 8,
    3 + 6 * 8, 6 + 0 * 8, 4 + 6 * 8, 6 + 2 * 8,
    5 + 7 * 8, 6 + 4 * 8, 7 + 2 * 8, 7 + 6 * 8,
    1 + 0 * 8, 1 + 2 * 8, 0 + 6 * 8, 3 + 0 * 8,
    1 + 7 * 8, 3 + 2 * 8, 2 + 7 * 8, 4 + 2 * 8,
    3 + 7 * 8, 5 + 2 * 8, 4 + 7 * 8, 5 + 4 * 8,
    6 + 3 * 8, 6 + 5 * 8, 7 + 3 * 8, 7 + 7 * 8,
};
94 
/* Progressive 8x8 zigzag scan reordered for CAVLC decoding:
 * zigzag_scan8x8_cavlc[i] = zigzag_scan8x8[(i/4) + 16*(i%4)]
 * Entries encode (column + row * 8); trailing element is zero padding. */
static const uint8_t zigzag_scan8x8_cavlc[64+1] = {
    0 + 0 * 8, 1 + 1 * 8, 1 + 2 * 8, 2 + 2 * 8,
    4 + 1 * 8, 0 + 5 * 8, 3 + 3 * 8, 7 + 0 * 8,
    3 + 4 * 8, 1 + 7 * 8, 5 + 3 * 8, 6 + 3 * 8,
    2 + 7 * 8, 6 + 4 * 8, 5 + 6 * 8, 7 + 5 * 8,
    1 + 0 * 8, 2 + 0 * 8, 0 + 3 * 8, 3 + 1 * 8,
    3 + 2 * 8, 0 + 6 * 8, 4 + 2 * 8, 6 + 1 * 8,
    2 + 5 * 8, 2 + 6 * 8, 6 + 2 * 8, 5 + 4 * 8,
    3 + 7 * 8, 7 + 3 * 8, 4 + 7 * 8, 7 + 6 * 8,
    0 + 1 * 8, 3 + 0 * 8, 0 + 4 * 8, 4 + 0 * 8,
    2 + 3 * 8, 1 + 5 * 8, 5 + 1 * 8, 5 + 2 * 8,
    1 + 6 * 8, 3 + 5 * 8, 7 + 1 * 8, 4 + 5 * 8,
    4 + 6 * 8, 7 + 4 * 8, 5 + 7 * 8, 6 + 7 * 8,
    0 + 2 * 8, 2 + 1 * 8, 1 + 3 * 8, 5 + 0 * 8,
    1 + 4 * 8, 2 + 4 * 8, 6 + 0 * 8, 4 + 3 * 8,
    0 + 7 * 8, 4 + 4 * 8, 7 + 2 * 8, 3 + 6 * 8,
    5 + 5 * 8, 6 + 5 * 8, 6 + 6 * 8, 7 + 7 * 8,
};
114 
115 static void release_unused_pictures(H264Context *h, int remove_current)
116 {
117  int i;
118 
119  /* release non reference frames */
120  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
121  if (h->DPB[i].f->buf[0] && !h->DPB[i].reference &&
122  (remove_current || &h->DPB[i] != h->cur_pic_ptr)) {
123  ff_h264_unref_picture(h, &h->DPB[i]);
124  }
125  }
126 }
127 
128 static int alloc_scratch_buffers(H264SliceContext *sl, int linesize)
129 {
130  const H264Context *h = sl->h264;
131  int alloc_size = FFALIGN(FFABS(linesize) + 32, 32);
132 
133  av_fast_malloc(&sl->bipred_scratchpad, &sl->bipred_scratchpad_allocated, 16 * 6 * alloc_size);
134  // edge emu needs blocksize + filter length - 1
135  // (= 21x21 for H.264)
136  av_fast_malloc(&sl->edge_emu_buffer, &sl->edge_emu_buffer_allocated, alloc_size * 2 * 21);
137 
139  h->mb_width * 16 * 3 * sizeof(uint8_t) * 2);
141  h->mb_width * 16 * 3 * sizeof(uint8_t) * 2);
142 
143  if (!sl->bipred_scratchpad || !sl->edge_emu_buffer ||
144  !sl->top_borders[0] || !sl->top_borders[1]) {
147  av_freep(&sl->top_borders[0]);
148  av_freep(&sl->top_borders[1]);
149 
152  sl->top_borders_allocated[0] = 0;
153  sl->top_borders_allocated[1] = 0;
154  return AVERROR(ENOMEM);
155  }
156 
157  return 0;
158 }
159 
161 {
162  const int big_mb_num = h->mb_stride * (h->mb_height + 1) + 1;
163  const int mb_array_size = h->mb_stride * h->mb_height;
164  const int b4_stride = h->mb_width * 4 + 1;
165  const int b4_array_size = b4_stride * h->mb_height * 4;
166 
167  h->qscale_table_pool = av_buffer_pool_init(big_mb_num + h->mb_stride,
169  h->mb_type_pool = av_buffer_pool_init((big_mb_num + h->mb_stride) *
170  sizeof(uint32_t), av_buffer_allocz);
171  h->motion_val_pool = av_buffer_pool_init(2 * (b4_array_size + 4) *
172  sizeof(int16_t), av_buffer_allocz);
173  h->ref_index_pool = av_buffer_pool_init(4 * mb_array_size, av_buffer_allocz);
174 
175  if (!h->qscale_table_pool || !h->mb_type_pool || !h->motion_val_pool ||
176  !h->ref_index_pool) {
181  return AVERROR(ENOMEM);
182  }
183 
184  return 0;
185 }
186 
188 {
189  int i, ret = 0;
190 
191  av_assert0(!pic->f->data[0]);
192 
193  pic->tf.f = pic->f;
194  ret = ff_thread_get_buffer(h->avctx, &pic->tf, pic->reference ?
196  if (ret < 0)
197  goto fail;
198 
199  if (h->avctx->hwaccel) {
200  const AVHWAccel *hwaccel = h->avctx->hwaccel;
202  if (hwaccel->frame_priv_data_size) {
204  if (!pic->hwaccel_priv_buf)
205  return AVERROR(ENOMEM);
207  }
208  }
209  if (CONFIG_GRAY && !h->avctx->hwaccel && h->flags & AV_CODEC_FLAG_GRAY && pic->f->data[2]) {
210  int h_chroma_shift, v_chroma_shift;
212  &h_chroma_shift, &v_chroma_shift);
213 
214  for(i=0; i<AV_CEIL_RSHIFT(pic->f->height, v_chroma_shift); i++) {
215  memset(pic->f->data[1] + pic->f->linesize[1]*i,
216  0x80, AV_CEIL_RSHIFT(pic->f->width, h_chroma_shift));
217  memset(pic->f->data[2] + pic->f->linesize[2]*i,
218  0x80, AV_CEIL_RSHIFT(pic->f->width, h_chroma_shift));
219  }
220  }
221 
222  if (!h->qscale_table_pool) {
223  ret = init_table_pools(h);
224  if (ret < 0)
225  goto fail;
226  }
227 
230  if (!pic->qscale_table_buf || !pic->mb_type_buf)
231  goto fail;
232 
233  pic->mb_type = (uint32_t*)pic->mb_type_buf->data + 2 * h->mb_stride + 1;
234  pic->qscale_table = pic->qscale_table_buf->data + 2 * h->mb_stride + 1;
235 
236  for (i = 0; i < 2; i++) {
239  if (!pic->motion_val_buf[i] || !pic->ref_index_buf[i])
240  goto fail;
241 
242  pic->motion_val[i] = (int16_t (*)[2])pic->motion_val_buf[i]->data + 4;
243  pic->ref_index[i] = pic->ref_index_buf[i]->data;
244  }
245 
246  return 0;
247 fail:
248  ff_h264_unref_picture(h, pic);
249  return (ret < 0) ? ret : AVERROR(ENOMEM);
250 }
251 
253 {
254  int i;
255 
256  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
257  if (!h->DPB[i].f->buf[0])
258  return i;
259  }
260  return AVERROR_INVALIDDATA;
261 }
262 
263 
/* True if pointer a lies inside the object of `size` bytes starting at b. */
#define IN_RANGE(a, b, size) (((void*)(a) >= (void*)(b)) && ((void*)(a) < (void*)((b) + (size))))

/* Translate a picture pointer from old_ctx's DPB to the same slot in
 * new_ctx's DPB; pointers outside the DPB map to NULL. */
#define REBASE_PICTURE(pic, new_ctx, old_ctx)             \
    (((pic) && (pic) >= (old_ctx)->DPB &&                 \
      (pic) < (old_ctx)->DPB + H264_MAX_PICTURE_COUNT) ?  \
     &(new_ctx)->DPB[(pic) - (old_ctx)->DPB] : NULL)
270 
272  H264Context *new_base,
273  H264Context *old_base)
274 {
275  int i;
276 
277  for (i = 0; i < count; i++) {
278  av_assert1(!from[i] ||
279  IN_RANGE(from[i], old_base, 1) ||
280  IN_RANGE(from[i], old_base->DPB, H264_MAX_PICTURE_COUNT));
281  to[i] = REBASE_PICTURE(from[i], new_base, old_base);
282  }
283 }
284 
286 
288  const AVCodecContext *src)
289 {
290  H264Context *h = dst->priv_data, *h1 = src->priv_data;
291  int inited = h->context_initialized, err = 0;
292  int need_reinit = 0;
293  int i, ret;
294 
295  if (dst == src)
296  return 0;
297 
298  // We can't fail if SPS isn't set at it breaks current skip_frame code
299  //if (!h1->ps.sps)
300  // return AVERROR_INVALIDDATA;
301 
302  if (inited &&
303  (h->width != h1->width ||
304  h->height != h1->height ||
305  h->mb_width != h1->mb_width ||
306  h->mb_height != h1->mb_height ||
307  !h->ps.sps ||
308  h->ps.sps->bit_depth_luma != h1->ps.sps->bit_depth_luma ||
309  h->ps.sps->chroma_format_idc != h1->ps.sps->chroma_format_idc ||
310  h->ps.sps->colorspace != h1->ps.sps->colorspace)) {
311  need_reinit = 1;
312  }
313 
314  /* copy block_offset since frame_start may not be called */
315  memcpy(h->block_offset, h1->block_offset, sizeof(h->block_offset));
316 
317  // SPS/PPS
318  for (i = 0; i < FF_ARRAY_ELEMS(h->ps.sps_list); i++) {
319  av_buffer_unref(&h->ps.sps_list[i]);
320  if (h1->ps.sps_list[i]) {
321  h->ps.sps_list[i] = av_buffer_ref(h1->ps.sps_list[i]);
322  if (!h->ps.sps_list[i])
323  return AVERROR(ENOMEM);
324  }
325  }
326  for (i = 0; i < FF_ARRAY_ELEMS(h->ps.pps_list); i++) {
327  av_buffer_unref(&h->ps.pps_list[i]);
328  if (h1->ps.pps_list[i]) {
329  h->ps.pps_list[i] = av_buffer_ref(h1->ps.pps_list[i]);
330  if (!h->ps.pps_list[i])
331  return AVERROR(ENOMEM);
332  }
333  }
334 
337  h->ps.pps = NULL;
338  h->ps.sps = NULL;
339  if (h1->ps.pps_ref) {
340  h->ps.pps_ref = av_buffer_ref(h1->ps.pps_ref);
341  if (!h->ps.pps_ref)
342  return AVERROR(ENOMEM);
343  h->ps.pps = (const PPS*)h->ps.pps_ref->data;
344  }
345  if (h1->ps.sps_ref) {
346  h->ps.sps_ref = av_buffer_ref(h1->ps.sps_ref);
347  if (!h->ps.sps_ref)
348  return AVERROR(ENOMEM);
349  h->ps.sps = (const SPS*)h->ps.sps_ref->data;
350  }
351 
352  if (need_reinit || !inited) {
353  h->width = h1->width;
354  h->height = h1->height;
355  h->mb_height = h1->mb_height;
356  h->mb_width = h1->mb_width;
357  h->mb_num = h1->mb_num;
358  h->mb_stride = h1->mb_stride;
359  h->b_stride = h1->b_stride;
360  h->x264_build = h1->x264_build;
361 
362  if (h->context_initialized || h1->context_initialized) {
363  if ((err = h264_slice_header_init(h)) < 0) {
364  av_log(h->avctx, AV_LOG_ERROR, "h264_slice_header_init() failed");
365  return err;
366  }
367  }
368 
369  /* copy block_offset since frame_start may not be called */
370  memcpy(h->block_offset, h1->block_offset, sizeof(h->block_offset));
371  }
372 
373  h->avctx->coded_height = h1->avctx->coded_height;
374  h->avctx->coded_width = h1->avctx->coded_width;
375  h->avctx->width = h1->avctx->width;
376  h->avctx->height = h1->avctx->height;
377  h->width_from_caller = h1->width_from_caller;
378  h->height_from_caller = h1->height_from_caller;
379  h->coded_picture_number = h1->coded_picture_number;
380  h->first_field = h1->first_field;
381  h->picture_structure = h1->picture_structure;
382  h->mb_aff_frame = h1->mb_aff_frame;
383  h->droppable = h1->droppable;
384 
385  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
386  ff_h264_unref_picture(h, &h->DPB[i]);
387  if (h1->DPB[i].f->buf[0] &&
388  (ret = ff_h264_ref_picture(h, &h->DPB[i], &h1->DPB[i])) < 0)
389  return ret;
390  }
391 
392  h->cur_pic_ptr = REBASE_PICTURE(h1->cur_pic_ptr, h, h1);
394  if (h1->cur_pic.f->buf[0]) {
395  ret = ff_h264_ref_picture(h, &h->cur_pic, &h1->cur_pic);
396  if (ret < 0)
397  return ret;
398  }
399 
400  h->enable_er = h1->enable_er;
401  h->workaround_bugs = h1->workaround_bugs;
402  h->droppable = h1->droppable;
403 
404  // extradata/NAL handling
405  h->is_avc = h1->is_avc;
406  h->nal_length_size = h1->nal_length_size;
407 
408  memcpy(&h->poc, &h1->poc, sizeof(h->poc));
409 
410  memcpy(h->short_ref, h1->short_ref, sizeof(h->short_ref));
411  memcpy(h->long_ref, h1->long_ref, sizeof(h->long_ref));
412  memcpy(h->delayed_pic, h1->delayed_pic, sizeof(h->delayed_pic));
413  memcpy(h->last_pocs, h1->last_pocs, sizeof(h->last_pocs));
414 
415  h->next_output_pic = h1->next_output_pic;
416  h->next_outputed_poc = h1->next_outputed_poc;
417 
418  memcpy(h->mmco, h1->mmco, sizeof(h->mmco));
419  h->nb_mmco = h1->nb_mmco;
420  h->mmco_reset = h1->mmco_reset;
421  h->explicit_ref_marking = h1->explicit_ref_marking;
422  h->long_ref_count = h1->long_ref_count;
423  h->short_ref_count = h1->short_ref_count;
424 
425  copy_picture_range(h->short_ref, h1->short_ref, 32, h, h1);
426  copy_picture_range(h->long_ref, h1->long_ref, 32, h, h1);
427  copy_picture_range(h->delayed_pic, h1->delayed_pic,
428  MAX_DELAYED_PIC_COUNT + 2, h, h1);
429 
430  h->frame_recovered = h1->frame_recovered;
431 
433  if (h1->sei.a53_caption.buf_ref) {
434  h->sei.a53_caption.buf_ref = av_buffer_ref(h1->sei.a53_caption.buf_ref);
435  if (!h->sei.a53_caption.buf_ref)
436  return AVERROR(ENOMEM);
437  }
438 
439  if (!h->cur_pic_ptr)
440  return 0;
441 
442  if (!h->droppable) {
444  h->poc.prev_poc_msb = h->poc.poc_msb;
445  h->poc.prev_poc_lsb = h->poc.poc_lsb;
446  }
449 
450  h->recovery_frame = h1->recovery_frame;
451 
452  return err;
453 }
454 
456 {
457  H264Picture *pic;
458  int i, ret;
459  const int pixel_shift = h->pixel_shift;
460 
461  if (!ff_thread_can_start_frame(h->avctx)) {
462  av_log(h->avctx, AV_LOG_ERROR, "Attempt to start a frame outside SETUP state\n");
463  return -1;
464  }
465 
467  h->cur_pic_ptr = NULL;
468 
469  i = find_unused_picture(h);
470  if (i < 0) {
471  av_log(h->avctx, AV_LOG_ERROR, "no frame buffer available\n");
472  return i;
473  }
474  pic = &h->DPB[i];
475 
476  pic->reference = h->droppable ? 0 : h->picture_structure;
479  pic->frame_num = h->poc.frame_num;
480  /*
481  * Zero key_frame here; IDR markings per slice in frame or fields are ORed
482  * in later.
483  * See decode_nal_units().
484  */
485  pic->f->key_frame = 0;
486  pic->mmco_reset = 0;
487  pic->recovered = 0;
488  pic->invalid_gap = 0;
490 
491  pic->f->pict_type = h->slice_ctx[0].slice_type;
492 
493  pic->f->crop_left = h->crop_left;
494  pic->f->crop_right = h->crop_right;
495  pic->f->crop_top = h->crop_top;
496  pic->f->crop_bottom = h->crop_bottom;
497 
498  if ((ret = alloc_picture(h, pic)) < 0)
499  return ret;
500 
501  h->cur_pic_ptr = pic;
503  if (CONFIG_ERROR_RESILIENCE) {
505  }
506 
507  if ((ret = ff_h264_ref_picture(h, &h->cur_pic, h->cur_pic_ptr)) < 0)
508  return ret;
509 
510  for (i = 0; i < h->nb_slice_ctx; i++) {
511  h->slice_ctx[i].linesize = h->cur_pic_ptr->f->linesize[0];
512  h->slice_ctx[i].uvlinesize = h->cur_pic_ptr->f->linesize[1];
513  }
514 
515  if (CONFIG_ERROR_RESILIENCE && h->enable_er) {
519  }
520 
521  for (i = 0; i < 16; i++) {
522  h->block_offset[i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 4 * pic->f->linesize[0] * ((scan8[i] - scan8[0]) >> 3);
523  h->block_offset[48 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 8 * pic->f->linesize[0] * ((scan8[i] - scan8[0]) >> 3);
524  }
525  for (i = 0; i < 16; i++) {
526  h->block_offset[16 + i] =
527  h->block_offset[32 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 4 * pic->f->linesize[1] * ((scan8[i] - scan8[0]) >> 3);
528  h->block_offset[48 + 16 + i] =
529  h->block_offset[48 + 32 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 8 * pic->f->linesize[1] * ((scan8[i] - scan8[0]) >> 3);
530  }
531 
532  /* We mark the current picture as non-reference after allocating it, so
533  * that if we break out due to an error it can be released automatically
534  * in the next ff_mpv_frame_start().
535  */
536  h->cur_pic_ptr->reference = 0;
537 
538  h->cur_pic_ptr->field_poc[0] = h->cur_pic_ptr->field_poc[1] = INT_MAX;
539 
540  h->next_output_pic = NULL;
541 
542  h->postpone_filter = 0;
543 
545 
546  if (h->sei.unregistered.x264_build >= 0)
548 
549  assert(h->cur_pic_ptr->long_ref == 0);
550 
551  return 0;
552 }
553 
555  uint8_t *src_y,
556  uint8_t *src_cb, uint8_t *src_cr,
557  int linesize, int uvlinesize,
558  int simple)
559 {
560  uint8_t *top_border;
561  int top_idx = 1;
562  const int pixel_shift = h->pixel_shift;
563  int chroma444 = CHROMA444(h);
564  int chroma422 = CHROMA422(h);
565 
566  src_y -= linesize;
567  src_cb -= uvlinesize;
568  src_cr -= uvlinesize;
569 
570  if (!simple && FRAME_MBAFF(h)) {
571  if (sl->mb_y & 1) {
572  if (!MB_MBAFF(sl)) {
573  top_border = sl->top_borders[0][sl->mb_x];
574  AV_COPY128(top_border, src_y + 15 * linesize);
575  if (pixel_shift)
576  AV_COPY128(top_border + 16, src_y + 15 * linesize + 16);
577  if (simple || !CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
578  if (chroma444) {
579  if (pixel_shift) {
580  AV_COPY128(top_border + 32, src_cb + 15 * uvlinesize);
581  AV_COPY128(top_border + 48, src_cb + 15 * uvlinesize + 16);
582  AV_COPY128(top_border + 64, src_cr + 15 * uvlinesize);
583  AV_COPY128(top_border + 80, src_cr + 15 * uvlinesize + 16);
584  } else {
585  AV_COPY128(top_border + 16, src_cb + 15 * uvlinesize);
586  AV_COPY128(top_border + 32, src_cr + 15 * uvlinesize);
587  }
588  } else if (chroma422) {
589  if (pixel_shift) {
590  AV_COPY128(top_border + 32, src_cb + 15 * uvlinesize);
591  AV_COPY128(top_border + 48, src_cr + 15 * uvlinesize);
592  } else {
593  AV_COPY64(top_border + 16, src_cb + 15 * uvlinesize);
594  AV_COPY64(top_border + 24, src_cr + 15 * uvlinesize);
595  }
596  } else {
597  if (pixel_shift) {
598  AV_COPY128(top_border + 32, src_cb + 7 * uvlinesize);
599  AV_COPY128(top_border + 48, src_cr + 7 * uvlinesize);
600  } else {
601  AV_COPY64(top_border + 16, src_cb + 7 * uvlinesize);
602  AV_COPY64(top_border + 24, src_cr + 7 * uvlinesize);
603  }
604  }
605  }
606  }
607  } else if (MB_MBAFF(sl)) {
608  top_idx = 0;
609  } else
610  return;
611  }
612 
613  top_border = sl->top_borders[top_idx][sl->mb_x];
614  /* There are two lines saved, the line above the top macroblock
615  * of a pair, and the line above the bottom macroblock. */
616  AV_COPY128(top_border, src_y + 16 * linesize);
617  if (pixel_shift)
618  AV_COPY128(top_border + 16, src_y + 16 * linesize + 16);
619 
620  if (simple || !CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
621  if (chroma444) {
622  if (pixel_shift) {
623  AV_COPY128(top_border + 32, src_cb + 16 * linesize);
624  AV_COPY128(top_border + 48, src_cb + 16 * linesize + 16);
625  AV_COPY128(top_border + 64, src_cr + 16 * linesize);
626  AV_COPY128(top_border + 80, src_cr + 16 * linesize + 16);
627  } else {
628  AV_COPY128(top_border + 16, src_cb + 16 * linesize);
629  AV_COPY128(top_border + 32, src_cr + 16 * linesize);
630  }
631  } else if (chroma422) {
632  if (pixel_shift) {
633  AV_COPY128(top_border + 32, src_cb + 16 * uvlinesize);
634  AV_COPY128(top_border + 48, src_cr + 16 * uvlinesize);
635  } else {
636  AV_COPY64(top_border + 16, src_cb + 16 * uvlinesize);
637  AV_COPY64(top_border + 24, src_cr + 16 * uvlinesize);
638  }
639  } else {
640  if (pixel_shift) {
641  AV_COPY128(top_border + 32, src_cb + 8 * uvlinesize);
642  AV_COPY128(top_border + 48, src_cr + 8 * uvlinesize);
643  } else {
644  AV_COPY64(top_border + 16, src_cb + 8 * uvlinesize);
645  AV_COPY64(top_border + 24, src_cr + 8 * uvlinesize);
646  }
647  }
648  }
649 }
650 
651 /**
652  * Initialize implicit_weight table.
653  * @param field 0/1 initialize the weight for interlaced MBAFF
654  * -1 initializes the rest
655  */
657 {
658  int ref0, ref1, i, cur_poc, ref_start, ref_count0, ref_count1;
659 
660  for (i = 0; i < 2; i++) {
661  sl->pwt.luma_weight_flag[i] = 0;
662  sl->pwt.chroma_weight_flag[i] = 0;
663  }
664 
665  if (field < 0) {
666  if (h->picture_structure == PICT_FRAME) {
667  cur_poc = h->cur_pic_ptr->poc;
668  } else {
669  cur_poc = h->cur_pic_ptr->field_poc[h->picture_structure - 1];
670  }
671  if (sl->ref_count[0] == 1 && sl->ref_count[1] == 1 && !FRAME_MBAFF(h) &&
672  sl->ref_list[0][0].poc + (int64_t)sl->ref_list[1][0].poc == 2LL * cur_poc) {
673  sl->pwt.use_weight = 0;
674  sl->pwt.use_weight_chroma = 0;
675  return;
676  }
677  ref_start = 0;
678  ref_count0 = sl->ref_count[0];
679  ref_count1 = sl->ref_count[1];
680  } else {
681  cur_poc = h->cur_pic_ptr->field_poc[field];
682  ref_start = 16;
683  ref_count0 = 16 + 2 * sl->ref_count[0];
684  ref_count1 = 16 + 2 * sl->ref_count[1];
685  }
686 
687  sl->pwt.use_weight = 2;
688  sl->pwt.use_weight_chroma = 2;
689  sl->pwt.luma_log2_weight_denom = 5;
691 
692  for (ref0 = ref_start; ref0 < ref_count0; ref0++) {
693  int64_t poc0 = sl->ref_list[0][ref0].poc;
694  for (ref1 = ref_start; ref1 < ref_count1; ref1++) {
695  int w = 32;
696  if (!sl->ref_list[0][ref0].parent->long_ref && !sl->ref_list[1][ref1].parent->long_ref) {
697  int poc1 = sl->ref_list[1][ref1].poc;
698  int td = av_clip_int8(poc1 - poc0);
699  if (td) {
700  int tb = av_clip_int8(cur_poc - poc0);
701  int tx = (16384 + (FFABS(td) >> 1)) / td;
702  int dist_scale_factor = (tb * tx + 32) >> 8;
703  if (dist_scale_factor >= -64 && dist_scale_factor <= 128)
704  w = 64 - dist_scale_factor;
705  }
706  }
707  if (field < 0) {
708  sl->pwt.implicit_weight[ref0][ref1][0] =
709  sl->pwt.implicit_weight[ref0][ref1][1] = w;
710  } else {
711  sl->pwt.implicit_weight[ref0][ref1][field] = w;
712  }
713  }
714  }
715 }
716 
717 /**
718  * initialize scan tables
719  */
721 {
722  int i;
723  for (i = 0; i < 16; i++) {
724 #define TRANSPOSE(x) ((x) >> 2) | (((x) << 2) & 0xF)
726  h->field_scan[i] = TRANSPOSE(field_scan[i]);
727 #undef TRANSPOSE
728  }
729  for (i = 0; i < 64; i++) {
730 #define TRANSPOSE(x) ((x) >> 3) | (((x) & 7) << 3)
735 #undef TRANSPOSE
736  }
737  if (h->ps.sps->transform_bypass) { // FIXME same ugly
738  memcpy(h->zigzag_scan_q0 , ff_zigzag_scan , sizeof(h->zigzag_scan_q0 ));
739  memcpy(h->zigzag_scan8x8_q0 , ff_zigzag_direct , sizeof(h->zigzag_scan8x8_q0 ));
741  memcpy(h->field_scan_q0 , field_scan , sizeof(h->field_scan_q0 ));
742  memcpy(h->field_scan8x8_q0 , field_scan8x8 , sizeof(h->field_scan8x8_q0 ));
744  } else {
745  memcpy(h->zigzag_scan_q0 , h->zigzag_scan , sizeof(h->zigzag_scan_q0 ));
746  memcpy(h->zigzag_scan8x8_q0 , h->zigzag_scan8x8 , sizeof(h->zigzag_scan8x8_q0 ));
748  memcpy(h->field_scan_q0 , h->field_scan , sizeof(h->field_scan_q0 ));
749  memcpy(h->field_scan8x8_q0 , h->field_scan8x8 , sizeof(h->field_scan8x8_q0 ));
751  }
752 }
753 
754 static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
755 {
756 #define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
757  (CONFIG_H264_D3D11VA_HWACCEL * 2) + \
758  CONFIG_H264_NVDEC_HWACCEL + \
759  CONFIG_H264_VAAPI_HWACCEL + \
760  CONFIG_H264_VIDEOTOOLBOX_HWACCEL + \
761  CONFIG_H264_VDPAU_HWACCEL)
763  const enum AVPixelFormat *choices = pix_fmts;
764  int i;
765 
766  switch (h->ps.sps->bit_depth_luma) {
767  case 9:
768  if (CHROMA444(h)) {
769  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
770  *fmt++ = AV_PIX_FMT_GBRP9;
771  } else
772  *fmt++ = AV_PIX_FMT_YUV444P9;
773  } else if (CHROMA422(h))
774  *fmt++ = AV_PIX_FMT_YUV422P9;
775  else
776  *fmt++ = AV_PIX_FMT_YUV420P9;
777  break;
778  case 10:
779  if (CHROMA444(h)) {
780  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
781  *fmt++ = AV_PIX_FMT_GBRP10;
782  } else
783  *fmt++ = AV_PIX_FMT_YUV444P10;
784  } else if (CHROMA422(h))
785  *fmt++ = AV_PIX_FMT_YUV422P10;
786  else
787  *fmt++ = AV_PIX_FMT_YUV420P10;
788  break;
789  case 12:
790  if (CHROMA444(h)) {
791  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
792  *fmt++ = AV_PIX_FMT_GBRP12;
793  } else
794  *fmt++ = AV_PIX_FMT_YUV444P12;
795  } else if (CHROMA422(h))
796  *fmt++ = AV_PIX_FMT_YUV422P12;
797  else
798  *fmt++ = AV_PIX_FMT_YUV420P12;
799  break;
800  case 14:
801  if (CHROMA444(h)) {
802  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
803  *fmt++ = AV_PIX_FMT_GBRP14;
804  } else
805  *fmt++ = AV_PIX_FMT_YUV444P14;
806  } else if (CHROMA422(h))
807  *fmt++ = AV_PIX_FMT_YUV422P14;
808  else
809  *fmt++ = AV_PIX_FMT_YUV420P14;
810  break;
811  case 8:
812 #if CONFIG_H264_VDPAU_HWACCEL
813  *fmt++ = AV_PIX_FMT_VDPAU;
814 #endif
815 #if CONFIG_H264_NVDEC_HWACCEL
816  *fmt++ = AV_PIX_FMT_CUDA;
817 #endif
818  if (CHROMA444(h)) {
819  if (h->avctx->colorspace == AVCOL_SPC_RGB)
820  *fmt++ = AV_PIX_FMT_GBRP;
821  else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
822  *fmt++ = AV_PIX_FMT_YUVJ444P;
823  else
824  *fmt++ = AV_PIX_FMT_YUV444P;
825  } else if (CHROMA422(h)) {
827  *fmt++ = AV_PIX_FMT_YUVJ422P;
828  else
829  *fmt++ = AV_PIX_FMT_YUV422P;
830  } else {
831 #if CONFIG_H264_DXVA2_HWACCEL
832  *fmt++ = AV_PIX_FMT_DXVA2_VLD;
833 #endif
834 #if CONFIG_H264_D3D11VA_HWACCEL
835  *fmt++ = AV_PIX_FMT_D3D11VA_VLD;
836  *fmt++ = AV_PIX_FMT_D3D11;
837 #endif
838 #if CONFIG_H264_VAAPI_HWACCEL
839  *fmt++ = AV_PIX_FMT_VAAPI;
840 #endif
841 #if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
842  *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
843 #endif
844  if (h->avctx->codec->pix_fmts)
845  choices = h->avctx->codec->pix_fmts;
846  else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
847  *fmt++ = AV_PIX_FMT_YUVJ420P;
848  else
849  *fmt++ = AV_PIX_FMT_YUV420P;
850  }
851  break;
852  default:
854  "Unsupported bit depth %d\n", h->ps.sps->bit_depth_luma);
855  return AVERROR_INVALIDDATA;
856  }
857 
858  *fmt = AV_PIX_FMT_NONE;
859 
860  for (i=0; choices[i] != AV_PIX_FMT_NONE; i++)
861  if (choices[i] == h->avctx->pix_fmt && !force_callback)
862  return choices[i];
863  return ff_thread_get_format(h->avctx, choices);
864 }
865 
866 /* export coded and cropped frame dimensions to AVCodecContext */
868 {
869  const SPS *sps = (const SPS*)h->ps.sps;
870  int cr = sps->crop_right;
871  int cl = sps->crop_left;
872  int ct = sps->crop_top;
873  int cb = sps->crop_bottom;
874  int width = h->width - (cr + cl);
875  int height = h->height - (ct + cb);
876  av_assert0(sps->crop_right + sps->crop_left < (unsigned)h->width);
877  av_assert0(sps->crop_top + sps->crop_bottom < (unsigned)h->height);
878 
879  /* handle container cropping */
880  if (h->width_from_caller > 0 && h->height_from_caller > 0 &&
881  !sps->crop_top && !sps->crop_left &&
882  FFALIGN(h->width_from_caller, 16) == FFALIGN(width, 16) &&
883  FFALIGN(h->height_from_caller, 16) == FFALIGN(height, 16) &&
884  h->width_from_caller <= width &&
885  h->height_from_caller <= height) {
887  height = h->height_from_caller;
888  cl = 0;
889  ct = 0;
890  cr = h->width - width;
891  cb = h->height - height;
892  } else {
893  h->width_from_caller = 0;
894  h->height_from_caller = 0;
895  }
896 
897  h->avctx->coded_width = h->width;
898  h->avctx->coded_height = h->height;
899  h->avctx->width = width;
900  h->avctx->height = height;
901  h->crop_right = cr;
902  h->crop_left = cl;
903  h->crop_top = ct;
904  h->crop_bottom = cb;
905 }
906 
908 {
909  const SPS *sps = h->ps.sps;
910  int i, ret;
911 
912  ff_set_sar(h->avctx, sps->sar);
914  &h->chroma_x_shift, &h->chroma_y_shift);
915 
916  if (sps->timing_info_present_flag) {
917  int64_t den = sps->time_scale;
918  if (h->x264_build < 44U)
919  den *= 2;
921  sps->num_units_in_tick * h->avctx->ticks_per_frame, den, 1 << 30);
922  }
923 
925 
926  h->first_field = 0;
927  h->prev_interlaced_frame = 1;
928 
929  init_scan_tables(h);
930  ret = ff_h264_alloc_tables(h);
931  if (ret < 0) {
932  av_log(h->avctx, AV_LOG_ERROR, "Could not allocate memory\n");
933  goto fail;
934  }
935 
936  if (sps->bit_depth_luma < 8 || sps->bit_depth_luma > 14 ||
937  sps->bit_depth_luma == 11 || sps->bit_depth_luma == 13
938  ) {
939  av_log(h->avctx, AV_LOG_ERROR, "Unsupported bit depth %d\n",
940  sps->bit_depth_luma);
941  ret = AVERROR_INVALIDDATA;
942  goto fail;
943  }
944 
945  h->cur_bit_depth_luma =
948  h->pixel_shift = sps->bit_depth_luma > 8;
950  h->bit_depth_luma = sps->bit_depth_luma;
951 
953  sps->chroma_format_idc);
957  sps->chroma_format_idc);
959 
960  if (!HAVE_THREADS || !(h->avctx->active_thread_type & FF_THREAD_SLICE)) {
961  ret = ff_h264_slice_context_init(h, &h->slice_ctx[0]);
962  if (ret < 0) {
963  av_log(h->avctx, AV_LOG_ERROR, "context_init() failed.\n");
964  goto fail;
965  }
966  } else {
967  for (i = 0; i < h->nb_slice_ctx; i++) {
968  H264SliceContext *sl = &h->slice_ctx[i];
969 
970  sl->h264 = h;
971  sl->intra4x4_pred_mode = h->intra4x4_pred_mode + i * 8 * 2 * h->mb_stride;
972  sl->mvd_table[0] = h->mvd_table[0] + i * 8 * 2 * h->mb_stride;
973  sl->mvd_table[1] = h->mvd_table[1] + i * 8 * 2 * h->mb_stride;
974 
975  if ((ret = ff_h264_slice_context_init(h, sl)) < 0) {
976  av_log(h->avctx, AV_LOG_ERROR, "context_init() failed.\n");
977  goto fail;
978  }
979  }
980  }
981 
982  h->context_initialized = 1;
983 
984  return 0;
985 fail:
987  h->context_initialized = 0;
988  return ret;
989 }
990 
992 {
993  switch (a) {
997  default:
998  return a;
999  }
1000 }
1001 
1002 static int h264_init_ps(H264Context *h, const H264SliceContext *sl, int first_slice)
1003 {
1004  const SPS *sps;
1005  int needs_reinit = 0, must_reinit, ret;
1006 
1007  if (first_slice) {
1008  av_buffer_unref(&h->ps.pps_ref);
1009  h->ps.pps = NULL;
1010  h->ps.pps_ref = av_buffer_ref(h->ps.pps_list[sl->pps_id]);
1011  if (!h->ps.pps_ref)
1012  return AVERROR(ENOMEM);
1013  h->ps.pps = (const PPS*)h->ps.pps_ref->data;
1014  }
1015 
1016  if (h->ps.sps != (const SPS*)h->ps.sps_list[h->ps.pps->sps_id]->data) {
1017  av_buffer_unref(&h->ps.sps_ref);
1018  h->ps.sps = NULL;
1019  h->ps.sps_ref = av_buffer_ref(h->ps.sps_list[h->ps.pps->sps_id]);
1020  if (!h->ps.sps_ref)
1021  return AVERROR(ENOMEM);
1022  h->ps.sps = (const SPS*)h->ps.sps_ref->data;
1023 
1024  if (h->mb_width != h->ps.sps->mb_width ||
1025  h->mb_height != h->ps.sps->mb_height ||
1028  )
1029  needs_reinit = 1;
1030 
1031  if (h->bit_depth_luma != h->ps.sps->bit_depth_luma ||
1033  needs_reinit = 1;
1034  }
1035  sps = h->ps.sps;
1036 
1037  must_reinit = (h->context_initialized &&
1038  ( 16*sps->mb_width != h->avctx->coded_width
1039  || 16*sps->mb_height != h->avctx->coded_height
1040  || h->cur_bit_depth_luma != sps->bit_depth_luma
1042  || h->mb_width != sps->mb_width
1043  || h->mb_height != sps->mb_height
1044  ));
1045  if (h->avctx->pix_fmt == AV_PIX_FMT_NONE
1047  must_reinit = 1;
1048 
1049  if (first_slice && av_cmp_q(sps->sar, h->avctx->sample_aspect_ratio))
1050  must_reinit = 1;
1051 
1052  if (!h->setup_finished) {
1053  h->avctx->profile = ff_h264_get_profile(sps);
1054  h->avctx->level = sps->level_idc;
1055  h->avctx->refs = sps->ref_frame_count;
1056 
1057  h->mb_width = sps->mb_width;
1058  h->mb_height = sps->mb_height;
1059  h->mb_num = h->mb_width * h->mb_height;
1060  h->mb_stride = h->mb_width + 1;
1061 
1062  h->b_stride = h->mb_width * 4;
1063 
1064  h->chroma_y_shift = sps->chroma_format_idc <= 1; // 400 uses yuv420p
1065 
1066  h->width = 16 * h->mb_width;
1067  h->height = 16 * h->mb_height;
1068 
1069  init_dimensions(h);
1070 
1071  if (sps->video_signal_type_present_flag) {
1072  h->avctx->color_range = sps->full_range > 0 ? AVCOL_RANGE_JPEG
1073  : AVCOL_RANGE_MPEG;
1075  if (h->avctx->colorspace != sps->colorspace)
1076  needs_reinit = 1;
1078  h->avctx->color_trc = sps->color_trc;
1079  h->avctx->colorspace = sps->colorspace;
1080  }
1081  }
1082 
1083  if (h->sei.alternative_transfer.present &&
1087  }
1088  }
1090 
1091  if (!h->context_initialized || must_reinit || needs_reinit) {
1092  int flush_changes = h->context_initialized;
1093  h->context_initialized = 0;
1094  if (sl != h->slice_ctx) {
1096  "changing width %d -> %d / height %d -> %d on "
1097  "slice %d\n",
1098  h->width, h->avctx->coded_width,
1099  h->height, h->avctx->coded_height,
1100  h->current_slice + 1);
1101  return AVERROR_INVALIDDATA;
1102  }
1103 
1104  av_assert1(first_slice);
1105 
1106  if (flush_changes)
1108 
1109  if ((ret = get_pixel_format(h, 1)) < 0)
1110  return ret;
1111  h->avctx->pix_fmt = ret;
1112 
1113  av_log(h->avctx, AV_LOG_VERBOSE, "Reinit context to %dx%d, "
1114  "pix_fmt: %s\n", h->width, h->height, av_get_pix_fmt_name(h->avctx->pix_fmt));
1115 
1116  if ((ret = h264_slice_header_init(h)) < 0) {
1118  "h264_slice_header_init() failed\n");
1119  return ret;
1120  }
1121  }
1122 
1123  return 0;
1124 }
1125 
/*
 * h264_export_frame_props(): attach decoded SEI metadata to the current
 * output frame (h->cur_pic_ptr->f): interlaced/top-field-first flags and
 * repeat_pict derived from picture-timing SEI, stereo-3D frame-packing
 * side data, display orientation, AFD, A53 captions and SMPTE 12-1
 * timecodes. Returns 0, or AVERROR(ENOMEM) if timecode side data
 * allocation fails.
 * NOTE(review): this is a doxygen extraction; hyperlinked lines were
 * dropped (e.g. the signature on line 1126, the SEI struct declarations
 * on 1138-1139, and most case labels). Line numbers below jump where
 * source lines are missing — do not treat this block as compilable as-is.
 */
1127 {
1128  const SPS *sps = h->ps.sps;
1129  H264Picture *cur = h->cur_pic_ptr;
1130 
1131  cur->f->interlaced_frame = 0;
1132  cur->f->repeat_pict = 0;
1133 
1134  /* Signal interlacing information externally. */
1135  /* Prioritize picture timing SEI information over used
1136  * decoding process if it exists. */
1137 
/* NOTE(review): line 1138-1139 (the `pt` picture-timing pointer and the
 * sps->pic_struct_present_flag guard, presumably) are missing here. */
1140  switch (pt->pic_struct) {
1142  break;
1145  cur->f->interlaced_frame = 1;
1146  break;
1149  if (FIELD_OR_MBAFF_PICTURE(h))
1150  cur->f->interlaced_frame = 1;
1151  else
1152  // try to flag soft telecine progressive
1154  break;
1157  /* Signal the possibility of telecined film externally
1158  * (pic_struct 5,6). From these hints, let the applications
1159  * decide if they apply deinterlacing. */
1160  cur->f->repeat_pict = 1;
1161  break;
1163  cur->f->repeat_pict = 2;
1164  break;
1166  cur->f->repeat_pict = 4;
1167  break;
1168  }
1169 
1170  if ((pt->ct_type & 3) &&
1172  cur->f->interlaced_frame = (pt->ct_type & (1 << 1)) != 0;
1173  } else {
1174  /* Derive interlacing flag from used decoding process. */
1176  }
1178 
1179  if (cur->field_poc[0] != cur->field_poc[1]) {
1180  /* Derive top_field_first from field pocs. */
1181  cur->f->top_field_first = cur->field_poc[0] < cur->field_poc[1];
1182  } else {
1184  /* Use picture timing SEI information. Even if it is a
1185  * information of a past frame, better than nothing. */
1188  cur->f->top_field_first = 1;
1189  else
1190  cur->f->top_field_first = 0;
1191  } else if (cur->f->interlaced_frame) {
1192  /* Default to top field first when pic_struct_present_flag
1193  * is not set but interlaced frame detected */
1194  cur->f->top_field_first = 1;
1195  } else {
1196  /* Most likely progressive */
1197  cur->f->top_field_first = 0;
1198  }
1199  }
1200 
/* Map H.264 frame-packing-arrangement SEI to an AVStereo3D side-data entry. */
1201  if (h->sei.frame_packing.present &&
1206  AVStereo3D *stereo = av_stereo3d_create_side_data(cur->f);
1207  if (stereo) {
1208  switch (fp->arrangement_type) {
1210  stereo->type = AV_STEREO3D_CHECKERBOARD;
1211  break;
1213  stereo->type = AV_STEREO3D_COLUMNS;
1214  break;
1216  stereo->type = AV_STEREO3D_LINES;
1217  break;
1219  if (fp->quincunx_sampling_flag)
1221  else
1222  stereo->type = AV_STEREO3D_SIDEBYSIDE;
1223  break;
1225  stereo->type = AV_STEREO3D_TOPBOTTOM;
1226  break;
1228  stereo->type = AV_STEREO3D_FRAMESEQUENCE;
1229  break;
1230  case H264_SEI_FPA_TYPE_2D:
1231  stereo->type = AV_STEREO3D_2D;
1232  break;
1233  }
1234 
1235  if (fp->content_interpretation_type == 2)
1236  stereo->flags = AV_STEREO3D_FLAG_INVERT;
1237 
1240  stereo->view = AV_STEREO3D_VIEW_LEFT;
1241  else
1242  stereo->view = AV_STEREO3D_VIEW_RIGHT;
1243  }
1244  }
1245  }
1246 
/* Display-orientation SEI -> 3x3 rotation matrix side data.
 * anticlockwise_rotation is a 16.16 fixed-point turn fraction, hence the
 * * 360 / (1 << 16) conversion to degrees. */
1247  if (h->sei.display_orientation.present &&
1252  double angle = o->anticlockwise_rotation * 360 / (double) (1 << 16);
1253  AVFrameSideData *rotation = av_frame_new_side_data(cur->f,
1255  sizeof(int32_t) * 9);
1256  if (rotation) {
1257  av_display_rotation_set((int32_t *)rotation->data, angle);
1258  av_display_matrix_flip((int32_t *)rotation->data,
1259  o->hflip, o->vflip);
1260  }
1261  }
1262 
1263  if (h->sei.afd.present) {
1265  sizeof(uint8_t));
1266 
1267  if (sd) {
1269  h->sei.afd.present = 0;
1270  }
1271  }
1272 
/* A53 caption buffer ownership is handed to the frame side data; on
 * side-data allocation failure the buffer is dropped. The pointer is
 * cleared either way (the ref was consumed or unreferenced). */
1273  if (h->sei.a53_caption.buf_ref) {
1274  H264SEIA53Caption *a53 = &h->sei.a53_caption;
1275 
1277  if (!sd)
1278  av_buffer_unref(&a53->buf_ref);
1279  a53->buf_ref = NULL;
1280 
1282  }
1283 
/* Pack up to tc_sd[0] SMPTE 12-1 timecodes as BCD words: tc_sd[0] holds
 * the count, tc_sd[1..] one packed timecode each. */
1284  if (h->sei.picture_timing.timecode_cnt > 0) {
1285  uint32_t tc = 0;
1286  uint32_t *tc_sd;
1287 
1288  AVFrameSideData *tcside = av_frame_new_side_data(cur->f,
1290  sizeof(uint32_t)*4);
1291  if (!tcside)
1292  return AVERROR(ENOMEM);
1293 
1294  tc_sd = (uint32_t*)tcside->data;
1295  tc_sd[0] = h->sei.picture_timing.timecode_cnt;
1296 
1297  for (int i = 0; i < tc_sd[0]; i++) {
1298  uint32_t frames;
1299 
1300  /* For SMPTE 12-M timecodes, frame count is a special case if > 30 FPS.
1301  See SMPTE ST 12-1:2014 Sec 12.1 for more info. */
1302  if (av_cmp_q(h->avctx->framerate, (AVRational) {30, 1}) == 1) {
1303  frames = h->sei.picture_timing.timecode[i].frame / 2;
1304  if (h->sei.picture_timing.timecode[i].frame % 2 == 1) {
1305  if (av_cmp_q(h->avctx->framerate, (AVRational) {50, 1}) == 0)
1306  tc |= (1 << 7);
1307  else
1308  tc |= (1 << 23);
1309  }
1310  } else {
1311  frames = h->sei.picture_timing.timecode[i].frame;
1312  }
1313 
1314  tc |= h->sei.picture_timing.timecode[i].dropframe << 30;
1315  tc |= (frames / 10) << 28;
1316  tc |= (frames % 10) << 24;
1317  tc |= (h->sei.picture_timing.timecode[i].seconds / 10) << 20;
1318  tc |= (h->sei.picture_timing.timecode[i].seconds % 10) << 16;
1319  tc |= (h->sei.picture_timing.timecode[i].minutes / 10) << 12;
1320  tc |= (h->sei.picture_timing.timecode[i].minutes % 10) << 8;
1321  tc |= (h->sei.picture_timing.timecode[i].hours / 10) << 4;
1322  tc |= (h->sei.picture_timing.timecode[i].hours % 10);
1323 
1324  tc_sd[i + 1] = tc;
1325  }
1327  }
1328 
1329  return 0;
1330 }
1331 
/*
 * h264_select_output_frame(): insert the just-decoded picture into the
 * delayed-picture reorder buffer, track recent POCs to estimate the
 * required reorder depth, and pick the next picture (lowest POC, stopping
 * at key frames / MMCO resets) to output in h->next_output_pic.
 * Always returns 0 in the visible code paths.
 * NOTE(review): doxygen extraction — the signature (line 1332) and several
 * interior lines (1343-1344, 1376, 1412-1414, 1418) are missing; line
 * numbers below jump accordingly.
 */
1333 {
1334  const SPS *sps = h->ps.sps;
1335  H264Picture *out = h->cur_pic_ptr;
1336  H264Picture *cur = h->cur_pic_ptr;
1337  int i, pics, out_of_order, out_idx;
1338 
/* Latch the MMCO-reset flag onto the picture, then clear the context flag. */
1339  cur->mmco_reset = h->mmco_reset;
1340  h->mmco_reset = 0;
1341 
1342  if (sps->bitstream_restriction_flag ||
1345  }
1346 
/* Insert cur->poc into the sorted last_pocs[] history (oldest first);
 * `i` ends up as the insertion position. */
1347  for (i = 0; 1; i++) {
1348  if(i == MAX_DELAYED_PIC_COUNT || cur->poc < h->last_pocs[i]){
1349  if(i)
1350  h->last_pocs[i-1] = cur->poc;
1351  break;
1352  } else if(i) {
1353  h->last_pocs[i-1]= h->last_pocs[i];
1354  }
1355  }
1356  out_of_order = MAX_DELAYED_PIC_COUNT - i;
1357  if( cur->f->pict_type == AV_PICTURE_TYPE_B
1358  || (h->last_pocs[MAX_DELAYED_PIC_COUNT-2] > INT_MIN && h->last_pocs[MAX_DELAYED_PIC_COUNT-1] - (int64_t)h->last_pocs[MAX_DELAYED_PIC_COUNT-2] > 2))
1359  out_of_order = FFMAX(out_of_order, 1);
1360  if (out_of_order == MAX_DELAYED_PIC_COUNT) {
1361  av_log(h->avctx, AV_LOG_VERBOSE, "Invalid POC %d<%d\n", cur->poc, h->last_pocs[0]);
1362  for (i = 1; i < MAX_DELAYED_PIC_COUNT; i++)
1363  h->last_pocs[i] = INT_MIN;
1364  h->last_pocs[0] = cur->poc;
1365  cur->mmco_reset = 1;
1366  } else if(h->avctx->has_b_frames < out_of_order && !sps->bitstream_restriction_flag){
1367  int loglevel = h->avctx->frame_number > 1 ? AV_LOG_WARNING : AV_LOG_VERBOSE;
1368  av_log(h->avctx, loglevel, "Increasing reorder buffer to %d\n", out_of_order);
1369  h->avctx->has_b_frames = out_of_order;
1370  }
1371 
1372  pics = 0;
1373  while (h->delayed_pic[pics])
1374  pics++;
1375 
1377 
/* Queue the current picture; keep it alive via DELAYED_PIC_REF if it is
 * not otherwise referenced. */
1378  h->delayed_pic[pics++] = cur;
1379  if (cur->reference == 0)
1380  cur->reference = DELAYED_PIC_REF;
1381 
/* Find the lowest-POC candidate, but never reorder past a key frame or
 * an MMCO reset. */
1382  out = h->delayed_pic[0];
1383  out_idx = 0;
1384  for (i = 1; h->delayed_pic[i] &&
1385  !h->delayed_pic[i]->f->key_frame &&
1386  !h->delayed_pic[i]->mmco_reset;
1387  i++)
1388  if (h->delayed_pic[i]->poc < out->poc) {
1389  out = h->delayed_pic[i];
1390  out_idx = i;
1391  }
1392  if (h->avctx->has_b_frames == 0 &&
1393  (h->delayed_pic[0]->f->key_frame || h->delayed_pic[0]->mmco_reset))
1394  h->next_outputed_poc = INT_MIN;
1395  out_of_order = out->poc < h->next_outputed_poc;
1396 
1397  if (out_of_order || pics > h->avctx->has_b_frames) {
1398  out->reference &= ~DELAYED_PIC_REF;
1399  for (i = out_idx; h->delayed_pic[i]; i++)
1400  h->delayed_pic[i] = h->delayed_pic[i + 1];
1401  }
1402  if (!out_of_order && pics > h->avctx->has_b_frames) {
1403  h->next_output_pic = out;
1404  if (out_idx == 0 && h->delayed_pic[0] && (h->delayed_pic[0]->f->key_frame || h->delayed_pic[0]->mmco_reset)) {
1405  h->next_outputed_poc = INT_MIN;
1406  } else
1407  h->next_outputed_poc = out->poc;
1408 
1409  if (out->recovered) {
1410  // We have reached an recovery point and all frames after it in
1411  // display order are "recovered".
1413  }
1415 
/* Unrecovered picture: either suppress output entirely or pass it
 * through flagged as corrupt, depending on AV_CODEC_FLAG_OUTPUT_CORRUPT
 * (the rest of that condition is on the missing line 1418). */
1416  if (!out->recovered) {
1417  if (!(h->avctx->flags & AV_CODEC_FLAG_OUTPUT_CORRUPT) &&
1419  h->next_output_pic = NULL;
1420  } else {
1421  out->f->flags |= AV_FRAME_FLAG_CORRUPT;
1422  }
1423  }
1424  } else {
1425  av_log(h->avctx, AV_LOG_DEBUG, "no picture %s\n", out_of_order ? "ooo" : "");
1426  }
1427 
1428  return 0;
1429 }
1430 
/*
 * h264_field_start() (first signature line, 1435, is missing from this
 * extraction; the name is confirmed by the call site on listing line 2172):
 * decides whether the slice starts a new frame or completes a field pair,
 * conceals frame_num gaps, starts the new picture, runs reference list /
 * POC initialization and, once a full frame is available, exports frame
 * properties and selects the next output picture.
 * Returns 0 on success or a negative AVERROR code.
 * NOTE(review): doxygen extraction — many hyperlinked lines are missing
 * (av_log first lines, ff_thread_report_progress calls, etc.); line
 * numbers below jump accordingly.
 */
1431 /* This function is called right after decoding the slice header for a first
1432  * slice in a field (or a frame). It decides whether we are decoding a new frame
1433  * or a second field in a pair and does the necessary setup.
1434  */
1436  const H2645NAL *nal, int first_slice)
1437 {
1438  int i;
1439  const SPS *sps;
1440 
1441  int last_pic_structure, last_pic_droppable, ret;
1442 
1443  ret = h264_init_ps(h, sl, first_slice);
1444  if (ret < 0)
1445  return ret;
1446 
1447  sps = h->ps.sps;
1448 
/* Body of this if (presumably raising has_b_frames to num_reorder_frames,
 * line 1451) is missing from the extraction. */
1449  if (sps && sps->bitstream_restriction_flag &&
1450  h->avctx->has_b_frames < sps->num_reorder_frames) {
1452  }
1453 
1454  last_pic_droppable = h->droppable;
1455  last_pic_structure = h->picture_structure;
1456  h->droppable = (nal->ref_idc == 0);
1458 
/* Copy the slice-level POC inputs into the context-level POC state. */
1459  h->poc.frame_num = sl->frame_num;
1460  h->poc.poc_lsb = sl->poc_lsb;
1462  h->poc.delta_poc[0] = sl->delta_poc[0];
1463  h->poc.delta_poc[1] = sl->delta_poc[1];
1464 
1465  /* Shorten frame num gaps so we don't have to allocate reference
1466  * frames just to throw them away */
1467  if (h->poc.frame_num != h->poc.prev_frame_num) {
1468  int unwrap_prev_frame_num = h->poc.prev_frame_num;
1469  int max_frame_num = 1 << sps->log2_max_frame_num;
1470 
1471  if (unwrap_prev_frame_num > h->poc.frame_num)
1472  unwrap_prev_frame_num -= max_frame_num;
1473 
1474  if ((h->poc.frame_num - unwrap_prev_frame_num) > sps->ref_frame_count) {
1475  unwrap_prev_frame_num = (h->poc.frame_num - sps->ref_frame_count) - 1;
1476  if (unwrap_prev_frame_num < 0)
1477  unwrap_prev_frame_num += max_frame_num;
1478 
1479  h->poc.prev_frame_num = unwrap_prev_frame_num;
1480  }
1481  }
1482 
1483  /* See if we have a decoded first field looking for a pair...
1484  * Here, we're using that to see if we should mark previously
1485  * decode frames as "finished".
1486  * We have to do that before the "dummy" in-between frame allocation,
1487  * since that can modify h->cur_pic_ptr. */
1488  if (h->first_field) {
1489  int last_field = last_pic_structure == PICT_BOTTOM_FIELD;
1490  av_assert0(h->cur_pic_ptr);
1491  av_assert0(h->cur_pic_ptr->f->buf[0]);
1492  assert(h->cur_pic_ptr->reference != DELAYED_PIC_REF);
1493 
1494  /* Mark old field/frame as completed */
1495  if (h->cur_pic_ptr->tf.owner[last_field] == h->avctx) {
1496  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, last_field);
1497  }
1498 
1499  /* figure out if we have a complementary field pair */
1500  if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
1501  /* Previous field is unmatched. Don't display it, but let it
1502  * remain for reference if marked as such. */
1503  if (last_pic_structure != PICT_FRAME) {
1505  last_pic_structure == PICT_TOP_FIELD);
1506  }
1507  } else {
1508  if (h->cur_pic_ptr->frame_num != h->poc.frame_num) {
1509  /* This and previous field were reference, but had
1510  * different frame_nums. Consider this field first in
1511  * pair. Throw away previous field except for reference
1512  * purposes. */
1513  if (last_pic_structure != PICT_FRAME) {
1515  last_pic_structure == PICT_TOP_FIELD);
1516  }
1517  } else {
1518  /* Second field in complementary pair */
1519  if (!((last_pic_structure == PICT_TOP_FIELD &&
1521  (last_pic_structure == PICT_BOTTOM_FIELD &&
1524  "Invalid field mode combination %d/%d\n",
1525  last_pic_structure, h->picture_structure);
1526  h->picture_structure = last_pic_structure;
1527  h->droppable = last_pic_droppable;
1528  return AVERROR_INVALIDDATA;
1529  } else if (last_pic_droppable != h->droppable) {
1531  "Found reference and non-reference fields in the same frame, which");
1532  h->picture_structure = last_pic_structure;
1533  h->droppable = last_pic_droppable;
1534  return AVERROR_PATCHWELCOME;
1535  }
1536  }
1537  }
1538  }
1539 
/* Conceal frame_num gaps by decoding "dummy" frames until prev_frame_num
 * catches up with frame_num. */
1540  while (h->poc.frame_num != h->poc.prev_frame_num && !h->first_field &&
1541  h->poc.frame_num != (h->poc.prev_frame_num + 1) % (1 << sps->log2_max_frame_num)) {
1542  H264Picture *prev = h->short_ref_count ? h->short_ref[0] : NULL;
1543  av_log(h->avctx, AV_LOG_DEBUG, "Frame num gap %d %d\n",
1544  h->poc.frame_num, h->poc.prev_frame_num);
1546  for(i=0; i<FF_ARRAY_ELEMS(h->last_pocs); i++)
1547  h->last_pocs[i] = INT_MIN;
1548  ret = h264_frame_start(h);
1549  if (ret < 0) {
1550  h->first_field = 0;
1551  return ret;
1552  }
1553 
1554  h->poc.prev_frame_num++;
1555  h->poc.prev_frame_num %= 1 << sps->log2_max_frame_num;
1558  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 0);
1559  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 1);
1560 
/* The call whose result is tested below (presumably the reference-marking
 * execution on line 1562) is missing from the extraction. */
1561  h->explicit_ref_marking = 0;
1563  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
1564  return ret;
1565  /* Error concealment: If a ref is missing, copy the previous ref
1566  * in its place.
1567  * FIXME: Avoiding a memcpy would be nice, but ref handling makes
1568  * many assumptions about there being no actual duplicates.
1569  * FIXME: This does not copy padding for out-of-frame motion
1570  * vectors. Given we are concealing a lost frame, this probably
1571  * is not noticeable by comparison, but it should be fixed. */
1572  if (h->short_ref_count) {
/* Mid-range gray fill values (one per plane) for when no previous
 * reference is usable. */
1573  int c[4] = {
1574  1<<(h->ps.sps->bit_depth_luma-1),
1575  1<<(h->ps.sps->bit_depth_chroma-1),
1576  1<<(h->ps.sps->bit_depth_chroma-1),
1577  -1
1578  };
1579 
1580  if (prev &&
1581  h->short_ref[0]->f->width == prev->f->width &&
1582  h->short_ref[0]->f->height == prev->f->height &&
1583  h->short_ref[0]->f->format == prev->f->format) {
1584  ff_thread_await_progress(&prev->tf, INT_MAX, 0);
1585  if (prev->field_picture)
1586  ff_thread_await_progress(&prev->tf, INT_MAX, 1);
1587  av_image_copy(h->short_ref[0]->f->data,
1588  h->short_ref[0]->f->linesize,
1589  (const uint8_t **)prev->f->data,
1590  prev->f->linesize,
1591  prev->f->format,
1592  prev->f->width,
1593  prev->f->height);
1594  h->short_ref[0]->poc = prev->poc + 2;
1595  } else if (!h->frame_recovered && !h->avctx->hwaccel)
1596  ff_color_frame(h->short_ref[0]->f, c);
1597  h->short_ref[0]->frame_num = h->poc.prev_frame_num;
1598  }
1599  }
1600 
1601  /* See if we have a decoded first field looking for a pair...
1602  * We're using that to see whether to continue decoding in that
1603  * frame, or to allocate a new one. */
1604  if (h->first_field) {
1605  av_assert0(h->cur_pic_ptr);
1606  av_assert0(h->cur_pic_ptr->f->buf[0]);
1607  assert(h->cur_pic_ptr->reference != DELAYED_PIC_REF);
1608 
1609  /* figure out if we have a complementary field pair */
1610  if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
1611  /* Previous field is unmatched. Don't display it, but let it
1612  * remain for reference if marked as such. */
1613  h->missing_fields ++;
1614  h->cur_pic_ptr = NULL;
1615  h->first_field = FIELD_PICTURE(h);
1616  } else {
1617  h->missing_fields = 0;
1618  if (h->cur_pic_ptr->frame_num != h->poc.frame_num) {
1621  /* This and the previous field had different frame_nums.
1622  * Consider this field first in pair. Throw away previous
1623  * one except for reference purposes. */
1624  h->first_field = 1;
1625  h->cur_pic_ptr = NULL;
1626  } else if (h->cur_pic_ptr->reference & DELAYED_PIC_REF) {
1627  /* This frame was already output, we cannot draw into it
1628  * anymore.
1629  */
1630  h->first_field = 1;
1631  h->cur_pic_ptr = NULL;
1632  } else {
1633  /* Second field in complementary pair */
1634  h->first_field = 0;
1635  }
1636  }
1637  } else {
1638  /* Frame or first field in a potentially complementary pair */
1639  h->first_field = FIELD_PICTURE(h);
1640  }
1641 
1642  if (!FIELD_PICTURE(h) || h->first_field) {
1643  if (h264_frame_start(h) < 0) {
1644  h->first_field = 0;
1645  return AVERROR_INVALIDDATA;
1646  }
1647  } else {
/* Second field reuses the existing picture; the declaration of `field`
 * (lines 1648-1649) is missing from the extraction. */
1650  h->cur_pic_ptr->tf.owner[field] = h->avctx;
1651  }
1652  /* Some macroblocks can be accessed before they're available in case
1653  * of lost slices, MBAFF or threading. */
1654  if (FIELD_PICTURE(h)) {
1655  for(i = (h->picture_structure == PICT_BOTTOM_FIELD); i<h->mb_height; i++)
1656  memset(h->slice_table + i*h->mb_stride, -1, (h->mb_stride - (i+1==h->mb_height)) * sizeof(*h->slice_table));
1657  } else {
1658  memset(h->slice_table, -1,
1659  (h->mb_height * h->mb_stride - 1) * sizeof(*h->slice_table));
1660  }
1661 
/* POC initialization call (first line, 1662, missing from extraction). */
1663  h->ps.sps, &h->poc, h->picture_structure, nal->ref_idc);
1664  if (ret < 0)
1665  return ret;
1666 
1667  memcpy(h->mmco, sl->mmco, sl->nb_mmco * sizeof(*h->mmco));
1668  h->nb_mmco = sl->nb_mmco;
1670 
1671  h->picture_idr = nal->type == H264_NAL_IDR_SLICE;
1672 
1673  if (h->sei.recovery_point.recovery_frame_cnt >= 0) {
1674  const int sei_recovery_frame_cnt = h->sei.recovery_point.recovery_frame_cnt;
1675 
1676  if (h->poc.frame_num != sei_recovery_frame_cnt || sl->slice_type_nos != AV_PICTURE_TYPE_I)
1677  h->valid_recovery_point = 1;
1678 
1679  if ( h->recovery_frame < 0
1680  || av_mod_uintp2(h->recovery_frame - h->poc.frame_num, h->ps.sps->log2_max_frame_num) > sei_recovery_frame_cnt) {
1681  h->recovery_frame = av_mod_uintp2(h->poc.frame_num + sei_recovery_frame_cnt, h->ps.sps->log2_max_frame_num);
1682 
1683  if (!h->valid_recovery_point)
1684  h->recovery_frame = h->poc.frame_num;
1685  }
1686  }
1687 
1688  h->cur_pic_ptr->f->key_frame |= (nal->type == H264_NAL_IDR_SLICE);
1689 
1690  if (nal->type == H264_NAL_IDR_SLICE ||
1691  (h->recovery_frame == h->poc.frame_num && nal->ref_idc)) {
1692  h->recovery_frame = -1;
1693  h->cur_pic_ptr->recovered = 1;
1694  }
1695  // If we have an IDR, all frames after it in decoded order are
1696  // "recovered".
1697  if (nal->type == H264_NAL_IDR_SLICE)
1699 #if 1
1701 #else
1703 #endif
1704 
1705  /* Set the frame properties/side data. Only done for the second field in
1706  * field coded frames, since some SEI information is present for each field
1707  * and is merged by the SEI parsing code. */
1708  if (!FIELD_PICTURE(h) || !h->first_field || h->missing_fields > 1) {
1709  ret = h264_export_frame_props(h);
1710  if (ret < 0)
1711  return ret;
1712 
1713  ret = h264_select_output_frame(h);
1714  if (ret < 0)
1715  return ret;
1716  }
1717 
1718  return 0;
1719 }
1720 
/*
 * h264_slice_header_parse() (first signature line, 1721, is missing from
 * this extraction; the name is confirmed by the call site on listing line
 * 2082): parses the slice header bitstream into `sl` only — slice type,
 * PPS/SPS selection, frame_num, field/frame structure, POC fields,
 * reference counts, prediction weights, ref-pic marking, CABAC init, QP
 * and deblocking parameters. No decoder state outside `sl` is touched.
 * Returns 0 on success, AVERROR_INVALIDDATA (or -1 in one legacy path)
 * on malformed headers.
 * NOTE(review): doxygen extraction — hyperlinked lines (av_log first
 * lines, some call statements) are missing; line numbers jump.
 */
1722  const H2645NAL *nal)
1723 {
1724  const SPS *sps;
1725  const PPS *pps;
1726  int ret;
1727  unsigned int slice_type, tmp, i;
1728  int field_pic_flag, bottom_field_flag;
1729  int first_slice = sl == h->slice_ctx && !h->current_slice;
1730  int picture_structure;
1731 
1732  if (first_slice)
1734 
1735  sl->first_mb_addr = get_ue_golomb_long(&sl->gb);
1736 
1737  slice_type = get_ue_golomb_31(&sl->gb);
1738  if (slice_type > 9) {
1740  "slice type %d too large at %d\n",
1741  slice_type, sl->first_mb_addr);
1742  return AVERROR_INVALIDDATA;
1743  }
/* Types 5-9 are the "fixed" variants: same type for the whole picture. */
1744  if (slice_type > 4) {
1745  slice_type -= 5;
1746  sl->slice_type_fixed = 1;
1747  } else
1748  sl->slice_type_fixed = 0;
1749 
1750  slice_type = ff_h264_golomb_to_pict_type[slice_type];
1751  sl->slice_type = slice_type;
1752  sl->slice_type_nos = slice_type & 3;
1753 
1754  if (nal->type == H264_NAL_IDR_SLICE &&
1756  av_log(h->avctx, AV_LOG_ERROR, "A non-intra slice in an IDR NAL unit.\n");
1757  return AVERROR_INVALIDDATA;
1758  }
1759 
/* Resolve the PPS referenced by this slice, then the SPS it points to. */
1760  sl->pps_id = get_ue_golomb(&sl->gb);
1761  if (sl->pps_id >= MAX_PPS_COUNT) {
1762  av_log(h->avctx, AV_LOG_ERROR, "pps_id %u out of range\n", sl->pps_id);
1763  return AVERROR_INVALIDDATA;
1764  }
1765  if (!h->ps.pps_list[sl->pps_id]) {
1767  "non-existing PPS %u referenced\n",
1768  sl->pps_id);
1769  return AVERROR_INVALIDDATA;
1770  }
1771  pps = (const PPS*)h->ps.pps_list[sl->pps_id]->data;
1772 
1773  if (!h->ps.sps_list[pps->sps_id]) {
1775  "non-existing SPS %u referenced\n", pps->sps_id);
1776  return AVERROR_INVALIDDATA;
1777  }
1778  sps = (const SPS*)h->ps.sps_list[pps->sps_id]->data;
1779 
1780  sl->frame_num = get_bits(&sl->gb, sps->log2_max_frame_num);
1781  if (!first_slice) {
1782  if (h->poc.frame_num != sl->frame_num) {
1783  av_log(h->avctx, AV_LOG_ERROR, "Frame num change from %d to %d\n",
1784  h->poc.frame_num, sl->frame_num);
1785  return AVERROR_INVALIDDATA;
1786  }
1787  }
1788 
1789  sl->mb_mbaff = 0;
1790 
1791  if (sps->frame_mbs_only_flag) {
1792  picture_structure = PICT_FRAME;
1793  } else {
1794  if (!sps->direct_8x8_inference_flag && slice_type == AV_PICTURE_TYPE_B) {
1795  av_log(h->avctx, AV_LOG_ERROR, "This stream was generated by a broken encoder, invalid 8x8 inference\n");
1796  return -1;
1797  }
1798  field_pic_flag = get_bits1(&sl->gb);
1799  if (field_pic_flag) {
1800  bottom_field_flag = get_bits1(&sl->gb);
1801  picture_structure = PICT_TOP_FIELD + bottom_field_flag;
1802  } else {
1803  picture_structure = PICT_FRAME;
1804  }
1805  }
1806  sl->picture_structure = picture_structure;
1807  sl->mb_field_decoding_flag = picture_structure != PICT_FRAME;
1808 
/* Field pictures double the picture-number space (spec 8.2.4.1). */
1809  if (picture_structure == PICT_FRAME) {
1810  sl->curr_pic_num = sl->frame_num;
1811  sl->max_pic_num = 1 << sps->log2_max_frame_num;
1812  } else {
1813  sl->curr_pic_num = 2 * sl->frame_num + 1;
1814  sl->max_pic_num = 1 << (sps->log2_max_frame_num + 1);
1815  }
1816 
1817  if (nal->type == H264_NAL_IDR_SLICE)
1818  get_ue_golomb_long(&sl->gb); /* idr_pic_id */
1819 
1820  if (sps->poc_type == 0) {
1821  sl->poc_lsb = get_bits(&sl->gb, sps->log2_max_poc_lsb);
1822 
1823  if (pps->pic_order_present == 1 && picture_structure == PICT_FRAME)
1824  sl->delta_poc_bottom = get_se_golomb(&sl->gb);
1825  }
1826 
1827  if (sps->poc_type == 1 && !sps->delta_pic_order_always_zero_flag) {
1828  sl->delta_poc[0] = get_se_golomb(&sl->gb);
1829 
1830  if (pps->pic_order_present == 1 && picture_structure == PICT_FRAME)
1831  sl->delta_poc[1] = get_se_golomb(&sl->gb);
1832  }
1833 
1834  sl->redundant_pic_count = 0;
1835  if (pps->redundant_pic_cnt_present)
1836  sl->redundant_pic_count = get_ue_golomb(&sl->gb);
1837 
1838  if (sl->slice_type_nos == AV_PICTURE_TYPE_B)
1839  sl->direct_spatial_mv_pred = get_bits1(&sl->gb);
1840 
/* Reference-count parsing call (first line, 1841, missing from the
 * extraction — presumably ff_h264_parse_ref_count). */
1842  &sl->gb, pps, sl->slice_type_nos,
1843  picture_structure, h->avctx);
1844  if (ret < 0)
1845  return ret;
1846 
1847  if (sl->slice_type_nos != AV_PICTURE_TYPE_I) {
1849  if (ret < 0) {
1850  sl->ref_count[1] = sl->ref_count[0] = 0;
1851  return ret;
1852  }
1853  }
1854 
1855  sl->pwt.use_weight = 0;
1856  for (i = 0; i < 2; i++) {
1857  sl->pwt.luma_weight_flag[i] = 0;
1858  sl->pwt.chroma_weight_flag[i] = 0;
1859  }
1860  if ((pps->weighted_pred && sl->slice_type_nos == AV_PICTURE_TYPE_P) ||
1861  (pps->weighted_bipred_idc == 1 &&
1863  ret = ff_h264_pred_weight_table(&sl->gb, sps, sl->ref_count,
1864  sl->slice_type_nos, &sl->pwt,
1865  picture_structure, h->avctx);
1866  if (ret < 0)
1867  return ret;
1868  }
1869 
1870  sl->explicit_ref_marking = 0;
1871  if (nal->ref_idc) {
1872  ret = ff_h264_decode_ref_pic_marking(sl, &sl->gb, nal, h->avctx);
1873  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
1874  return AVERROR_INVALIDDATA;
1875  }
1876 
1877  if (sl->slice_type_nos != AV_PICTURE_TYPE_I && pps->cabac) {
1878  tmp = get_ue_golomb_31(&sl->gb);
1879  if (tmp > 2) {
1880  av_log(h->avctx, AV_LOG_ERROR, "cabac_init_idc %u overflow\n", tmp);
1881  return AVERROR_INVALIDDATA;
1882  }
1883  sl->cabac_init_idc = tmp;
1884  }
1885 
1886  sl->last_qscale_diff = 0;
/* (unsigned) cast makes an out-of-range negative delta wrap high and be
 * caught by the range check below instead of passing as a small value. */
1887  tmp = pps->init_qp + (unsigned)get_se_golomb(&sl->gb);
1888  if (tmp > 51 + 6 * (sps->bit_depth_luma - 8)) {
1889  av_log(h->avctx, AV_LOG_ERROR, "QP %u out of range\n", tmp);
1890  return AVERROR_INVALIDDATA;
1891  }
1892  sl->qscale = tmp;
1893  sl->chroma_qp[0] = get_chroma_qp(pps, 0, sl->qscale);
1894  sl->chroma_qp[1] = get_chroma_qp(pps, 1, sl->qscale);
1895  // FIXME qscale / qp ... stuff
1896  if (sl->slice_type == AV_PICTURE_TYPE_SP)
1897  get_bits1(&sl->gb); /* sp_for_switch_flag */
1898  if (sl->slice_type == AV_PICTURE_TYPE_SP ||
1900  get_se_golomb(&sl->gb); /* slice_qs_delta */
1901 
1902  sl->deblocking_filter = 1;
1903  sl->slice_alpha_c0_offset = 0;
1904  sl->slice_beta_offset = 0;
/* The deblocking_filter_control_present guard (line 1905) is missing
 * from the extraction. */
1906  tmp = get_ue_golomb_31(&sl->gb);
1907  if (tmp > 2) {
1909  "deblocking_filter_idc %u out of range\n", tmp);
1910  return AVERROR_INVALIDDATA;
1911  }
1912  sl->deblocking_filter = tmp;
1913  if (sl->deblocking_filter < 2)
1914  sl->deblocking_filter ^= 1; // 1<->0
1915 
1916  if (sl->deblocking_filter) {
1917  int slice_alpha_c0_offset_div2 = get_se_golomb(&sl->gb);
1918  int slice_beta_offset_div2 = get_se_golomb(&sl->gb);
1919  if (slice_alpha_c0_offset_div2 > 6 ||
1920  slice_alpha_c0_offset_div2 < -6 ||
1921  slice_beta_offset_div2 > 6 ||
1922  slice_beta_offset_div2 < -6) {
1924  "deblocking filter parameters %d %d out of range\n",
1925  slice_alpha_c0_offset_div2, slice_beta_offset_div2);
1926  return AVERROR_INVALIDDATA;
1927  }
1928  sl->slice_alpha_c0_offset = slice_alpha_c0_offset_div2 * 2;
1929  sl->slice_beta_offset = slice_beta_offset_div2 * 2;
1930  }
1931  }
1932 
1933  return 0;
1934 }
1935 
/*
 * h264_slice_init() (signature line 1938 missing from this extraction;
 * name confirmed by the call site on listing line 2190): validates
 * first_mb_in_slice, sets resync coordinates, builds reference lists,
 * computes implicit weights, applies skip_loop_filter policy, assigns the
 * slice number and fills the ref2frm mapping tables used by the MB layer.
 * Returns 0 on success or AVERROR_INVALIDDATA.
 * NOTE(review): doxygen extraction — several hyperlinked lines are
 * missing; line numbers jump.
 */
1936 /* do all the per-slice initialization needed before we can start decoding the
1937  * actual MBs */
1939  const H2645NAL *nal)
1940 {
1941  int i, j, ret = 0;
1942 
1943  if (h->picture_idr && nal->type != H264_NAL_IDR_SLICE) {
1944  av_log(h->avctx, AV_LOG_ERROR, "Invalid mix of IDR and non-IDR slices\n");
1945  return AVERROR_INVALIDDATA;
1946  }
1947 
1948  av_assert1(h->mb_num == h->mb_width * h->mb_height);
1949  if (sl->first_mb_addr << FIELD_OR_MBAFF_PICTURE(h) >= h->mb_num ||
1950  sl->first_mb_addr >= h->mb_num) {
1951  av_log(h->avctx, AV_LOG_ERROR, "first_mb_in_slice overflow\n");
1952  return AVERROR_INVALIDDATA;
1953  }
/* The shift amount and the bottom-field adjustment guard (lines
 * 1956-1957) are missing from the extraction. */
1954  sl->resync_mb_x = sl->mb_x = sl->first_mb_addr % h->mb_width;
1955  sl->resync_mb_y = sl->mb_y = (sl->first_mb_addr / h->mb_width) <<
1958  sl->resync_mb_y = sl->mb_y = sl->mb_y + 1;
1959  av_assert1(sl->mb_y < h->mb_height);
1960 
1961  ret = ff_h264_build_ref_list(h, sl);
1962  if (ret < 0)
1963  return ret;
1964 
/* Implicit weighted bipred: one table for frame mode, two extra (per
 * field parity) under MBAFF. */
1965  if (h->ps.pps->weighted_bipred_idc == 2 &&
1967  implicit_weight_table(h, sl, -1);
1968  if (FRAME_MBAFF(h)) {
1969  implicit_weight_table(h, sl, 0);
1970  implicit_weight_table(h, sl, 1);
1971  }
1972  }
1973 
1976  if (!h->setup_finished)
1978 
/* The middle alternatives of this skip_loop_filter condition (lines
 * 1980-1986) are missing from the extraction. */
1979  if (h->avctx->skip_loop_filter >= AVDISCARD_ALL ||
1987  nal->ref_idc == 0))
1988  sl->deblocking_filter = 0;
1989 
1990  if (sl->deblocking_filter == 1 && h->nb_slice_ctx > 1) {
1991  if (h->avctx->flags2 & AV_CODEC_FLAG2_FAST) {
1992  /* Cheat slightly for speed:
1993  * Do not bother to deblock across slices. */
1994  sl->deblocking_filter = 2;
1995  } else {
1996  h->postpone_filter = 1;
1997  }
1998  }
1999  sl->qp_thresh = 15 -
2001  FFMAX3(0,
2002  h->ps.pps->chroma_qp_index_offset[0],
2003  h->ps.pps->chroma_qp_index_offset[1]) +
2004  6 * (h->ps.sps->bit_depth_luma - 8);
2005 
2006  sl->slice_num = ++h->current_slice;
2007 
2008  if (sl->slice_num)
2009  h->slice_row[(sl->slice_num-1)&(MAX_SLICES-1)]= sl->resync_mb_y;
2010  if ( h->slice_row[sl->slice_num&(MAX_SLICES-1)] + 3 >= sl->resync_mb_y
2011  && h->slice_row[sl->slice_num&(MAX_SLICES-1)] <= sl->resync_mb_y
2012  && sl->slice_num >= MAX_SLICES) {
2013  //in case of ASO this check needs to be updated depending on how we decide to assign slice numbers in this case
2014  av_log(h->avctx, AV_LOG_WARNING, "Possibly too many slices (%d >= %d), increase MAX_SLICES and recompile if there are artifacts\n", sl->slice_num, MAX_SLICES);
2015  }
2016 
/* Build the ref2frm lookup: map each reference-list entry to its slot in
 * the short/long reference arrays (60 = "not found" sentinel), encoded
 * as 4*id + reference-field bits, for both frame and field indexing. */
2017  for (j = 0; j < 2; j++) {
2018  int id_list[16];
2019  int *ref2frm = h->ref2frm[sl->slice_num & (MAX_SLICES - 1)][j];
2020  for (i = 0; i < 16; i++) {
2021  id_list[i] = 60;
2022  if (j < sl->list_count && i < sl->ref_count[j] &&
2023  sl->ref_list[j][i].parent->f->buf[0]) {
2024  int k;
2025  AVBuffer *buf = sl->ref_list[j][i].parent->f->buf[0]->buffer;
2026  for (k = 0; k < h->short_ref_count; k++)
2027  if (h->short_ref[k]->f->buf[0]->buffer == buf) {
2028  id_list[i] = k;
2029  break;
2030  }
2031  for (k = 0; k < h->long_ref_count; k++)
2032  if (h->long_ref[k] && h->long_ref[k]->f->buf[0]->buffer == buf) {
2033  id_list[i] = h->short_ref_count + k;
2034  break;
2035  }
2036  }
2037  }
2038 
2039  ref2frm[0] =
2040  ref2frm[1] = -1;
2041  for (i = 0; i < 16; i++)
2042  ref2frm[i + 2] = 4 * id_list[i] + (sl->ref_list[j][i].reference & 3);
2043  ref2frm[18 + 0] =
2044  ref2frm[18 + 1] = -1;
2045  for (i = 16; i < 48; i++)
2046  ref2frm[i + 4] = 4 * id_list[(i - 16) >> 1] +
2047  (sl->ref_list[j][i].reference & 3);
2048  }
2049 
2050  if (h->avctx->debug & FF_DEBUG_PICT_INFO) {
2052  "slice:%d %s mb:%d %c%s%s frame:%d poc:%d/%d ref:%d/%d qp:%d loop:%d:%d:%d weight:%d%s %s\n",
2053  sl->slice_num,
2054  (h->picture_structure == PICT_FRAME ? "F" : h->picture_structure == PICT_TOP_FIELD ? "T" : "B"),
2055  sl->mb_y * h->mb_width + sl->mb_x,
2057  sl->slice_type_fixed ? " fix" : "",
2058  nal->type == H264_NAL_IDR_SLICE ? " IDR" : "",
2059  h->poc.frame_num,
2060  h->cur_pic_ptr->field_poc[0],
2061  h->cur_pic_ptr->field_poc[1],
2062  sl->ref_count[0], sl->ref_count[1],
2063  sl->qscale,
2064  sl->deblocking_filter,
2066  sl->pwt.use_weight,
2067  sl->pwt.use_weight == 1 && sl->pwt.use_weight_chroma ? "c" : "",
2068  sl->slice_type == AV_PICTURE_TYPE_B ? (sl->direct_spatial_mv_pred ? "SPAT" : "TEMP") : "");
2069  }
2070 
2071  return 0;
2072 }
2073 
/*
 * Presumably ff_h264_queue_decode_slice() — the signature (lines
 * 2074/2076) is missing from this extraction, so the name should be
 * confirmed against h264dec.h. Parses one slice header, detects
 * field/frame boundaries (flushing queued slices and ending fields as
 * needed), validates PPS/SPS consistency between slices of one frame,
 * runs field/slice initialization and queues the slice context for
 * decoding. Returns 0 on success (including deliberately skipped or
 * redundant slices) or a negative AVERROR code.
 * NOTE(review): several hyperlinked lines are missing (e.g. the decode
 * call on 2106 and parts of the skip_frame condition on 2147-2149);
 * line numbers below jump accordingly.
 */
2075 {
2077  int first_slice = sl == h->slice_ctx && !h->current_slice;
2078  int ret;
2079 
2080  sl->gb = nal->gb;
2081 
2082  ret = h264_slice_header_parse(h, sl, nal);
2083  if (ret < 0)
2084  return ret;
2085 
2086  // discard redundant pictures
2087  if (sl->redundant_pic_count > 0) {
2088  sl->ref_count[0] = sl->ref_count[1] = 0;
2089  return 0;
2090  }
2091 
2092  if (sl->first_mb_addr == 0 || !h->current_slice) {
2093  if (h->setup_finished) {
2094  av_log(h->avctx, AV_LOG_ERROR, "Too many fields\n");
2095  return AVERROR_INVALIDDATA;
2096  }
2097  }
2098 
2099  if (sl->first_mb_addr == 0) { // FIXME better field boundary detection
2100  if (h->current_slice) {
2101  // this slice starts a new field
2102  // first decode any pending queued slices
2103  if (h->nb_slice_ctx_queued) {
2104  H264SliceContext tmp_ctx;
2105 
/* The decode call whose result is tested below (line 2106) is missing
 * from the extraction. */
2107  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
2108  return ret;
2109 
/* Swap the parsed slice into slot 0 so it becomes the first of the new
 * field's queue. */
2110  memcpy(&tmp_ctx, h->slice_ctx, sizeof(tmp_ctx));
2111  memcpy(h->slice_ctx, sl, sizeof(tmp_ctx));
2112  memcpy(sl, &tmp_ctx, sizeof(tmp_ctx));
2113  sl = h->slice_ctx;
2114  }
2115 
2116  if (h->cur_pic_ptr && FIELD_PICTURE(h) && h->first_field) {
2117  ret = ff_h264_field_end(h, h->slice_ctx, 1);
2118  if (ret < 0)
2119  return ret;
2120  } else if (h->cur_pic_ptr && !FIELD_PICTURE(h) && !h->first_field && h->nal_unit_type == H264_NAL_IDR_SLICE) {
2121  av_log(h, AV_LOG_WARNING, "Broken frame packetizing\n");
2122  ret = ff_h264_field_end(h, h->slice_ctx, 1);
2123  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 0);
2124  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 1);
2125  h->cur_pic_ptr = NULL;
2126  if (ret < 0)
2127  return ret;
2128  } else
2129  return AVERROR_INVALIDDATA;
2130  }
2131 
2132  if (!h->first_field) {
2133  if (h->cur_pic_ptr && !h->droppable) {
2136  }
2137  h->cur_pic_ptr = NULL;
2138  }
2139  }
2140 
2141  if (!h->current_slice)
2142  av_assert0(sl == h->slice_ctx);
2143 
/* Honor skip_frame before any picture is started for this frame. */
2144  if (h->current_slice == 0 && !h->first_field) {
2145  if (
2146  (h->avctx->skip_frame >= AVDISCARD_NONREF && !h->nal_ref_idc) ||
2150  h->avctx->skip_frame >= AVDISCARD_ALL) {
2151  return 0;
2152  }
2153  }
2154 
/* All slices of one frame must use a compatible PPS and the same SPS. */
2155  if (!first_slice) {
2156  const PPS *pps = (const PPS*)h->ps.pps_list[sl->pps_id]->data;
2157 
2158  if (h->ps.pps->sps_id != pps->sps_id ||
2159  h->ps.pps->transform_8x8_mode != pps->transform_8x8_mode /*||
2160  (h->setup_finished && h->ps.pps != pps)*/) {
2161  av_log(h->avctx, AV_LOG_ERROR, "PPS changed between slices\n");
2162  return AVERROR_INVALIDDATA;
2163  }
2164  if (h->ps.sps != (const SPS*)h->ps.sps_list[h->ps.pps->sps_id]->data) {
2166  "SPS changed in the middle of the frame\n");
2167  return AVERROR_INVALIDDATA;
2168  }
2169  }
2170 
2171  if (h->current_slice == 0) {
2172  ret = h264_field_start(h, sl, nal, first_slice);
2173  if (ret < 0)
2174  return ret;
2175  } else {
2176  if (h->picture_structure != sl->picture_structure ||
2177  h->droppable != (nal->ref_idc == 0)) {
2179  "Changing field mode (%d -> %d) between slices is not allowed\n",
2181  return AVERROR_INVALIDDATA;
2182  } else if (!h->cur_pic_ptr) {
2184  "unset cur_pic_ptr on slice %d\n",
2185  h->current_slice + 1);
2186  return AVERROR_INVALIDDATA;
2187  }
2188  }
2189 
2190  ret = h264_slice_init(h, sl, nal);
2191  if (ret < 0)
2192  return ret;
2193 
2194  h->nb_slice_ctx_queued++;
2195 
2196  return 0;
2197 }
2198 
/* Body of the slice-type mapping helper -- the signature line is elided in
 * this listing.  Maps an AVPictureType in sl->slice_type to the H.264
 * slice_type code (P=0, B=1, I=2, SP=3, SI=4); any other value is treated
 * as invalid input. */
2200 {
2201  switch (sl->slice_type) {
2202  case AV_PICTURE_TYPE_P:
2203  return 0;
2204  case AV_PICTURE_TYPE_B:
2205  return 1;
2206  case AV_PICTURE_TYPE_I:
2207  return 2;
2208  case AV_PICTURE_TYPE_SP:
2209  return 3;
2210  case AV_PICTURE_TYPE_SI:
2211  return 4;
2212  default:
/* Not a valid H.264 slice type. */
2213  return AVERROR_INVALIDDATA;
2214  }
2215 }
2216 
/* Parameter list and body of fill_filter_caches_inter() -- the first
 * signature line is elided in this listing.  Fills sl->mv_cache and
 * sl->ref_cache for one reference list with the current MB's and the
 * top/left neighbours' motion vectors and reference indices, as needed by
 * the loop filter.  ref2frm remaps per-slice reference indices to frame
 * numbers so neighbours from different slices compare correctly. */
2218  H264SliceContext *sl,
2219  int mb_type, int top_xy,
2220  int left_xy[LEFT_MBS],
2221  int top_type,
2222  int left_type[LEFT_MBS],
2223  int mb_xy, int list)
2224 {
2225  int b_stride = h->b_stride;
2226  int16_t(*mv_dst)[2] = &sl->mv_cache[list][scan8[0]];
2227  int8_t *ref_cache = &sl->ref_cache[list][scan8[0]];
2228  if (IS_INTER(mb_type) || IS_DIRECT(mb_type)) {
/* Top neighbour: fill cache row -1 (offset -8). */
2229  if (USES_LIST(top_type, list)) {
2230  const int b_xy = h->mb2b_xy[top_xy] + 3 * b_stride;
2231  const int b8_xy = 4 * top_xy + 2;
2232  const int *ref2frm = &h->ref2frm[h->slice_table[top_xy] & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2233  AV_COPY128(mv_dst - 1 * 8, h->cur_pic.motion_val[list][b_xy + 0]);
2234  ref_cache[0 - 1 * 8] =
2235  ref_cache[1 - 1 * 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 0]];
2236  ref_cache[2 - 1 * 8] =
2237  ref_cache[3 - 1 * 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 1]];
2238  } else {
/* Top neighbour does not use this list: zero MVs, mark refs unused. */
2239  AV_ZERO128(mv_dst - 1 * 8);
2240  AV_WN32A(&ref_cache[0 - 1 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2241  }
2242 
/* Left neighbour: only when current and left MB share the same
 * interlacing mode. */
2243  if (!IS_INTERLACED(mb_type ^ left_type[LTOP])) {
2244  if (USES_LIST(left_type[LTOP], list)) {
2245  const int b_xy = h->mb2b_xy[left_xy[LTOP]] + 3;
2246  const int b8_xy = 4 * left_xy[LTOP] + 1;
2247  const int *ref2frm = &h->ref2frm[h->slice_table[left_xy[LTOP]] & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2248  AV_COPY32(mv_dst - 1 + 0, h->cur_pic.motion_val[list][b_xy + b_stride * 0]);
2249  AV_COPY32(mv_dst - 1 + 8, h->cur_pic.motion_val[list][b_xy + b_stride * 1]);
2250  AV_COPY32(mv_dst - 1 + 16, h->cur_pic.motion_val[list][b_xy + b_stride * 2]);
2251  AV_COPY32(mv_dst - 1 + 24, h->cur_pic.motion_val[list][b_xy + b_stride * 3]);
2252  ref_cache[-1 + 0] =
2253  ref_cache[-1 + 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 2 * 0]];
2254  ref_cache[-1 + 16] =
2255  ref_cache[-1 + 24] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 2 * 1]];
2256  } else {
2257  AV_ZERO32(mv_dst - 1 + 0);
2258  AV_ZERO32(mv_dst - 1 + 8);
2259  AV_ZERO32(mv_dst - 1 + 16);
2260  AV_ZERO32(mv_dst - 1 + 24);
2261  ref_cache[-1 + 0] =
2262  ref_cache[-1 + 8] =
2263  ref_cache[-1 + 16] =
2264  ref_cache[-1 + 24] = LIST_NOT_USED;
2265  }
2266  }
2267  }
2268 
/* Current MB does not use this list: fill the whole 4x4 cache with zero
 * MVs and "unused" reference indices, then we are done. */
2269  if (!USES_LIST(mb_type, list)) {
2270  fill_rectangle(mv_dst, 4, 4, 8, pack16to32(0, 0), 4);
2271  AV_WN32A(&ref_cache[0 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2272  AV_WN32A(&ref_cache[1 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2273  AV_WN32A(&ref_cache[2 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2274  AV_WN32A(&ref_cache[3 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2275  return;
2276  }
2277 
/* Replicate the four 8x8-block reference indices across the 4x4 cache:
 * each packed 32-bit word covers one cache row of four 4x4 blocks. */
2278  {
2279  int8_t *ref = &h->cur_pic.ref_index[list][4 * mb_xy];
2280  const int *ref2frm = &h->ref2frm[sl->slice_num & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2281  uint32_t ref01 = (pack16to32(ref2frm[ref[0]], ref2frm[ref[1]]) & 0x00FF00FF) * 0x0101;
2282  uint32_t ref23 = (pack16to32(ref2frm[ref[2]], ref2frm[ref[3]]) & 0x00FF00FF) * 0x0101;
2283  AV_WN32A(&ref_cache[0 * 8], ref01);
2284  AV_WN32A(&ref_cache[1 * 8], ref01);
2285  AV_WN32A(&ref_cache[2 * 8], ref23);
2286  AV_WN32A(&ref_cache[3 * 8], ref23);
2287  }
2288 
/* Copy the current MB's 4x4 motion vectors, one cache row at a time. */
2289  {
2290  int16_t(*mv_src)[2] = &h->cur_pic.motion_val[list][4 * sl->mb_x + 4 * sl->mb_y * b_stride];
2291  AV_COPY128(mv_dst + 8 * 0, mv_src + 0 * b_stride);
2292  AV_COPY128(mv_dst + 8 * 1, mv_src + 1 * b_stride);
2293  AV_COPY128(mv_dst + 8 * 2, mv_src + 2 * b_stride);
2294  AV_COPY128(mv_dst + 8 * 3, mv_src + 3 * b_stride);
2295  }
2296 }
2297 
2298 /**
2299  * @return non zero if the loop filter can be skipped
2300  */
/* Prepare the per-MB neighbour caches (types, motion, refs, non-zero
 * coefficient counts) consumed by the deblocking filter.  Returns non-zero
 * when filtering can be skipped entirely for this MB (low-qp shortcut). */
2301 static int fill_filter_caches(const H264Context *h, H264SliceContext *sl, int mb_type)
2302 {
2303  const int mb_xy = sl->mb_xy;
2304  int top_xy, left_xy[LEFT_MBS];
2305  int top_type, left_type[LEFT_MBS];
2306  uint8_t *nnz;
2307  uint8_t *nnz_cache;
2308 
/* MB above: one row up, or two rows in field-decoded (MB_FIELD) mode. */
2309  top_xy = mb_xy - (h->mb_stride << MB_FIELD(sl));
2310 
2311  left_xy[LBOT] = left_xy[LTOP] = mb_xy - 1;
/* MBAFF frames: adjust the neighbour addresses according to the field
 * flags of the current and neighbouring MB pairs. */
2312  if (FRAME_MBAFF(h)) {
2313  const int left_mb_field_flag = IS_INTERLACED(h->cur_pic.mb_type[mb_xy - 1]);
2314  const int curr_mb_field_flag = IS_INTERLACED(mb_type);
2315  if (sl->mb_y & 1) {
2316  if (left_mb_field_flag != curr_mb_field_flag)
2317  left_xy[LTOP] -= h->mb_stride;
2318  } else {
2319  if (curr_mb_field_flag)
/* Branch-free: adds mb_stride only when the top MB is not interlaced. */
2320  top_xy += h->mb_stride &
2321  (((h->cur_pic.mb_type[top_xy] >> 7) & 1) - 1);
2322  if (left_mb_field_flag != curr_mb_field_flag)
2323  left_xy[LBOT] += h->mb_stride;
2324  }
2325  }
2326 
2327  sl->top_mb_xy = top_xy;
2328  sl->left_mb_xy[LTOP] = left_xy[LTOP];
2329  sl->left_mb_xy[LBOT] = left_xy[LBOT];
2330  {
2331  /* For sufficiently low qp, filtering wouldn't do anything.
2332  * This is a conservative estimate: could also check beta_offset
2333  * and more accurate chroma_qp. */
2334  int qp_thresh = sl->qp_thresh; // FIXME strictly we should store qp_thresh for each mb of a slice
2335  int qp = h->cur_pic.qscale_table[mb_xy];
2336  if (qp <= qp_thresh &&
2337  (left_xy[LTOP] < 0 ||
2338  ((qp + h->cur_pic.qscale_table[left_xy[LTOP]] + 1) >> 1) <= qp_thresh) &&
2339  (top_xy < 0 ||
2340  ((qp + h->cur_pic.qscale_table[top_xy] + 1) >> 1) <= qp_thresh)) {
2341  if (!FRAME_MBAFF(h))
2342  return 1;
/* MBAFF also requires the bottom-left and the MB two rows up to pass. */
2343  if ((left_xy[LTOP] < 0 ||
2344  ((qp + h->cur_pic.qscale_table[left_xy[LBOT]] + 1) >> 1) <= qp_thresh) &&
2345  (top_xy < h->mb_stride ||
2346  ((qp + h->cur_pic.qscale_table[top_xy - h->mb_stride] + 1) >> 1) <= qp_thresh))
2347  return 1;
2348  }
2349  }
2350 
2351  top_type = h->cur_pic.mb_type[top_xy];
2352  left_type[LTOP] = h->cur_pic.mb_type[left_xy[LTOP]];
2353  left_type[LBOT] = h->cur_pic.mb_type[left_xy[LBOT]];
/* deblocking_filter == 2: do not filter across slice boundaries, so drop
 * neighbours from other slices; otherwise only drop neighbours outside
 * any slice (slice_table entry 0xFFFF). */
2354  if (sl->deblocking_filter == 2) {
2355  if (h->slice_table[top_xy] != sl->slice_num)
2356  top_type = 0;
2357  if (h->slice_table[left_xy[LBOT]] != sl->slice_num)
2358  left_type[LTOP] = left_type[LBOT] = 0;
2359  } else {
2360  if (h->slice_table[top_xy] == 0xFFFF)
2361  top_type = 0;
2362  if (h->slice_table[left_xy[LBOT]] == 0xFFFF)
2363  left_type[LTOP] = left_type[LBOT] = 0;
2364  }
2365  sl->top_type = top_type;
2366  sl->left_type[LTOP] = left_type[LTOP];
2367  sl->left_type[LBOT] = left_type[LBOT];
2368 
/* Intra MBs need no motion/reference caches for the filter. */
2369  if (IS_INTRA(mb_type))
2370  return 0;
2371 
2372  fill_filter_caches_inter(h, sl, mb_type, top_xy, left_xy,
2373  top_type, left_type, mb_xy, 0);
2374  if (sl->list_count == 2)
2375  fill_filter_caches_inter(h, sl, mb_type, top_xy, left_xy,
2376  top_type, left_type, mb_xy, 1);
2377 
/* Load the current MB's non-zero-coefficient counts into the cache. */
2378  nnz = h->non_zero_count[mb_xy];
2379  nnz_cache = sl->non_zero_count_cache;
2380  AV_COPY32(&nnz_cache[4 + 8 * 1], &nnz[0]);
2381  AV_COPY32(&nnz_cache[4 + 8 * 2], &nnz[4]);
2382  AV_COPY32(&nnz_cache[4 + 8 * 3], &nnz[8]);
2383  AV_COPY32(&nnz_cache[4 + 8 * 4], &nnz[12]);
2384  sl->cbp = h->cbp_table[mb_xy];
2385 
/* Bottom row of the top neighbour and right column of the left
 * neighbour complete the cache borders. */
2386  if (top_type) {
2387  nnz = h->non_zero_count[top_xy];
2388  AV_COPY32(&nnz_cache[4 + 8 * 0], &nnz[3 * 4]);
2389  }
2390 
2391  if (left_type[LTOP]) {
2392  nnz = h->non_zero_count[left_xy[LTOP]];
2393  nnz_cache[3 + 8 * 1] = nnz[3 + 0 * 4];
2394  nnz_cache[3 + 8 * 2] = nnz[3 + 1 * 4];
2395  nnz_cache[3 + 8 * 3] = nnz[3 + 2 * 4];
2396  nnz_cache[3 + 8 * 4] = nnz[3 + 3 * 4];
2397  }
2398 
2399  /* CAVLC 8x8dct requires NNZ values for residual decoding that differ
2400  * from what the loop filter needs */
2401  if (!CABAC(h) && h->ps.pps->transform_8x8_mode) {
/* Rebuild the filter's view of NNZ from the cbp bits instead. */
2402  if (IS_8x8DCT(top_type)) {
2403  nnz_cache[4 + 8 * 0] =
2404  nnz_cache[5 + 8 * 0] = (h->cbp_table[top_xy] & 0x4000) >> 12;
2405  nnz_cache[6 + 8 * 0] =
2406  nnz_cache[7 + 8 * 0] = (h->cbp_table[top_xy] & 0x8000) >> 12;
2407  }
2408  if (IS_8x8DCT(left_type[LTOP])) {
2409  nnz_cache[3 + 8 * 1] =
2410  nnz_cache[3 + 8 * 2] = (h->cbp_table[left_xy[LTOP]] & 0x2000) >> 12; // FIXME check MBAFF
2411  }
2412  if (IS_8x8DCT(left_type[LBOT])) {
2413  nnz_cache[3 + 8 * 3] =
2414  nnz_cache[3 + 8 * 4] = (h->cbp_table[left_xy[LBOT]] & 0x8000) >> 12; // FIXME check MBAFF
2415  }
2416 
/* Current MB: one cbp bit per 8x8 quadrant spreads to its four 4x4
 * cache slots. */
2417  if (IS_8x8DCT(mb_type)) {
2418  nnz_cache[scan8[0]] =
2419  nnz_cache[scan8[1]] =
2420  nnz_cache[scan8[2]] =
2421  nnz_cache[scan8[3]] = (sl->cbp & 0x1000) >> 12;
2422 
2423  nnz_cache[scan8[0 + 4]] =
2424  nnz_cache[scan8[1 + 4]] =
2425  nnz_cache[scan8[2 + 4]] =
2426  nnz_cache[scan8[3 + 4]] = (sl->cbp & 0x2000) >> 12;
2427 
2428  nnz_cache[scan8[0 + 8]] =
2429  nnz_cache[scan8[1 + 8]] =
2430  nnz_cache[scan8[2 + 8]] =
2431  nnz_cache[scan8[3 + 8]] = (sl->cbp & 0x4000) >> 12;
2432 
2433  nnz_cache[scan8[0 + 12]] =
2434  nnz_cache[scan8[1 + 12]] =
2435  nnz_cache[scan8[2 + 12]] =
2436  nnz_cache[scan8[3 + 12]] = (sl->cbp & 0x8000) >> 12;
2437  }
2438  }
2439 
2440  return 0;
2441 }
2442 
/* Run the in-loop deblocking filter over MB columns [start_x, end_x) of
 * the current MB row (and the paired row when FRAME_MBAFF).  No-op when
 * filtering is postponed or disabled for this slice. */
2443 static void loop_filter(const H264Context *h, H264SliceContext *sl, int start_x, int end_x)
2444 {
2445  uint8_t *dest_y, *dest_cb, *dest_cr;
2446  int linesize, uvlinesize, mb_x, mb_y;
2447  const int end_mb_y = sl->mb_y + FRAME_MBAFF(h);
2448  const int old_slice_type = sl->slice_type;
2449  const int pixel_shift = h->pixel_shift;
2450  const int block_h = 16 >> h->chroma_y_shift;
2451 
2452  if (h->postpone_filter)
2453  return;
2454 
2455  if (sl->deblocking_filter) {
2456  for (mb_x = start_x; mb_x < end_x; mb_x++)
/* In MBAFF mode filter both MBs of the vertical pair. */
2457  for (mb_y = end_mb_y - FRAME_MBAFF(h); mb_y <= end_mb_y; mb_y++) {
2458  int mb_xy, mb_type;
2459  mb_xy = sl->mb_xy = mb_x + mb_y * h->mb_stride;
2460  mb_type = h->cur_pic.mb_type[mb_xy];
2461 
2462  if (FRAME_MBAFF(h))
2463  sl->mb_mbaff =
2464  sl->mb_field_decoding_flag = !!IS_INTERLACED(mb_type);
2465 
2466  sl->mb_x = mb_x;
2467  sl->mb_y = mb_y;
/* Compute the luma/chroma destination pointers for this MB. */
2468  dest_y = h->cur_pic.f->data[0] +
2469  ((mb_x << pixel_shift) + mb_y * sl->linesize) * 16;
2470  dest_cb = h->cur_pic.f->data[1] +
2471  (mb_x << pixel_shift) * (8 << CHROMA444(h)) +
2472  mb_y * sl->uvlinesize * block_h;
2473  dest_cr = h->cur_pic.f->data[2] +
2474  (mb_x << pixel_shift) * (8 << CHROMA444(h)) +
2475  mb_y * sl->uvlinesize * block_h;
2476  // FIXME simplify above
2477 
/* Field MBs interleave lines: double the stride and, for the bottom
 * field, step the pointers back up to the field's first line. */
2478  if (MB_FIELD(sl)) {
2479  linesize = sl->mb_linesize = sl->linesize * 2;
2480  uvlinesize = sl->mb_uvlinesize = sl->uvlinesize * 2;
2481  if (mb_y & 1) { // FIXME move out of this function?
2482  dest_y -= sl->linesize * 15;
2483  dest_cb -= sl->uvlinesize * (block_h - 1);
2484  dest_cr -= sl->uvlinesize * (block_h - 1);
2485  }
2486  } else {
2487  linesize = sl->mb_linesize = sl->linesize;
2488  uvlinesize = sl->mb_uvlinesize = sl->uvlinesize;
2489  }
2490  backup_mb_border(h, sl, dest_y, dest_cb, dest_cr, linesize,
2491  uvlinesize, 0);
/* Non-zero return means filtering is a no-op for this MB. */
2492  if (fill_filter_caches(h, sl, mb_type))
2493  continue;
2494  sl->chroma_qp[0] = get_chroma_qp(h->ps.pps, 0, h->cur_pic.qscale_table[mb_xy]);
2495  sl->chroma_qp[1] = get_chroma_qp(h->ps.pps, 1, h->cur_pic.qscale_table[mb_xy]);
2496 
2497  if (FRAME_MBAFF(h)) {
2498  ff_h264_filter_mb(h, sl, mb_x, mb_y, dest_y, dest_cb, dest_cr,
2499  linesize, uvlinesize);
2500  } else {
2501  ff_h264_filter_mb_fast(h, sl, mb_x, mb_y, dest_y, dest_cb,
2502  dest_cr, linesize, uvlinesize);
2503  }
2504  }
2505  }
/* Restore state the loop above clobbered, for the caller's benefit. */
2506  sl->slice_type = old_slice_type;
2507  sl->mb_x = end_x;
2508  sl->mb_y = end_mb_y - FRAME_MBAFF(h);
2509  sl->chroma_qp[0] = get_chroma_qp(h->ps.pps, 0, sl->qscale);
2510  sl->chroma_qp[1] = get_chroma_qp(h->ps.pps, 1, sl->qscale);
2511 }
2512 
/* Body of predict_field_decoding_flag() -- the signature line is elided in
 * this listing.  Predicts the current MB's field decoding flag from the
 * left neighbour when it belongs to the same slice, else from the top
 * neighbour, else defaults to frame (0). */
2514 {
2515  const int mb_xy = sl->mb_x + sl->mb_y * h->mb_stride;
2516  int mb_type = (h->slice_table[mb_xy - 1] == sl->slice_num) ?
2517  h->cur_pic.mb_type[mb_xy - 1] :
2518  (h->slice_table[mb_xy - h->mb_stride] == sl->slice_num) ?
2519  h->cur_pic.mb_type[mb_xy - h->mb_stride] : 0;
2520  sl->mb_mbaff = sl->mb_field_decoding_flag = IS_INTERLACED(mb_type) ? 1 : 0;
2521 }
2522 
2523 /**
2524  * Draw edges and report progress for the last MB row.
2525  */
/* Body of decode_finish_row() -- the signature line is elided in this
 * listing.  Draws the just-finished horizontal band and reports decoding
 * progress to other frame threads. */
2527 {
2528  int top = 16 * (sl->mb_y >> FIELD_PICTURE(h));
2529  int pic_height = 16 * h->mb_height >> FIELD_PICTURE(h);
2530  int height = 16 << FRAME_MBAFF(h);
2531  int deblock_border = (16 + 4) << FRAME_MBAFF(h);
2532 
/* The deblocking filter can still touch deblock_border pixels above this
 * row, so widen the reported band accordingly. */
2533  if (sl->deblocking_filter) {
2534  if ((top + height) >= pic_height)
2535  height += deblock_border;
2536  top -= deblock_border;
2537  }
2538 
2539  if (top >= pic_height || (top + height) < 0)
2540  return;
2541 
/* Clip the band to the picture bounds. */
2542  height = FFMIN(height, pic_height - top);
2543  if (top < 0) {
2544  height = top + height;
2545  top = 0;
2546  }
2547 
2548  ff_h264_draw_horiz_band(h, sl, top, height);
2549 
/* No progress reports for droppable frames or after a decode error. */
2550  if (h->droppable || sl->h264->slice_ctx[0].er.error_occurred)
2551  return;
2552 
2553  ff_thread_report_progress(&h->cur_pic_ptr->tf, top + height - 1,
/* NOTE(review): the call's final argument (listing line 2554) is missing
 * from this excerpt. */
2555 }
2556 
/* Parameter list and body of er_add_slice() -- the first signature line is
 * elided in this listing.  Records a decoded/errored slice region with the
 * error-resilience machinery when error concealment is enabled. */
2558  int startx, int starty,
2559  int endx, int endy, int status)
2560 {
2561  if (!sl->h264->enable_er)
2562  return;
2563 
2564  if (CONFIG_ERROR_RESILIENCE) {
/* The shared ER context lives on slice_ctx[0]. */
2565  ERContext *er = &sl->h264->slice_ctx[0].er;
2566 
2567  ff_er_add_slice(er, startx, starty, endx, endy, status);
2568  }
2569 }
2570 
/* Decode one slice (thread entry point for avctx->execute): sets up
 * scratch buffers and strides, then runs either the CABAC or the CAVLC
 * macroblock decode loop until the slice ends, applying the loop filter
 * row by row and reporting progress/errors.  Returns 0 on success or a
 * negative AVERROR code.
 * NOTE(review): listing lines 2596, 2617, 2657, 2674, 2716 and 2731 are
 * missing from this excerpt; comments describe only the visible code. */
2571 static int decode_slice(struct AVCodecContext *avctx, void *arg)
2572 {
2573  H264SliceContext *sl = arg;
2574  const H264Context *h = sl->h264;
2575  int lf_x_start = sl->mb_x;
2576  int orig_deblock = sl->deblocking_filter;
2577  int ret;
2578 
2579  sl->linesize = h->cur_pic_ptr->f->linesize[0];
2580  sl->uvlinesize = h->cur_pic_ptr->f->linesize[1];
2581 
2582  ret = alloc_scratch_buffers(sl, sl->linesize);
2583  if (ret < 0)
2584  return ret;
2585 
2586  sl->mb_skip_run = -1;
2587 
2588  av_assert0(h->block_offset[15] == (4 * ((scan8[15] - scan8[0]) & 7) << h->pixel_shift) + 4 * sl->linesize * ((scan8[15] - scan8[0]) >> 3));
2589 
/* Filtering is deferred to ff_h264_execute_decode_slices() when
 * postponed; disable it locally for this pass. */
2590  if (h->postpone_filter)
2591  sl->deblocking_filter = 0;
2592 
2593  sl->is_complex = FRAME_MBAFF(h) || h->picture_structure != PICT_FRAME ||
2594  (CONFIG_GRAY && (h->flags & AV_CODEC_FLAG_GRAY));
2595 
/* NOTE(review): the guarding condition for this block (listing line 2596)
 * is missing from this excerpt; the body checks whether the MBs preceding
 * the resync point finished cleanly and flags an ER error otherwise. */
2597  const int start_i = av_clip(sl->resync_mb_x + sl->resync_mb_y * h->mb_width, 0, h->mb_num - 1);
2598  if (start_i) {
2599  int prev_status = h->slice_ctx[0].er.error_status_table[h->slice_ctx[0].er.mb_index2xy[start_i - 1]];
2600  prev_status &= ~ VP_START;
2601  if (prev_status != (ER_MV_END | ER_DC_END | ER_AC_END))
2602  h->slice_ctx[0].er.error_occurred = 1;
2603  }
2604  }
2605 
/* ---- CABAC entropy decoding path ---- */
2606  if (h->ps.pps->cabac) {
2607  /* realign */
2608  align_get_bits(&sl->gb);
2609 
2610  /* init cabac */
2611  ret = ff_init_cabac_decoder(&sl->cabac,
2612  sl->gb.buffer + get_bits_count(&sl->gb) / 8,
2613  (get_bits_left(&sl->gb) + 7) / 8);
2614  if (ret < 0)
2615  return ret;
2616 
/* NOTE(review): listing line 2617 (CABAC state initialization) is
 * missing from this excerpt. */
2618 
2619  for (;;) {
2620  int ret, eos;
/* Guard against a slice running into the next queued slice's MBs. */
2621  if (sl->mb_x + sl->mb_y * h->mb_width >= sl->next_slice_idx) {
2622  av_log(h->avctx, AV_LOG_ERROR, "Slice overlaps with next at %d\n",
2623  sl->next_slice_idx);
2624  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2625  sl->mb_y, ER_MB_ERROR);
2626  return AVERROR_INVALIDDATA;
2627  }
2628 
2629  ret = ff_h264_decode_mb_cabac(h, sl);
2630 
2631  if (ret >= 0)
2632  ff_h264_hl_decode_mb(h, sl);
2633 
2634  // FIXME optimal? or let mb_decode decode 16x32 ?
2635  if (ret >= 0 && FRAME_MBAFF(h)) {
2636  sl->mb_y++;
2637 
2638  ret = ff_h264_decode_mb_cabac(h, sl);
2639 
2640  if (ret >= 0)
2641  ff_h264_hl_decode_mb(h, sl);
2642  sl->mb_y--;
2643  }
2644  eos = get_cabac_terminate(&sl->cabac);
2645 
/* Workaround for encoders that truncate the final slice bytes. */
2646  if ((h->workaround_bugs & FF_BUG_TRUNCATED) &&
2647  sl->cabac.bytestream > sl->cabac.bytestream_end + 2) {
2648  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x - 1,
2649  sl->mb_y, ER_MB_END);
2650  if (sl->mb_x >= lf_x_start)
2651  loop_filter(h, sl, lf_x_start, sl->mb_x + 1);
2652  goto finish;
2653  }
2654  if (sl->cabac.bytestream > sl->cabac.bytestream_end + 2 )
2655  av_log(h->avctx, AV_LOG_DEBUG, "bytestream overread %"PTRDIFF_SPECIFIER"\n", sl->cabac.bytestream_end - sl->cabac.bytestream);
2656  if (ret < 0 || sl->cabac.bytestream > sl->cabac.bytestream_end + 4) {
/* NOTE(review): the av_log() call's first line (listing line 2657) is
 * missing from this excerpt. */
2658  "error while decoding MB %d %d, bytestream %"PTRDIFF_SPECIFIER"\n",
2659  sl->mb_x, sl->mb_y,
2660  sl->cabac.bytestream_end - sl->cabac.bytestream);
2661  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2662  sl->mb_y, ER_MB_ERROR);
2663  return AVERROR_INVALIDDATA;
2664  }
2665 
/* End of MB row: filter it, report progress, advance to the next row. */
2666  if (++sl->mb_x >= h->mb_width) {
2667  loop_filter(h, sl, lf_x_start, sl->mb_x);
2668  sl->mb_x = lf_x_start = 0;
2669  decode_finish_row(h, sl);
2670  ++sl->mb_y;
2671  if (FIELD_OR_MBAFF_PICTURE(h)) {
2672  ++sl->mb_y;
2673  if (FRAME_MBAFF(h) && sl->mb_y < h->mb_height)
/* NOTE(review): listing line 2674 (presumably the
 * predict_field_decoding_flag() call -- confirm upstream) is missing. */
2675  }
2676  }
2677 
2678  if (eos || sl->mb_y >= h->mb_height) {
2679  ff_tlog(h->avctx, "slice end %d %d\n",
2680  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2681  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x - 1,
2682  sl->mb_y, ER_MB_END);
2683  if (sl->mb_x > lf_x_start)
2684  loop_filter(h, sl, lf_x_start, sl->mb_x);
2685  goto finish;
2686  }
2687  }
/* ---- CAVLC entropy decoding path ---- */
2688  } else {
2689  for (;;) {
2690  int ret;
2691 
2692  if (sl->mb_x + sl->mb_y * h->mb_width >= sl->next_slice_idx) {
2693  av_log(h->avctx, AV_LOG_ERROR, "Slice overlaps with next at %d\n",
2694  sl->next_slice_idx);
2695  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2696  sl->mb_y, ER_MB_ERROR);
2697  return AVERROR_INVALIDDATA;
2698  }
2699 
2700  ret = ff_h264_decode_mb_cavlc(h, sl);
2701 
2702  if (ret >= 0)
2703  ff_h264_hl_decode_mb(h, sl);
2704 
2705  // FIXME optimal? or let mb_decode decode 16x32 ?
2706  if (ret >= 0 && FRAME_MBAFF(h)) {
2707  sl->mb_y++;
2708  ret = ff_h264_decode_mb_cavlc(h, sl);
2709 
2710  if (ret >= 0)
2711  ff_h264_hl_decode_mb(h, sl);
2712  sl->mb_y--;
2713  }
2714 
2715  if (ret < 0) {
/* NOTE(review): the av_log() call's first line (listing line 2716) is
 * missing from this excerpt. */
2717  "error while decoding MB %d %d\n", sl->mb_x, sl->mb_y);
2718  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2719  sl->mb_y, ER_MB_ERROR);
2720  return ret;
2721  }
2722 
2723  if (++sl->mb_x >= h->mb_width) {
2724  loop_filter(h, sl, lf_x_start, sl->mb_x);
2725  sl->mb_x = lf_x_start = 0;
2726  decode_finish_row(h, sl);
2727  ++sl->mb_y;
2728  if (FIELD_OR_MBAFF_PICTURE(h)) {
2729  ++sl->mb_y;
2730  if (FRAME_MBAFF(h) && sl->mb_y < h->mb_height)
/* NOTE(review): listing line 2731 (presumably the
 * predict_field_decoding_flag() call -- confirm upstream) is missing. */
2732  }
2733  if (sl->mb_y >= h->mb_height) {
2734  ff_tlog(h->avctx, "slice end %d %d\n",
2735  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2736 
/* Leftover bits are tolerated unless AV_EF_AGGRESSIVE is set. */
2737  if ( get_bits_left(&sl->gb) == 0
2738  || get_bits_left(&sl->gb) > 0 && !(h->avctx->err_recognition & AV_EF_AGGRESSIVE)) {
2739  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2740  sl->mb_x - 1, sl->mb_y, ER_MB_END);
2741 
2742  goto finish;
2743  } else {
2744  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2745  sl->mb_x, sl->mb_y, ER_MB_END);
2746 
2747  return AVERROR_INVALIDDATA;
2748  }
2749  }
2750  }
2751 
/* Bitstream exhausted mid-row: clean end only if exactly consumed. */
2752  if (get_bits_left(&sl->gb) <= 0 && sl->mb_skip_run <= 0) {
2753  ff_tlog(h->avctx, "slice end %d %d\n",
2754  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2755 
2756  if (get_bits_left(&sl->gb) == 0) {
2757  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2758  sl->mb_x - 1, sl->mb_y, ER_MB_END);
2759  if (sl->mb_x > lf_x_start)
2760  loop_filter(h, sl, lf_x_start, sl->mb_x);
2761 
2762  goto finish;
2763  } else {
2764  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2765  sl->mb_y, ER_MB_ERROR);
2766 
2767  return AVERROR_INVALIDDATA;
2768  }
2769  }
2770  }
2771  }
2772 
2773 finish:
/* Restore the filter setting overridden for postponed filtering. */
2774  sl->deblocking_filter = orig_deblock;
2775  return 0;
2776 }
2777 
2778 /**
2779  * Call decode_slice() for each context.
2780  *
2781  * @param h h264 master context
2782  */
/* Body of ff_h264_execute_decode_slices() -- the signature line is elided
 * in this listing.  Decodes all queued slice contexts: directly for a
 * single slice, or via avctx->execute() for several, after computing
 * non-overlapping next_slice_idx bounds; then merges per-slice state back
 * into the master context and runs any postponed loop filtering.
 * NOTE(review): listing line 2839 (error-count accumulation inside the ER
 * loop) is missing from this excerpt. */
2784 {
2785  AVCodecContext *const avctx = h->avctx;
2786  H264SliceContext *sl;
2787  int context_count = h->nb_slice_ctx_queued;
2788  int ret = 0;
2789  int i, j;
2790 
2791  h->slice_ctx[0].next_slice_idx = INT_MAX;
2792 
/* Hardware acceleration decodes elsewhere; nothing to do here. */
2793  if (h->avctx->hwaccel || context_count < 1)
2794  return 0;
2795 
2796  av_assert0(context_count && h->slice_ctx[context_count - 1].mb_y < h->mb_height);
2797 
2798  if (context_count == 1) {
2799 
/* Single slice: it may run to the end of the picture. */
2800  h->slice_ctx[0].next_slice_idx = h->mb_width * h->mb_height;
2801  h->postpone_filter = 0;
2802 
2803  ret = decode_slice(avctx, &h->slice_ctx[0]);
2804  h->mb_y = h->slice_ctx[0].mb_y;
2805  if (ret < 0)
2806  goto finish;
2807  } else {
2808  av_assert0(context_count > 0);
2809  for (i = 0; i < context_count; i++) {
2810  int next_slice_idx = h->mb_width * h->mb_height;
2811  int slice_idx;
2812 
2813  sl = &h->slice_ctx[i];
2814  if (CONFIG_ERROR_RESILIENCE) {
2815  sl->er.error_count = 0;
2816  }
2817 
2818  /* make sure none of those slices overlap */
2819  slice_idx = sl->mb_y * h->mb_width + sl->mb_x;
2820  for (j = 0; j < context_count; j++) {
2821  H264SliceContext *sl2 = &h->slice_ctx[j];
2822  int slice_idx2 = sl2->mb_y * h->mb_width + sl2->mb_x;
2823 
2824  if (i == j || slice_idx2 < slice_idx)
2825  continue;
2826  next_slice_idx = FFMIN(next_slice_idx, slice_idx2);
2827  }
2828  sl->next_slice_idx = next_slice_idx;
2829  }
2830 
/* Decode all queued slices in parallel worker threads. */
2831  avctx->execute(avctx, decode_slice, h->slice_ctx,
2832  NULL, context_count, sizeof(h->slice_ctx[0]));
2833 
2834  /* pull back stuff from slices to master context */
2835  sl = &h->slice_ctx[context_count - 1];
2836  h->mb_y = sl->mb_y;
2837  if (CONFIG_ERROR_RESILIENCE) {
2838  for (i = 1; i < context_count; i++)
/* NOTE(review): listing line 2839 (the loop body) is missing from
 * this excerpt. */
2840  }
2841 
/* Postponed filtering: run the loop filter over each slice's rows now
 * that all slices of the picture are decoded. */
2842  if (h->postpone_filter) {
2843  h->postpone_filter = 0;
2844 
2845  for (i = 0; i < context_count; i++) {
2846  int y_end, x_end;
2847 
2848  sl = &h->slice_ctx[i];
2849  y_end = FFMIN(sl->mb_y + 1, h->mb_height);
2850  x_end = (sl->mb_y >= h->mb_height) ? h->mb_width : sl->mb_x;
2851 
2852  for (j = sl->resync_mb_y; j < y_end; j += 1 + FIELD_OR_MBAFF_PICTURE(h)) {
2853  sl->mb_y = j;
2854  loop_filter(h, sl, j > sl->resync_mb_y ? 0 : sl->resync_mb_x,
2855  j == y_end - 1 ? x_end : h->mb_width);
2856  }
2857  }
2858  }
2859  }
2860 
2861 finish:
/* Queue consumed regardless of success. */
2862  h->nb_slice_ctx_queued = 0;
2863  return ret;
2864 }
int chroma_format_idc
Definition: h264_ps.h:48
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
Definition: stereo3d.h:167
int video_signal_type_present_flag
Definition: h264_ps.h:74
struct H264Context * h264
Definition: h264dec.h:178
#define AV_EF_AGGRESSIVE
consider things that a sane encoder should not do as an error
Definition: avcodec.h:2766
#define ff_tlog(ctx,...)
Definition: internal.h:81
av_cold void ff_videodsp_init(VideoDSPContext *ctx, int bpc)
Definition: videodsp.c:38
#define NULL
Definition: coverity.c:32
int ff_thread_can_start_frame(AVCodecContext *avctx)
const struct AVCodec * codec
Definition: avcodec.h:1630
AVRational framerate
Definition: avcodec.h:3161
discard all frames except keyframes
Definition: avcodec.h:828
static void init_dimensions(H264Context *h)
Definition: h264_slice.c:867
int nb_mmco
Definition: h264dec.h:474
int workaround_bugs
Definition: h264dec.h:367
int long_ref
1->long term reference 0->short term reference
Definition: h264dec.h:154
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:59
int sei_recovery_frame_cnt
Definition: h264dec.h:163
int ff_h264_queue_decode_slice(H264Context *h, const H2645NAL *nal)
Submit a slice for decoding.
Definition: h264_slice.c:2074
H264POCContext poc
Definition: h264dec.h:460
int mb_num
Definition: h264dec.h:437
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it...
Definition: buffer.c:126
int mb_aff_frame
Definition: h264dec.h:406
int recovery_frame_cnt
recovery_frame_cnt
Definition: h264_sei.h:134
int16_t mv_cache[2][5 *8][2]
Motion vector cache.
Definition: h264dec.h:299
enum AVStereo3DView view
Determines which views are packed.
Definition: stereo3d.h:190
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:407
static int get_se_golomb(GetBitContext *gb)
read signed exp golomb code.
Definition: golomb.h:239
int edge_emu_buffer_allocated
Definition: h264dec.h:287
int coded_width
Bitstream width / height, may be different from width/height e.g.
Definition: avcodec.h:1809
static void decode_finish_row(const H264Context *h, H264SliceContext *sl)
Draw edges and report progress for the last MB row.
Definition: h264_slice.c:2526
const char * fmt
Definition: avisynth_c.h:861
int first_field
Definition: h264dec.h:408
uint8_t field_scan8x8_q0[64]
Definition: h264dec.h:431
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
misc image utilities
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:379
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:182
#define ER_MB_END
AVFrame * f
Definition: thread.h:35
int weighted_bipred_idc
Definition: h264_ps.h:119
int ff_h264_build_ref_list(H264Context *h, H264SliceContext *sl)
Definition: h264_refs.c:299
int left_mb_xy[LEFT_MBS]
Definition: h264dec.h:211
int chroma_qp_index_offset[2]
Definition: h264_ps.h:122
AVBufferRef * sps_list[MAX_SPS_COUNT]
Definition: h264_ps.h:141
const uint8_t * bytestream_end
Definition: cabac.h:49
static av_always_inline int get_chroma_qp(const PPS *pps, int t, int qscale)
Get the chroma qp.
Definition: h264dec.h:681
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:486
hardware decoding through Videotoolbox
Definition: pixfmt.h:282
H264ChromaContext h264chroma
Definition: h264dec.h:342
uint16_t * cbp_table
Definition: h264dec.h:413
int luma_weight_flag[2]
7.4.3.2 luma_weight_lX_flag
Definition: h264_parse.h:35
MMCO mmco[MAX_MMCO_COUNT]
memory management control operations buffer.
Definition: h264dec.h:473
static void implicit_weight_table(const H264Context *h, H264SliceContext *sl, int field)
Initialize implicit_weight table.
Definition: h264_slice.c:656
#define avpriv_request_sample(...)
Sequence parameter set.
Definition: h264_ps.h:44
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:2256
int mb_y
Definition: h264dec.h:434
int coded_picture_number
Definition: h264dec.h:363
int bitstream_restriction_flag
Definition: h264_ps.h:87
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:168
H264SEIAlternativeTransfer alternative_transfer
Definition: h264_sei.h:187
int num
Numerator.
Definition: rational.h:59
Timecode which conforms to SMPTE ST 12-1.
Definition: frame.h:168
AVBufferRef * mb_type_buf
Definition: h264dec.h:138
int repeat_pict
When decoding, this signals how much the picture must be delayed.
Definition: frame.h:437
int bipred_scratchpad_allocated
Definition: h264dec.h:286
Views are next to each other, but when upscaling apply a checkerboard pattern.
Definition: stereo3d.h:117
#define DELAYED_PIC_REF
Value of Picture.reference when Picture is not a reference picture, but is held for delayed output...
Definition: diracdec.c:67
Frame contains only the right view.
Definition: stereo3d.h:161
#define AV_PIX_FMT_GBRP10
Definition: pixfmt.h:413
#define VP_START
< current MB is the first after a resync marker
AVBufferPool * mb_type_pool
Definition: h264dec.h:550
int ff_h264_init_poc(int pic_field_poc[2], int *pic_poc, const SPS *sps, H264POCContext *pc, int picture_structure, int nal_ref_idc)
Definition: h264_parse.c:277
int chroma_x_shift
Definition: h264dec.h:360
const uint8_t * buffer
Definition: get_bits.h:62
Picture parameter set.
Definition: h264_ps.h:111
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel...
Definition: avcodec.h:2000
int16_t(*[2] motion_val)[2]
Definition: h264dec.h:136
int flags
Definition: h264dec.h:366
void ff_h264_flush_change(H264Context *h)
Definition: h264dec.c:480
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:36
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:1831
int frame_mbs_only_flag
Definition: h264_ps.h:62
int mb_height
Definition: h264dec.h:435
H264Picture * delayed_pic[MAX_DELAYED_PIC_COUNT+2]
Definition: h264dec.h:465
#define tc
Definition: regdef.h:69
int is_avc
Used to parse AVC variant of H.264.
Definition: h264dec.h:450
av_cold void ff_h264_pred_init(H264PredContext *h, int codec_id, const int bit_depth, int chroma_format_idc)
Set the intra prediction function pointers.
Definition: h264pred.c:411
AVBufferPool * ref_index_pool
Definition: h264dec.h:552
int height_from_caller
Definition: h264dec.h:543
uint8_t zigzag_scan8x8_cavlc[64]
Definition: h264dec.h:423
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:401
av_cold void ff_h264chroma_init(H264ChromaContext *c, int bit_depth)
Definition: h264chroma.c:41
ERPicture last_pic
H264SEIDisplayOrientation display_orientation
Definition: h264_sei.h:185
mpegvideo header.
int current_frame_is_frame0_flag
Definition: h264_sei.h:150
int next_slice_idx
Definition: h264dec.h:236
static const uint8_t zigzag_scan8x8_cavlc[64+1]
Definition: h264_slice.c:96
H264Context.
Definition: h264dec.h:337
discard all non intra frames
Definition: avcodec.h:827
discard all
Definition: avcodec.h:829
AVFrame * f
Definition: h264dec.h:129
Views are next to each other.
Definition: stereo3d.h:67
size_t crop_bottom
Definition: frame.h:656
int bits_per_raw_sample
Bits per sample/pixel of internal libavcodec pixel/sample format.
Definition: avcodec.h:2852
uint32_t num_units_in_tick
Definition: h264_ps.h:83
static const uint8_t field_scan[16+1]
Definition: h264_slice.c:50
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:2785
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before ff_thread_await_progress() has been called on them.reget_buffer() and buffer age optimizations no longer work.*The contents of buffers must not be written to after ff_thread_report_progress() has been called on them.This includes draw_edges().Porting codecs to frame threading
H264Picture * long_ref[32]
Definition: h264dec.h:464
#define src
Definition: vp8dsp.c:254
int profile
profile
Definition: avcodec.h:2954
int picture_structure
Definition: h264dec.h:407
if it could not because there are no more frames
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB)
Definition: pixfmt.h:510
#define AV_WN32A(p, v)
Definition: intreadwrite.h:538
#define AV_COPY32(d, s)
Definition: intreadwrite.h:601
void av_display_matrix_flip(int32_t matrix[9], int hflip, int vflip)
Flip the input matrix horizontally and/or vertically.
Definition: display.c:65
unsigned int ref_count[2]
num_ref_idx_l0/1_active_minus1 + 1
Definition: h264dec.h:267
#define IN_RANGE(a, b, size)
Definition: h264_slice.c:264
#define REBASE_PICTURE(pic, new_ctx, old_ctx)
Definition: h264_slice.c:266
MMCO mmco[MAX_MMCO_COUNT]
Definition: h264dec.h:322
void av_display_rotation_set(int32_t matrix[9], double angle)
Initialize a transformation matrix describing a pure counterclockwise rotation by the specified angle...
Definition: display.c:50
Frame contains only the left view.
Definition: stereo3d.h:156
int ff_h264_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
Definition: h264_slice.c:287
Switching Intra.
Definition: avutil.h:278
int setup_finished
Definition: h264dec.h:534
enum AVDiscard skip_frame
Skip decoding for selected frames.
Definition: avcodec.h:3096
int ff_h264_execute_decode_slices(H264Context *h)
Call decode_slice() for each context.
Definition: h264_slice.c:2783
H264SEIContext sei
Definition: h264dec.h:547
AVBufferRef * buf_ref
Definition: h264_sei.h:119
unsigned int crop_top
frame_cropping_rect_top_offset
Definition: h264_ps.h:70
#define USES_LIST(a, list)
Definition: mpegutils.h:99
void ff_color_frame(AVFrame *frame, const int color[4])
Definition: utils.c:412
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
const uint8_t * bytestream
Definition: cabac.h:48
int ref2frm[MAX_SLICES][2][64]
reference to frame number lists, used in the loop filter, the first 2 are for -2,-1 ...
Definition: h264dec.h:553
int deblocking_filter_parameters_present
deblocking_filter_parameters_present_flag
Definition: h264_ps.h:123
static double cb(void *priv, double x, double y)
Definition: vf_geq.c:215
const PPS * pps
Definition: h264_ps.h:147
4: bottom field, top field, in that order
Definition: h264_sei.h:50
static enum AVPixelFormat non_j_pixfmt(enum AVPixelFormat a)
Definition: h264_slice.c:991
uint8_t
int full_range
Definition: h264_ps.h:75
unsigned int crop_left
frame_cropping_rect_left_offset
Definition: h264_ps.h:68
int gaps_in_frame_num_allowed_flag
Definition: h264_ps.h:58
#define MB_MBAFF(h)
Definition: h264dec.h:71
int slice_alpha_c0_offset
Definition: h264dec.h:194
Stereo 3D type: this structure describes how two videos are packed within a single video surface...
Definition: stereo3d.h:176
int poc
Definition: h264dec.h:171
void ff_h264_set_erpic(ERPicture *dst, H264Picture *src)
Definition: h264_picture.c:130
int field_picture
whether or not picture was encoded in separate fields
Definition: h264dec.h:158
int bit_depth_chroma
bit_depth_chroma_minus8 + 8
Definition: h264_ps.h:101
void ff_h264_hl_decode_mb(const H264Context *h, H264SliceContext *sl)
Definition: h264_mb.c:799
#define FF_DEBUG_PICT_INFO
Definition: avcodec.h:2707
size_t crop_left
Definition: frame.h:657
enum AVColorPrimaries color_primaries
Definition: h264_ps.h:77
int poc
frame POC
Definition: h264dec.h:148
int frame_num_offset
for POC type 2
Definition: h264_parse.h:51
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
int chroma_weight_flag[2]
7.4.3.2 chroma_weight_lX_flag
Definition: h264_parse.h:36
Multithreading support functions.
#define ER_MB_ERROR
int cabac
entropy_coding_mode_flag
Definition: h264_ps.h:113
#define MB_FIELD(sl)
Definition: h264dec.h:72
const char * from
Definition: jacosubdec.c:65
unsigned int crop_right
frame_cropping_rect_right_offset
Definition: h264_ps.h:69
#define AV_PIX_FMT_GBRP9
Definition: pixfmt.h:412
uint8_t(*[2] top_borders)[(16 *3)*2]
Definition: h264dec.h:285
int invalid_gap
Definition: h264dec.h:162
ERPicture cur_pic
int frame_recovered
Initial frame has been completely recovered.
Definition: h264dec.h:524
Structure to hold side data for an AVFrame.
Definition: frame.h:201
int height
Definition: h264dec.h:359
#define PICT_BOTTOM_FIELD
Definition: mpegutils.h:38
#define height
#define MAX_PPS_COUNT
Definition: h264_ps.h:38
int pt
Definition: rtp.c:35
int transform_bypass
qpprime_y_zero_transform_bypass_flag
Definition: h264_ps.h:49
static void finish(void)
Definition: movenc.c:345
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:219
void ff_h264_filter_mb(const H264Context *h, H264SliceContext *sl, int mb_x, int mb_y, uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr, unsigned int linesize, unsigned int uvlinesize)
char av_get_picture_type_char(enum AVPictureType pict_type)
Return a single letter to describe the given picture type pict_type.
Definition: utils.c:88
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:79
#define ER_MV_END
int ff_set_sar(AVCodecContext *avctx, AVRational sar)
Check that the provided sample aspect ratio is valid and set it on the codec context.
Definition: utils.c:119
int redundant_pic_cnt_present
redundant_pic_cnt_present_flag
Definition: h264_ps.h:125
int picture_structure
Definition: h264dec.h:240
int chroma_y_shift
Definition: h264dec.h:360
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:192
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:442
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:55
AVBufferRef * qscale_table_buf
Definition: h264dec.h:132
static int h264_export_frame_props(H264Context *h)
Definition: h264_slice.c:1126
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:402
H264Picture * parent
Definition: h264dec.h:174
#define FRAME_RECOVERED_SEI
Sufficient number of frames have been decoded since a SEI recovery point, so all the following frames...
Definition: h264dec.h:522
H264SEIAFD afd
Definition: h264_sei.h:179
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:901
int recovered
picture at IDR or recovery point + recovery count
Definition: h264dec.h:161
Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVAc...
Definition: frame.h:89
#define AV_COPY64(d, s)
Definition: intreadwrite.h:605
int ff_h264_decode_ref_pic_list_reordering(H264SliceContext *sl, void *logctx)
Definition: h264_refs.c:423
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
Definition: avcodec.h:2263
#define FFALIGN(x, a)
Definition: macros.h:48
int chroma_qp[2]
Definition: h264dec.h:188
#define av_log(a,...)
int last_pocs[MAX_DELAYED_PIC_COUNT]
Definition: h264dec.h:466
const char * to
Definition: webvttdec.c:34
void ff_h264_direct_ref_list_init(const H264Context *const h, H264SliceContext *sl)
Definition: h264_direct.c:121
int width
Definition: h264dec.h:359
static int h264_frame_start(H264Context *h)
Definition: h264_slice.c:455
H.264 common definitions.
void ff_h264_draw_horiz_band(const H264Context *h, H264SliceContext *sl, int y, int height)
Definition: h264dec.c:101
#define U(x)
Definition: vp56_arith.h:37
int timecode_cnt
Number of timecode in use.
Definition: h264_sei.h:110
#define HWACCEL_MAX
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:849
H.264 parameter set handling.
H264Picture DPB[H264_MAX_PICTURE_COUNT]
Definition: h264dec.h:345
enum AVColorTransferCharacteristic color_trc
Definition: h264_ps.h:78
int mb_aff
mb_adaptive_frame_field_flag
Definition: h264_ps.h:63
H264PredContext hpc
Definition: h264dec.h:386
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:259
AVBufferRef * sps_ref
Definition: h264_ps.h:145
int chroma_log2_weight_denom
Definition: h264_parse.h:34
int width
Definition: frame.h:353
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
int has_b_frames
Size of the frame reordering buffer in the decoder.
Definition: avcodec.h:1911
#define td
Definition: regdef.h:70
int flags
Additional information about the frame packing.
Definition: stereo3d.h:185
static int get_ue_golomb(GetBitContext *gb)
Read an unsigned Exp-Golomb code in the range 0 to 8190.
Definition: golomb.h:55
static int alloc_scratch_buffers(H264SliceContext *sl, int linesize)
Definition: h264_slice.c:128
int poc_type
pic_order_cnt_type
Definition: h264_ps.h:51
void ff_er_add_slice(ERContext *s, int startx, int starty, int endx, int endy, int status)
Add a slice.
int context_initialized
Definition: h264dec.h:365
#define PTRDIFF_SPECIFIER
Definition: internal.h:261
ERContext er
Definition: h264dec.h:180
int nal_unit_type
Definition: h264dec.h:443
int ff_h264_decode_ref_pic_marking(H264SliceContext *sl, GetBitContext *gb, const H2645NAL *nal, void *logctx)
Definition: h264_refs.c:834
int ff_h264_get_profile(const SPS *sps)
Compute profile from profile_idc and constraint_set?_flags.
Definition: h264_parse.c:529
int num_reorder_frames
Definition: h264_ps.h:88
discard all bidirectional frames
Definition: avcodec.h:826
H264_SEI_FpaType arrangement_type
Definition: h264_sei.h:146
void * hwaccel_picture_private
hardware accelerator private data
Definition: h264dec.h:142
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:2577
Display matrix.
Views are packed per line, as if interlaced.
Definition: stereo3d.h:129
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:2899
static const uint8_t field_scan8x8[64+1]
Definition: h264_slice.c:57
const uint8_t ff_zigzag_scan[16+1]
Definition: mathtables.c:109
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:197
ATSC A53 Part 4 Closed Captions.
Definition: frame.h:58
#define FIELD_PICTURE(h)
Definition: h264dec.h:74
int picture_idr
Definition: h264dec.h:378
const char * arg
Definition: jacosubdec.c:66
int deblocking_filter
disable_deblocking_filter_idc with 1 <-> 0
Definition: h264dec.h:193
uint8_t(*[2] mvd_table)[2]
Definition: h264dec.h:417
int prev_interlaced_frame
Complement sei_pic_struct SEI_PIC_STRUCT_TOP_BOTTOM and SEI_PIC_STRUCT_BOTTOM_TOP indicate interlaced...
Definition: h264dec.h:498
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:1701
static int fill_filter_caches(const H264Context *h, H264SliceContext *sl, int mb_type)
Definition: h264_slice.c:2301
ThreadFrame tf
Definition: h264dec.h:130
simple assert() macros that are a bit more flexible than ISO C assert().
int weighted_pred
weighted_pred_flag
Definition: h264_ps.h:118
#define PICT_TOP_FIELD
Definition: mpegutils.h:37
H264QpelContext h264qpel
Definition: h264dec.h:343
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:400
int direct_spatial_mv_pred
Definition: h264dec.h:251
H264SEIUnregistered unregistered
Definition: h264_sei.h:181
int frame_num
frame_num (raw frame_num from slice header)
Definition: h264dec.h:149
const uint8_t ff_h264_golomb_to_pict_type[5]
Definition: h264data.c:37
#define MAX_SLICES
Definition: dxva2_hevc.c:29
int valid_recovery_point
Are the SEI recovery points looking valid.
Definition: h264dec.h:503
GLsizei count
Definition: opengl_enc.c:108
int ff_h264_get_slice_type(const H264SliceContext *sl)
Reconstruct bitstream slice_type.
Definition: h264_slice.c:2199
#define FFMAX(a, b)
Definition: common.h:94
#define fail()
Definition: checkasm.h:122
uint8_t active_format_description
Definition: h264_sei.h:115
int delta_pic_order_always_zero_flag
Definition: h264_ps.h:53
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4], const uint8_t *src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
Definition: imgutils.c:387
int * mb_index2xy
int slice_type_nos
S free slice type (SI/SP are remapped to I/P)
Definition: h264dec.h:184
uint8_t zigzag_scan8x8[64]
Definition: h264dec.h:422
AVBufferRef * hwaccel_priv_buf
Definition: h264dec.h:141
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
AVStereo3D * av_stereo3d_create_side_data(AVFrame *frame)
Allocate a complete AVFrameSideData and add it to the frame.
Definition: stereo3d.c:33
int crop_bottom
Definition: h264dec.h:383
uint8_t * error_status_table
size_t crop_top
Definition: frame.h:655
Views are alternated temporally.
Definition: stereo3d.h:92
void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size)
Allocate a buffer, reusing the given one if large enough.
Definition: mem.c:500
int ff_h264_parse_ref_count(int *plist_count, int ref_count[2], GetBitContext *gb, const PPS *pps, int slice_type_nos, int picture_structure, void *logctx)
Definition: h264_parse.c:219
int nal_length_size
Number of bytes used for nal length (1, 2 or 4)
Definition: h264dec.h:451
useful rectangle filling function
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:395
int refs
number of reference frames
Definition: avcodec.h:2209
int prev_poc_msb
poc_msb of the last reference pic for POC type 0
Definition: h264_parse.h:49
AVBufferRef * motion_val_buf[2]
Definition: h264dec.h:135
int ref_frame_count
num_ref_frames
Definition: h264_ps.h:57
enum AVPixelFormat * pix_fmts
array of supported pixel formats, or NULL if unknown, array is terminated by -1
Definition: avcodec.h:3576
enum AVPictureType pict_type
Picture type of the frame.
Definition: frame.h:378
int flags
Frame flags, a combination of AV_FRAME_FLAGS.
Definition: frame.h:532
H264_SEI_PicStructType pic_struct
Definition: h264_sei.h:83
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:2750
#define av_assert1(cond)
assert() equivalent, that does not lie in speed critical code.
Definition: avassert.h:53
int x264_build
Definition: h264dec.h:368
int ct_type
Bit set of clock types for fields/frames in picture timing SEI message.
Definition: h264_sei.h:90
void av_fast_mallocz(void *ptr, unsigned int *size, size_t min_size)
Allocate and clear a buffer, reusing the given one if large enough.
Definition: mem.c:505
#define FFMIN(a, b)
Definition: common.h:96
uint16_t * slice_table
slice_table_base + 2*mb_stride + 1
Definition: h264dec.h:403
static void copy_picture_range(H264Picture **to, H264Picture **from, int count, H264Context *new_base, H264Context *old_base)
Definition: h264_slice.c:271
static int h264_field_start(H264Context *h, const H264SliceContext *sl, const H2645NAL *nal, int first_slice)
Definition: h264_slice.c:1435
uint8_t field_scan8x8_cavlc[64]
Definition: h264dec.h:426
#define IS_DIRECT(a)
Definition: mpegutils.h:84
CABACContext cabac
Cabac.
Definition: h264dec.h:318
int colour_description_present_flag
Definition: h264_ps.h:76
unsigned int first_mb_addr
Definition: h264dec.h:234
int reference
Definition: h264dec.h:160
static void er_add_slice(H264SliceContext *sl, int startx, int starty, int endx, int endy, int status)
Definition: h264_slice.c:2557
#define LEFT_MBS
Definition: h264dec.h:75
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:78
AVRational sar
Definition: h264_ps.h:73
#define width
AVFrameSideData * av_frame_new_side_data_from_buf(AVFrame *frame, enum AVFrameSideDataType type, AVBufferRef *buf)
Add a new side data to a frame from an existing AVBufferRef.
Definition: frame.c:695
int width
picture width / height.
Definition: avcodec.h:1794
int redundant_pic_count
Definition: h264dec.h:244
int nb_slice_ctx
Definition: h264dec.h:351
uint8_t w
Definition: llviddspenc.c:38
H264PredWeightTable pwt
Definition: h264dec.h:197
int long_ref_count
number of actual long term references
Definition: h264dec.h:478
#define ER_DC_END
uint32_t * mb_type
Definition: h264dec.h:139
#define AV_FRAME_FLAG_CORRUPT
The frame data may be corrupted, e.g.
Definition: frame.h:520
void ff_thread_report_progress(ThreadFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
static int h264_init_ps(H264Context *h, const H264SliceContext *sl, int first_slice)
Definition: h264_slice.c:1002
int size_in_bits
Definition: get_bits.h:68
int32_t
int ff_init_cabac_decoder(CABACContext *c, const uint8_t *buf, int buf_size)
Definition: cabac.c:176
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
Definition: avcodec.h:2235
#define FFABS(a)
Absolute value, Note, INT_MIN / INT64_MIN result in undefined behavior as they are not representable ...
Definition: common.h:72
#define FF_THREAD_SLICE
Decode more than one part of a single frame at once.
Definition: avcodec.h:2892
Context Adaptive Binary Arithmetic Coder inline functions.
int level
level
Definition: avcodec.h:3074
int init_qp
pic_init_qp_minus26 + 26
Definition: h264_ps.h:120
H.264 / AVC / MPEG-4 part10 codec.
enum AVChromaLocation chroma_location
Definition: h264_ps.h:80
int mmco_reset
Definition: h264dec.h:475
H264SliceContext * slice_ctx
Definition: h264dec.h:350
int direct_8x8_inference_flag
Definition: h264_ps.h:64
static int h264_select_output_frame(H264Context *h)
Definition: h264_slice.c:1332
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: avcodec.h:2761
int reference
Definition: h264dec.h:170
int ticks_per_frame
For some codecs, the time base is closer to the field rate than the frame rate.
Definition: avcodec.h:1753
int top_borders_allocated[2]
Definition: h264dec.h:288
static void fill_rectangle(int x, int y, int w, int h)
Definition: ffplay.c:830
#define FIELD_OR_MBAFF_PICTURE(h)
Definition: h264dec.h:91
int ref_idc
H.264 only, nal_ref_idc.
Definition: h2645_parse.h:70
static void init_scan_tables(H264Context *h)
initialize scan tables
Definition: h264_slice.c:720
static int av_unused get_cabac_terminate(CABACContext *c)
int quincunx_sampling_flag
Definition: h264_sei.h:149
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:396
#define AV_PIX_FMT_GBRP14
Definition: pixfmt.h:415
HW acceleration through CUDA.
Definition: pixfmt.h:235
int type
NAL unit type.
Definition: h2645_parse.h:52
#define FF_ARRAY_ELEMS(a)
the normal 2^n-1 "JPEG" YUV ranges
Definition: pixfmt.h:535
static int init_table_pools(H264Context *h)
Definition: h264_slice.c:160
uint8_t * edge_emu_buffer
Definition: h264dec.h:284
if(ret)
static unsigned get_ue_golomb_long(GetBitContext *gb)
Read an unsigned Exp-Golomb code in the range 0 to UINT32_MAX-1.
Definition: golomb.h:105
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:62
int pic_order_present
pic_order_present_flag
Definition: h264_ps.h:114
uint8_t zigzag_scan_q0[16]
Definition: h264dec.h:427
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:368
int bit_depth_luma
luma bit depth from sps to detect changes
Definition: h264dec.h:453
int chroma_format_idc
chroma format from sps to detect changes
Definition: h264dec.h:454
VideoDSPContext vdsp
Definition: h264dec.h:340
int timing_info_present_flag
Definition: h264_ps.h:82
int coded_picture_number
picture number in bitstream order
Definition: frame.h:409
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:180
int mb_stride
Definition: h264dec.h:436
Views are packed in a checkerboard-like structure per pixel.
Definition: stereo3d.h:104
int postpone_filter
Definition: h264dec.h:373
#define IS_INTERLACED(a)
Definition: mpegutils.h:83
AVCodecContext * avctx
Definition: h264dec.h:339
uint8_t zigzag_scan8x8_q0[64]
Definition: h264dec.h:428
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:405
5: top field, bottom field, top field repeated, in that order
Definition: h264_sei.h:51
Libavcodec external API header.
#define MAX_DELAYED_PIC_COUNT
Definition: h264dec.h:56
Views are on top of each other.
Definition: stereo3d.h:79
int last_qscale_diff
Definition: h264dec.h:190
This side data contains a 3x3 transformation matrix describing an affine transformation that needs to...
Definition: frame.h:84
AVBufferRef * pps_list[MAX_PPS_COUNT]
Definition: h264_ps.h:142
enum AVCodecID codec_id
Definition: avcodec.h:1631
static int get_ue_golomb_31(GetBitContext *gb)
read unsigned exp golomb code, constraint to a max of 31.
Definition: golomb.h:120
int crop_left
Definition: h264dec.h:380
int delta_poc_bottom
Definition: h264_parse.h:46
ERPicture next_pic
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:326
H264Picture * short_ref[32]
Definition: h264dec.h:463
int next_outputed_poc
Definition: h264dec.h:468
int ff_h264_decode_mb_cabac(const H264Context *h, H264SliceContext *sl)
Decode a macroblock.
Definition: h264_cabac.c:1911
int explicit_ref_marking
Definition: h264dec.h:476
#define AV_CODEC_FLAG2_FAST
Allow non spec compliant speedup tricks.
Definition: avcodec.h:941
int field_poc[2]
top/bottom POC
Definition: h264dec.h:147
int debug
debug
Definition: avcodec.h:2706
int ff_thread_get_buffer(AVCodecContext *avctx, ThreadFrame *f, int flags)
Wrapper around get_buffer() for frame-multithreaded codecs.
int recovery_frame
recovery_frame is the frame_num at which the next frame should be fully constructed.
Definition: h264dec.h:511
main external API structure.
Definition: avcodec.h:1621
int qp_thresh
QP threshold to skip loopfilter.
Definition: h264dec.h:189
int explicit_ref_marking
Definition: h264dec.h:324
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining list
uint8_t * data
The data buffer.
Definition: buffer.h:89
H264SEITimeCode timecode[3]
Maximum three timecodes in a pic_timing SEI.
Definition: h264_sei.h:105
#define fp
Definition: regdef.h:44
uint8_t * data
Definition: frame.h:203
static int h264_slice_header_parse(const H264Context *h, H264SliceContext *sl, const H2645NAL *nal)
Definition: h264_slice.c:1721
H264SEIA53Caption a53_caption
Definition: h264_sei.h:180
void * buf
Definition: avisynth_c.h:766
int implicit_weight[48][48][2]
Definition: h264_parse.h:40
size_t crop_right
Definition: frame.h:658
int8_t * qscale_table
Definition: h264dec.h:133
static const uint8_t scan8[16 *3+3]
Definition: h264dec.h:644
#define CABAC(h)
Definition: h264_cabac.c:28
AVBufferRef * av_buffer_allocz(int size)
Same as av_buffer_alloc(), except the returned buffer will be initialized to zero.
Definition: buffer.c:84
AVBuffer * buffer
Definition: buffer.h:82
static const uint8_t field_scan8x8_cavlc[64+1]
Definition: h264_slice.c:76
static unsigned int get_bits1(GetBitContext *s)
Definition: get_bits.h:498
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:397
AVCodecContext * owner[2]
Definition: thread.h:36
int coded_height
Definition: avcodec.h:1809
Switching Predicted.
Definition: avutil.h:279
int prev_frame_num
frame_num of the last pic for POC type 1/2
Definition: h264_parse.h:53
#define FF_CODEC_PROPERTY_CLOSED_CAPTIONS
Definition: avcodec.h:3285
static int FUNC() pps(CodedBitstreamContext *ctx, RWContext *rw, H264RawPPS *current)
AVFrameSideData * av_frame_new_side_data(AVFrame *frame, enum AVFrameSideDataType type, int size)
Add a new side data to a frame.
Definition: frame.c:727
uint8_t non_zero_count_cache[15 *8]
non zero coeff count cache.
Definition: h264dec.h:294
void av_buffer_pool_uninit(AVBufferPool **ppool)
Mark the pool as being available for freeing.
Definition: buffer.c:276
#define FRAME_MBAFF(h)
Definition: h264dec.h:73
enum AVColorSpace colorspace
YUV colorspace type.
Definition: avcodec.h:2249
Rational number (pair of numerator and denominator).
Definition: rational.h:58
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
Definition: avcodec.h:2242
#define LBOT
Definition: h264dec.h:77
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:197
static av_always_inline uint32_t pack16to32(unsigned a, unsigned b)
Definition: h264dec.h:660
int8_t * ref_index[2]
Definition: h264dec.h:145
int ff_h264_ref_picture(H264Context *h, H264Picture *dst, H264Picture *src)
Definition: h264_picture.c:65
A reference counted buffer type.
int pixel_shift
0 for 8-bit H.264, 1 for high-bit-depth H.264
Definition: h264dec.h:356
they must not be accessed directly The fifo field contains the frames that are queued in the input for processing by the filter The status_in and status_out fields contains the queued status(EOF or error) of the link
int mmco_reset
MMCO_RESET set this 1.
Definition: h264dec.h:150
int content_interpretation_type
Definition: h264_sei.h:148
H264Picture * cur_pic_ptr
Definition: h264dec.h:346
#define LIST_NOT_USED
Definition: h264dec.h:390
const uint8_t ff_zigzag_direct[64]
Definition: mathtables.c:98
ptrdiff_t mb_uvlinesize
Definition: h264dec.h:228
static int h264_slice_header_init(H264Context *h)
Definition: h264_slice.c:907
static int FUNC() sps(CodedBitstreamContext *ctx, RWContext *rw, H264RawSPS *current)
int mb_mbaff
mb_aff_frame && mb_field_decoding_flag
Definition: h264dec.h:242
enum AVPixelFormat ff_thread_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Wrapper around get_format() for frame-multithreaded codecs.
int enable_er
Definition: h264dec.h:545
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:394
#define IS_INTER(a)
Definition: mpegutils.h:79
#define FF_COMPLIANCE_STRICT
Strictly conform to all the things in the spec no matter what consequences.
Definition: avcodec.h:2686
const SPS * sps
Definition: h264_ps.h:148
unsigned int sps_id
Definition: h264_ps.h:112
#define TRANSPOSE(x)
H264SEIPictureTiming picture_timing
Definition: h264_sei.h:178
int width_from_caller
Definition: h264dec.h:542
int log2_max_poc_lsb
log2_max_pic_order_cnt_lsb_minus4
Definition: h264_ps.h:52
H264SEIRecoveryPoint recovery_point
Definition: h264_sei.h:182
ptrdiff_t mb_linesize
may be equal to s->linesize or s->linesize * 2, for mbaff
Definition: h264dec.h:227
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer. ...
Definition: pixfmt.h:137
int16_t slice_row[MAX_SLICES]
to detect when MAX_SLICES is too low
Definition: h264dec.h:538
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:275
3: top field, bottom field, in that order
Definition: h264_sei.h:49
static int alloc_picture(H264Context *h, H264Picture *pic)
Definition: h264_slice.c:187
ptrdiff_t linesize
Definition: h264dec.h:226
int block_offset[2 *(16 *3)]
block_offset[ 0..23] for frame macroblocks block_offset[24..47] for field macroblocks ...
Definition: h264dec.h:397
uint32_t time_scale
Definition: h264_ps.h:84
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:406
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:414
int transform_8x8_mode
transform_8x8_mode_flag
Definition: h264_ps.h:126
ptrdiff_t uvlinesize
Definition: h264dec.h:226
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:398
static int h264_slice_init(H264Context *h, H264SliceContext *sl, const H2645NAL *nal)
Definition: h264_slice.c:1938
int pic_struct_present_flag
Definition: h264_ps.h:94
#define CHROMA444(h)
Definition: h264dec.h:99
unsigned int list_count
Definition: h264dec.h:268
uint8_t zigzag_scan[16]
Definition: h264dec.h:421
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:404
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:309
int prev_poc_lsb
poc_lsb of the last reference pic for POC type 0
Definition: h264_parse.h:50
static void release_unused_pictures(H264Context *h, int remove_current)
Definition: h264_slice.c:115
int ff_h264_alloc_tables(H264Context *h)
Allocate tables.
Definition: h264dec.c:179
#define AV_ZERO128(d)
Definition: intreadwrite.h:637
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:313
the normal 219*2^(n-8) "MPEG" YUV ranges
Definition: pixfmt.h:534
int left_type[LEFT_MBS]
Definition: h264dec.h:216
static int av_cmp_q(AVRational a, AVRational b)
Compare two rationals.
Definition: rational.h:89
int nb_slice_ctx_queued
Definition: h264dec.h:352
discard all non reference
Definition: avcodec.h:825
int ff_h264_field_end(H264Context *h, H264SliceContext *sl, int in_setup)
Definition: h264_picture.c:153
AVBufferPool * qscale_table_pool
Definition: h264dec.h:549
H264Picture * next_output_pic
Definition: h264dec.h:467
int mb_height
Definition: h264_ps.h:61
AVBufferPool * motion_val_pool
Definition: h264dec.h:551
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
int delta_poc_bottom
Definition: h264dec.h:328
#define IS_8x8DCT(a)
Definition: h264dec.h:104
common internal api header.
AVBufferPool * av_buffer_pool_init(int size, AVBufferRef *(*alloc)(int size))
Allocate and initialize a buffer pool.
Definition: buffer.c:239
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:107
#define AV_COPY128(d, s)
Definition: intreadwrite.h:609
static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
Definition: h264_slice.c:754
AVBufferRef * pps_ref
Definition: h264_ps.h:144
int log2_max_frame_num
log2_max_frame_num_minus4 + 4
Definition: h264_ps.h:50
int missing_fields
Definition: h264dec.h:528
const char * av_color_transfer_name(enum AVColorTransferCharacteristic transfer)
Definition: pixdesc.c:2918
H264ParamSets ps
Definition: h264dec.h:456
H264SEIFramePacking frame_packing
Definition: h264_sei.h:184
H.264 / AVC / MPEG-4 part10 motion vector prediction.
AVBufferRef * av_buffer_ref(AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:94
Bi-dir predicted.
Definition: avutil.h:276
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:80
Stereoscopic video.
Views are packed per column.
Definition: stereo3d.h:141
int cur_chroma_format_idc
Definition: h264dec.h:536
int8_t * intra4x4_pred_mode
Definition: h264dec.h:206
unsigned properties
Properties of the stream that gets decoded.
Definition: avcodec.h:3283
enum AVDiscard skip_loop_filter
Skip loop filtering for selected frames.
Definition: avcodec.h:3082
int den
Denominator.
Definition: rational.h:60
static void predict_field_decoding_flag(const H264Context *h, H264SliceContext *sl)
Definition: h264_slice.c:2513
int ff_h264_decode_mb_cavlc(const H264Context *h, H264SliceContext *sl)
Decode a macroblock.
Definition: h264_cavlc.c:702
GetBitContext gb
Definition: h2645_parse.h:47
int bit_depth_luma
bit_depth_luma_minus8 + 8
Definition: h264_ps.h:100
#define IS_INTRA(x, y)
int present
Definition: h264_sei.h:114
int delta_poc[2]
Definition: h264_parse.h:47
void ff_h264_free_tables(H264Context *h)
Definition: h264dec.c:136
void * priv_data
Definition: avcodec.h:1648
#define LTOP
Definition: h264dec.h:76
#define PICT_FRAME
Definition: mpegutils.h:39
static av_always_inline void backup_mb_border(const H264Context *h, H264SliceContext *sl, uint8_t *src_y, uint8_t *src_cb, uint8_t *src_cr, int linesize, int uvlinesize, int simple)
Definition: h264_slice.c:554
uint8_t zigzag_scan8x8_cavlc_q0[64]
Definition: h264dec.h:429
int8_t ref_cache[2][5 *8]
Definition: h264dec.h:300
#define AV_CODEC_FLAG_OUTPUT_CORRUPT
Output even those frames that might be corrupted.
Definition: avcodec.h:876
unsigned int pps_id
Definition: h264dec.h:278
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: frame.h:447
int frame_priv_data_size
Size of per-frame hardware accelerator private data.
Definition: avcodec.h:3838
#define CHROMA422(h)
Definition: h264dec.h:98
#define FF_BUG_TRUNCATED
Definition: avcodec.h:2669
H264Picture cur_pic
Definition: h264dec.h:347
int key_frame
1 -> keyframe, 0-> not
Definition: frame.h:373
#define AV_ZERO32(d)
Definition: intreadwrite.h:629
int mb_width
Definition: h264dec.h:435
static int find_unused_picture(H264Context *h)
Definition: h264_slice.c:252
int current_slice
current slice number, used to initialize slice_num of each thread/context
Definition: h264dec.h:488
int ff_h264_execute_ref_pic_marking(H264Context *h)
Execute the reference picture marking (memory management control operations).
Definition: h264_refs.c:610
static const uint8_t * align_get_bits(GetBitContext *s)
Definition: get_bits.h:693
int ff_h264_pred_weight_table(GetBitContext *gb, const SPS *sps, const int *ref_count, int slice_type_nos, H264PredWeightTable *pwt, int picture_structure, void *logctx)
Definition: h264_parse.c:27
int mb_width
pic_width_in_mbs_minus1 + 1
Definition: h264_ps.h:59
int flags2
AV_CODEC_FLAG2_*.
Definition: avcodec.h:1708
uint32_t * mb2b_xy
Definition: h264dec.h:399
H264Ref ref_list[2][48]
0..15: frame refs, 16..47: mbaff field refs.
Definition: h264dec.h:269
uint8_t field_scan8x8_cavlc_q0[64]
Definition: h264dec.h:432
int cur_bit_depth_luma
Definition: h264dec.h:537
int crop_top
Definition: h264dec.h:382
atomic_int error_count
AVBufferRef * ref_index_buf[2]
Definition: h264dec.h:144
av_cold void ff_h264dsp_init(H264DSPContext *c, const int bit_depth, const int chroma_format_idc)
Definition: h264dsp.c:67
int frame_number
Frame counter, set by libavcodec.
Definition: avcodec.h:2312
H264DSPContext h264dsp
Definition: h264dec.h:341
void ff_er_frame_start(ERContext *s)
int height
Definition: frame.h:353
#define AV_CODEC_FLAG2_SHOW_ALL
Show all frames before the first keyframe.
Definition: avcodec.h:969
FILE * out
Definition: movenc.c:54
uint8_t(*[2] mvd_table)[2]
Definition: h264dec.h:313
uint8_t field_scan8x8[64]
Definition: h264dec.h:425
int slice_type_fixed
Definition: h264dec.h:185
static av_always_inline void fill_filter_caches_inter(const H264Context *h, H264SliceContext *sl, int mb_type, int top_xy, int left_xy[LEFT_MBS], int top_type, int left_type[LEFT_MBS], int mb_xy, int list)
Definition: h264_slice.c:2217
#define av_freep(p)
int prev_frame_num_offset
for POC type 2
Definition: h264_parse.h:52
#define av_always_inline
Definition: attributes.h:39
HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView p...
Definition: pixfmt.h:229
int slice_beta_offset
Definition: h264dec.h:195
int8_t * intra4x4_pred_mode
Definition: h264dec.h:385
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:335
#define ER_AC_END
static int decode_slice(struct AVCodecContext *avctx, void *arg)
Definition: h264_slice.c:2571
int delta_poc[2]
Definition: h264dec.h:329
void ff_h264_direct_dist_scale_factor(const H264Context *const h, H264SliceContext *sl)
Definition: h264_direct.c:62
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2465
int ff_h264_slice_context_init(H264Context *h, H264SliceContext *sl)
Init context Allocate buffers which are not shared amongst multiple threads.
Definition: h264dec.c:237
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Frame references ownership and permissions
static double cr(void *priv, double x, double y)
Definition: vf_geq.c:216
uint8_t field_scan_q0[16]
Definition: h264dec.h:430
int mb_field_decoding_flag
Definition: h264dec.h:241
uint8_t(* non_zero_count)[48]
Definition: h264dec.h:388
static void loop_filter(const H264Context *h, H264SliceContext *sl, int start_x, int end_x)
Definition: h264_slice.c:2443
int(* execute)(struct AVCodecContext *c, int(*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size)
The codec may call this to execute several independent things.
Definition: avcodec.h:2920
unsigned int crop_bottom
frame_cropping_rect_bottom_offset
Definition: h264_ps.h:71
exp golomb vlc stuff
uint8_t * bipred_scratchpad
Definition: h264dec.h:283
void ff_h264_unref_picture(H264Context *h, H264Picture *pic)
Definition: h264_picture.c:44
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
av_cold void ff_h264qpel_init(H264QpelContext *c, int bit_depth)
Definition: h264qpel.c:49
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:1219
int droppable
Definition: h264dec.h:362
int level_idc
Definition: h264_ps.h:47
int strict_std_compliance
strictly follow the standard (MPEG-4, ...).
Definition: avcodec.h:2684
int crop_right
Definition: h264dec.h:381
void ff_h264_filter_mb_fast(const H264Context *h, H264SliceContext *sl, int mb_x, int mb_y, uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr, unsigned int linesize, unsigned int uvlinesize)
int nal_ref_idc
Definition: h264dec.h:442
GetBitContext gb
Definition: h264dec.h:179
uint8_t field_scan[16]
Definition: h264dec.h:424
int cabac_init_idc
Definition: h264dec.h:320
#define FRAME_RECOVERED_IDR
We have seen an IDR, so all the following frames in coded order are correctly decodable.
Definition: h264dec.h:517
for(j=16;j >0;--j)
6: bottom field, top field, bottom field repeated, in that order
Definition: h264_sei.h:52
#define FFMAX3(a, b, c)
Definition: common.h:95
int b_stride
Definition: h264dec.h:401
Predicted.
Definition: avutil.h:275
#define tb
Definition: regdef.h:68
Context Adaptive Binary Arithmetic Coder.
#define H264_MAX_PICTURE_COUNT
Definition: h264dec.h:52
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:58
void ff_h264_init_cabac_states(const H264Context *h, H264SliceContext *sl)
Definition: h264_cabac.c:1262
int short_ref_count
number of actual short term references
Definition: h264dec.h:479
static uint8_t tmp[11]
Definition: aes_ctr.c:26
enum AVColorSpace colorspace
Definition: h264_ps.h:79