/*
 * NOTE(review): this file was recovered from a Doxygen HTML listing of
 * FFmpeg's libavcodec/h264_slice.c; the HTML navigation text that preceded
 * the code has been removed.
 */
/*
 * H.26L/H.264/AVC/JVT/14496-10/... decoder
 * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * H.264 / AVC / MPEG-4 part10 codec.
 * @author Michael Niedermayer <michaelni@gmx.at>
 */
27 
#include "libavutil/avassert.h"
#include "libavutil/display.h"
#include "libavutil/imgutils.h"
#include "libavutil/stereo3d.h"
#include "internal.h"
#include "cabac.h"
#include "cabac_functions.h"
#include "error_resilience.h"
#include "avcodec.h"
#include "h264.h"
#include "h264dec.h"
#include "h264data.h"
#include "h264chroma.h"
#include "h264_mvpred.h"
#include "h264_ps.h"
#include "golomb.h"
#include "mathops.h"
#include "mpegutils.h"
#include "mpegvideo.h"
#include "rectangle.h"
#include "thread.h"
49 
/* 4x4 coefficient scan order for field (interlaced) macroblocks.
 * Entries are column + row*4 positions; the array is declared one element
 * larger than the 16 initializers (overread guard — matches the 8x8 tables
 * below; presumably for scan-table readahead, TODO confirm). */
static const uint8_t field_scan[16+1] = {
    0 + 0 * 4, 0 + 1 * 4, 1 + 0 * 4, 0 + 2 * 4,
    0 + 3 * 4, 1 + 1 * 4, 1 + 2 * 4, 1 + 3 * 4,
    2 + 0 * 4, 2 + 1 * 4, 2 + 2 * 4, 2 + 3 * 4,
    3 + 0 * 4, 3 + 1 * 4, 3 + 2 * 4, 3 + 3 * 4,
};
56 
/* 8x8 coefficient scan order for field (interlaced) macroblocks.
 * Entries are column + row*8 positions; +1 overread-guard element. */
static const uint8_t field_scan8x8[64+1] = {
    0 + 0 * 8, 0 + 1 * 8, 0 + 2 * 8, 1 + 0 * 8,
    1 + 1 * 8, 0 + 3 * 8, 0 + 4 * 8, 1 + 2 * 8,
    2 + 0 * 8, 1 + 3 * 8, 0 + 5 * 8, 0 + 6 * 8,
    0 + 7 * 8, 1 + 4 * 8, 2 + 1 * 8, 3 + 0 * 8,
    2 + 2 * 8, 1 + 5 * 8, 1 + 6 * 8, 1 + 7 * 8,
    2 + 3 * 8, 3 + 1 * 8, 4 + 0 * 8, 3 + 2 * 8,
    2 + 4 * 8, 2 + 5 * 8, 2 + 6 * 8, 2 + 7 * 8,
    3 + 3 * 8, 4 + 1 * 8, 5 + 0 * 8, 4 + 2 * 8,
    3 + 4 * 8, 3 + 5 * 8, 3 + 6 * 8, 3 + 7 * 8,
    4 + 3 * 8, 5 + 1 * 8, 6 + 0 * 8, 5 + 2 * 8,
    4 + 4 * 8, 4 + 5 * 8, 4 + 6 * 8, 4 + 7 * 8,
    5 + 3 * 8, 6 + 1 * 8, 6 + 2 * 8, 5 + 4 * 8,
    5 + 5 * 8, 5 + 6 * 8, 5 + 7 * 8, 6 + 3 * 8,
    7 + 0 * 8, 7 + 1 * 8, 6 + 4 * 8, 6 + 5 * 8,
    6 + 6 * 8, 6 + 7 * 8, 7 + 2 * 8, 7 + 3 * 8,
    7 + 4 * 8, 7 + 5 * 8, 7 + 6 * 8, 7 + 7 * 8,
};
75 
/* CAVLC variant of the 8x8 field scan (column-interleaved ordering used by
 * the CAVLC residual decoder); +1 overread-guard element. */
static const uint8_t field_scan8x8_cavlc[64+1] = {
    0 + 0 * 8, 1 + 1 * 8, 2 + 0 * 8, 0 + 7 * 8,
    2 + 2 * 8, 2 + 3 * 8, 2 + 4 * 8, 3 + 3 * 8,
    3 + 4 * 8, 4 + 3 * 8, 4 + 4 * 8, 5 + 3 * 8,
    5 + 5 * 8, 7 + 0 * 8, 6 + 6 * 8, 7 + 4 * 8,
    0 + 1 * 8, 0 + 3 * 8, 1 + 3 * 8, 1 + 4 * 8,
    1 + 5 * 8, 3 + 1 * 8, 2 + 5 * 8, 4 + 1 * 8,
    3 + 5 * 8, 5 + 1 * 8, 4 + 5 * 8, 6 + 1 * 8,
    5 + 6 * 8, 7 + 1 * 8, 6 + 7 * 8, 7 + 5 * 8,
    0 + 2 * 8, 0 + 4 * 8, 0 + 5 * 8, 2 + 1 * 8,
    1 + 6 * 8, 4 + 0 * 8, 2 + 6 * 8, 5 + 0 * 8,
    3 + 6 * 8, 6 + 0 * 8, 4 + 6 * 8, 6 + 2 * 8,
    5 + 7 * 8, 6 + 4 * 8, 7 + 2 * 8, 7 + 6 * 8,
    1 + 0 * 8, 1 + 2 * 8, 0 + 6 * 8, 3 + 0 * 8,
    1 + 7 * 8, 3 + 2 * 8, 2 + 7 * 8, 4 + 2 * 8,
    3 + 7 * 8, 5 + 2 * 8, 4 + 7 * 8, 5 + 4 * 8,
    6 + 3 * 8, 6 + 5 * 8, 7 + 3 * 8, 7 + 7 * 8,
};
94 
// zigzag_scan8x8_cavlc[i] = zigzag_scan8x8[(i/4) + 16*(i%4)]
/* CAVLC-reordered 8x8 zigzag scan; +1 overread-guard element. */
static const uint8_t zigzag_scan8x8_cavlc[64+1] = {
    0 + 0 * 8, 1 + 1 * 8, 1 + 2 * 8, 2 + 2 * 8,
    4 + 1 * 8, 0 + 5 * 8, 3 + 3 * 8, 7 + 0 * 8,
    3 + 4 * 8, 1 + 7 * 8, 5 + 3 * 8, 6 + 3 * 8,
    2 + 7 * 8, 6 + 4 * 8, 5 + 6 * 8, 7 + 5 * 8,
    1 + 0 * 8, 2 + 0 * 8, 0 + 3 * 8, 3 + 1 * 8,
    3 + 2 * 8, 0 + 6 * 8, 4 + 2 * 8, 6 + 1 * 8,
    2 + 5 * 8, 2 + 6 * 8, 6 + 2 * 8, 5 + 4 * 8,
    3 + 7 * 8, 7 + 3 * 8, 4 + 7 * 8, 7 + 6 * 8,
    0 + 1 * 8, 3 + 0 * 8, 0 + 4 * 8, 4 + 0 * 8,
    2 + 3 * 8, 1 + 5 * 8, 5 + 1 * 8, 5 + 2 * 8,
    1 + 6 * 8, 3 + 5 * 8, 7 + 1 * 8, 4 + 5 * 8,
    4 + 6 * 8, 7 + 4 * 8, 5 + 7 * 8, 6 + 7 * 8,
    0 + 2 * 8, 2 + 1 * 8, 1 + 3 * 8, 5 + 0 * 8,
    1 + 4 * 8, 2 + 4 * 8, 6 + 0 * 8, 4 + 3 * 8,
    0 + 7 * 8, 4 + 4 * 8, 7 + 2 * 8, 3 + 6 * 8,
    5 + 5 * 8, 6 + 5 * 8, 6 + 6 * 8, 7 + 7 * 8,
};
114 
115 static void release_unused_pictures(H264Context *h, int remove_current)
116 {
117  int i;
118 
119  /* release non reference frames */
120  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
121  if (h->DPB[i].f->buf[0] && !h->DPB[i].reference &&
122  (remove_current || &h->DPB[i] != h->cur_pic_ptr)) {
123  ff_h264_unref_picture(h, &h->DPB[i]);
124  }
125  }
126 }
127 
128 static int alloc_scratch_buffers(H264SliceContext *sl, int linesize)
129 {
130  const H264Context *h = sl->h264;
131  int alloc_size = FFALIGN(FFABS(linesize) + 32, 32);
132 
133  av_fast_malloc(&sl->bipred_scratchpad, &sl->bipred_scratchpad_allocated, 16 * 6 * alloc_size);
134  // edge emu needs blocksize + filter length - 1
135  // (= 21x21 for H.264)
136  av_fast_malloc(&sl->edge_emu_buffer, &sl->edge_emu_buffer_allocated, alloc_size * 2 * 21);
137 
139  h->mb_width * 16 * 3 * sizeof(uint8_t) * 2);
141  h->mb_width * 16 * 3 * sizeof(uint8_t) * 2);
142 
143  if (!sl->bipred_scratchpad || !sl->edge_emu_buffer ||
144  !sl->top_borders[0] || !sl->top_borders[1]) {
147  av_freep(&sl->top_borders[0]);
148  av_freep(&sl->top_borders[1]);
149 
152  sl->top_borders_allocated[0] = 0;
153  sl->top_borders_allocated[1] = 0;
154  return AVERROR(ENOMEM);
155  }
156 
157  return 0;
158 }
159 
161 {
162  const int big_mb_num = h->mb_stride * (h->mb_height + 1) + 1;
163  const int mb_array_size = h->mb_stride * h->mb_height;
164  const int b4_stride = h->mb_width * 4 + 1;
165  const int b4_array_size = b4_stride * h->mb_height * 4;
166 
167  h->qscale_table_pool = av_buffer_pool_init(big_mb_num + h->mb_stride,
169  h->mb_type_pool = av_buffer_pool_init((big_mb_num + h->mb_stride) *
170  sizeof(uint32_t), av_buffer_allocz);
171  h->motion_val_pool = av_buffer_pool_init(2 * (b4_array_size + 4) *
172  sizeof(int16_t), av_buffer_allocz);
173  h->ref_index_pool = av_buffer_pool_init(4 * mb_array_size, av_buffer_allocz);
174 
175  if (!h->qscale_table_pool || !h->mb_type_pool || !h->motion_val_pool ||
176  !h->ref_index_pool) {
181  return AVERROR(ENOMEM);
182  }
183 
184  return 0;
185 }
186 
188 {
189  int i, ret = 0;
190 
191  av_assert0(!pic->f->data[0]);
192 
193  pic->tf.f = pic->f;
194  ret = ff_thread_get_buffer(h->avctx, &pic->tf, pic->reference ?
196  if (ret < 0)
197  goto fail;
198 
199  if (h->avctx->hwaccel) {
200  const AVHWAccel *hwaccel = h->avctx->hwaccel;
202  if (hwaccel->frame_priv_data_size) {
204  if (!pic->hwaccel_priv_buf)
205  return AVERROR(ENOMEM);
207  }
208  }
209  if (CONFIG_GRAY && !h->avctx->hwaccel && h->flags & AV_CODEC_FLAG_GRAY && pic->f->data[2]) {
210  int h_chroma_shift, v_chroma_shift;
212  &h_chroma_shift, &v_chroma_shift);
213 
214  for(i=0; i<AV_CEIL_RSHIFT(pic->f->height, v_chroma_shift); i++) {
215  memset(pic->f->data[1] + pic->f->linesize[1]*i,
216  0x80, AV_CEIL_RSHIFT(pic->f->width, h_chroma_shift));
217  memset(pic->f->data[2] + pic->f->linesize[2]*i,
218  0x80, AV_CEIL_RSHIFT(pic->f->width, h_chroma_shift));
219  }
220  }
221 
222  if (!h->qscale_table_pool) {
223  ret = init_table_pools(h);
224  if (ret < 0)
225  goto fail;
226  }
227 
230  if (!pic->qscale_table_buf || !pic->mb_type_buf)
231  goto fail;
232 
233  pic->mb_type = (uint32_t*)pic->mb_type_buf->data + 2 * h->mb_stride + 1;
234  pic->qscale_table = pic->qscale_table_buf->data + 2 * h->mb_stride + 1;
235 
236  for (i = 0; i < 2; i++) {
239  if (!pic->motion_val_buf[i] || !pic->ref_index_buf[i])
240  goto fail;
241 
242  pic->motion_val[i] = (int16_t (*)[2])pic->motion_val_buf[i]->data + 4;
243  pic->ref_index[i] = pic->ref_index_buf[i]->data;
244  }
245 
246  pic->pps_buf = av_buffer_ref(h->ps.pps_ref);
247  if (!pic->pps_buf)
248  goto fail;
249  pic->pps = (const PPS*)pic->pps_buf->data;
250 
251  pic->mb_width = h->mb_width;
252  pic->mb_height = h->mb_height;
253  pic->mb_stride = h->mb_stride;
254 
255  return 0;
256 fail:
257  ff_h264_unref_picture(h, pic);
258  return (ret < 0) ? ret : AVERROR(ENOMEM);
259 }
260 
262 {
263  int i;
264 
265  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
266  if (!h->DPB[i].f->buf[0])
267  return i;
268  }
269  return AVERROR_INVALIDDATA;
270 }
271 
272 
/* Test whether pointer a points inside object b of the given element count. */
#define IN_RANGE(a, b, size) (((void*)(a) >= (void*)(b)) && ((void*)(a) < (void*)((b) + (size))))

/* Translate a picture pointer from old_ctx's DPB to the same slot in
 * new_ctx's DPB; yields NULL when pic does not point into old_ctx's DPB. */
#define REBASE_PICTURE(pic, new_ctx, old_ctx)             \
    (((pic) && (pic) >= (old_ctx)->DPB &&                 \
      (pic) < (old_ctx)->DPB + H264_MAX_PICTURE_COUNT) ?  \
     &(new_ctx)->DPB[(pic) - (old_ctx)->DPB] : NULL)
279 
281  H264Context *new_base,
282  H264Context *old_base)
283 {
284  int i;
285 
286  for (i = 0; i < count; i++) {
287  av_assert1(!from[i] ||
288  IN_RANGE(from[i], old_base, 1) ||
289  IN_RANGE(from[i], old_base->DPB, H264_MAX_PICTURE_COUNT));
290  to[i] = REBASE_PICTURE(from[i], new_base, old_base);
291  }
292 }
293 
295 
297  const AVCodecContext *src)
298 {
299  H264Context *h = dst->priv_data, *h1 = src->priv_data;
300  int inited = h->context_initialized, err = 0;
301  int need_reinit = 0;
302  int i, ret;
303 
304  if (dst == src)
305  return 0;
306 
307  // We can't fail if SPS isn't set at it breaks current skip_frame code
308  //if (!h1->ps.sps)
309  // return AVERROR_INVALIDDATA;
310 
311  if (inited &&
312  (h->width != h1->width ||
313  h->height != h1->height ||
314  h->mb_width != h1->mb_width ||
315  h->mb_height != h1->mb_height ||
316  !h->ps.sps ||
317  h->ps.sps->bit_depth_luma != h1->ps.sps->bit_depth_luma ||
318  h->ps.sps->chroma_format_idc != h1->ps.sps->chroma_format_idc ||
319  h->ps.sps->colorspace != h1->ps.sps->colorspace)) {
320  need_reinit = 1;
321  }
322 
323  /* copy block_offset since frame_start may not be called */
324  memcpy(h->block_offset, h1->block_offset, sizeof(h->block_offset));
325 
326  // SPS/PPS
327  for (i = 0; i < FF_ARRAY_ELEMS(h->ps.sps_list); i++) {
328  ret = av_buffer_replace(&h->ps.sps_list[i], h1->ps.sps_list[i]);
329  if (ret < 0)
330  return ret;
331  }
332  for (i = 0; i < FF_ARRAY_ELEMS(h->ps.pps_list); i++) {
333  ret = av_buffer_replace(&h->ps.pps_list[i], h1->ps.pps_list[i]);
334  if (ret < 0)
335  return ret;
336  }
337 
338  ret = av_buffer_replace(&h->ps.pps_ref, h1->ps.pps_ref);
339  if (ret < 0)
340  return ret;
341  h->ps.pps = NULL;
342  h->ps.sps = NULL;
343  if (h1->ps.pps_ref) {
344  h->ps.pps = (const PPS*)h->ps.pps_ref->data;
345  h->ps.sps = h->ps.pps->sps;
346  }
347 
348  if (need_reinit || !inited) {
349  h->width = h1->width;
350  h->height = h1->height;
351  h->mb_height = h1->mb_height;
352  h->mb_width = h1->mb_width;
353  h->mb_num = h1->mb_num;
354  h->mb_stride = h1->mb_stride;
355  h->b_stride = h1->b_stride;
356  h->x264_build = h1->x264_build;
357 
358  if (h->context_initialized || h1->context_initialized) {
359  if ((err = h264_slice_header_init(h)) < 0) {
360  av_log(h->avctx, AV_LOG_ERROR, "h264_slice_header_init() failed");
361  return err;
362  }
363  }
364 
365  /* copy block_offset since frame_start may not be called */
366  memcpy(h->block_offset, h1->block_offset, sizeof(h->block_offset));
367  }
368 
369  h->avctx->coded_height = h1->avctx->coded_height;
370  h->avctx->coded_width = h1->avctx->coded_width;
371  h->avctx->width = h1->avctx->width;
372  h->avctx->height = h1->avctx->height;
373  h->width_from_caller = h1->width_from_caller;
374  h->height_from_caller = h1->height_from_caller;
375  h->coded_picture_number = h1->coded_picture_number;
376  h->first_field = h1->first_field;
377  h->picture_structure = h1->picture_structure;
378  h->mb_aff_frame = h1->mb_aff_frame;
379  h->droppable = h1->droppable;
380 
381  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
382  ff_h264_unref_picture(h, &h->DPB[i]);
383  if (h1->DPB[i].f->buf[0] &&
384  (ret = ff_h264_ref_picture(h, &h->DPB[i], &h1->DPB[i])) < 0)
385  return ret;
386  }
387 
388  h->cur_pic_ptr = REBASE_PICTURE(h1->cur_pic_ptr, h, h1);
390  if (h1->cur_pic.f->buf[0]) {
391  ret = ff_h264_ref_picture(h, &h->cur_pic, &h1->cur_pic);
392  if (ret < 0)
393  return ret;
394  }
395 
396  h->enable_er = h1->enable_er;
397  h->workaround_bugs = h1->workaround_bugs;
398  h->droppable = h1->droppable;
399 
400  // extradata/NAL handling
401  h->is_avc = h1->is_avc;
402  h->nal_length_size = h1->nal_length_size;
403 
404  memcpy(&h->poc, &h1->poc, sizeof(h->poc));
405 
406  memcpy(h->short_ref, h1->short_ref, sizeof(h->short_ref));
407  memcpy(h->long_ref, h1->long_ref, sizeof(h->long_ref));
408  memcpy(h->delayed_pic, h1->delayed_pic, sizeof(h->delayed_pic));
409  memcpy(h->last_pocs, h1->last_pocs, sizeof(h->last_pocs));
410 
411  h->next_output_pic = h1->next_output_pic;
412  h->next_outputed_poc = h1->next_outputed_poc;
413 
414  memcpy(h->mmco, h1->mmco, sizeof(h->mmco));
415  h->nb_mmco = h1->nb_mmco;
416  h->mmco_reset = h1->mmco_reset;
417  h->explicit_ref_marking = h1->explicit_ref_marking;
418  h->long_ref_count = h1->long_ref_count;
419  h->short_ref_count = h1->short_ref_count;
420 
421  copy_picture_range(h->short_ref, h1->short_ref, 32, h, h1);
422  copy_picture_range(h->long_ref, h1->long_ref, 32, h, h1);
423  copy_picture_range(h->delayed_pic, h1->delayed_pic,
424  MAX_DELAYED_PIC_COUNT + 2, h, h1);
425 
426  h->frame_recovered = h1->frame_recovered;
427 
428  ret = av_buffer_replace(&h->sei.a53_caption.buf_ref, h1->sei.a53_caption.buf_ref);
429  if (ret < 0)
430  return ret;
431 
432  for (i = 0; i < h->sei.unregistered.nb_buf_ref; i++)
434  h->sei.unregistered.nb_buf_ref = 0;
435 
436  if (h1->sei.unregistered.nb_buf_ref) {
438  h1->sei.unregistered.nb_buf_ref,
439  sizeof(*h->sei.unregistered.buf_ref));
440  if (ret < 0)
441  return ret;
442 
443  for (i = 0; i < h1->sei.unregistered.nb_buf_ref; i++) {
444  h->sei.unregistered.buf_ref[i] = av_buffer_ref(h1->sei.unregistered.buf_ref[i]);
445  if (!h->sei.unregistered.buf_ref[i])
446  return AVERROR(ENOMEM);
448  }
449  }
450  h->sei.unregistered.x264_build = h1->sei.unregistered.x264_build;
451 
452  if (!h->cur_pic_ptr)
453  return 0;
454 
455  if (!h->droppable) {
457  h->poc.prev_poc_msb = h->poc.poc_msb;
458  h->poc.prev_poc_lsb = h->poc.poc_lsb;
459  }
462 
463  h->recovery_frame = h1->recovery_frame;
464 
465  return err;
466 }
467 
469 {
470  H264Picture *pic;
471  int i, ret;
472  const int pixel_shift = h->pixel_shift;
473 
474  if (!ff_thread_can_start_frame(h->avctx)) {
475  av_log(h->avctx, AV_LOG_ERROR, "Attempt to start a frame outside SETUP state\n");
476  return -1;
477  }
478 
480  h->cur_pic_ptr = NULL;
481 
482  i = find_unused_picture(h);
483  if (i < 0) {
484  av_log(h->avctx, AV_LOG_ERROR, "no frame buffer available\n");
485  return i;
486  }
487  pic = &h->DPB[i];
488 
489  pic->reference = h->droppable ? 0 : h->picture_structure;
492  pic->frame_num = h->poc.frame_num;
493  /*
494  * Zero key_frame here; IDR markings per slice in frame or fields are ORed
495  * in later.
496  * See decode_nal_units().
497  */
498  pic->f->key_frame = 0;
499  pic->mmco_reset = 0;
500  pic->recovered = 0;
501  pic->invalid_gap = 0;
503 
504  pic->f->pict_type = h->slice_ctx[0].slice_type;
505 
506  pic->f->crop_left = h->crop_left;
507  pic->f->crop_right = h->crop_right;
508  pic->f->crop_top = h->crop_top;
509  pic->f->crop_bottom = h->crop_bottom;
510 
511  if ((ret = alloc_picture(h, pic)) < 0)
512  return ret;
513 
514  h->cur_pic_ptr = pic;
516  if (CONFIG_ERROR_RESILIENCE) {
518  }
519 
520  if ((ret = ff_h264_ref_picture(h, &h->cur_pic, h->cur_pic_ptr)) < 0)
521  return ret;
522 
523  for (i = 0; i < h->nb_slice_ctx; i++) {
524  h->slice_ctx[i].linesize = h->cur_pic_ptr->f->linesize[0];
525  h->slice_ctx[i].uvlinesize = h->cur_pic_ptr->f->linesize[1];
526  }
527 
528  if (CONFIG_ERROR_RESILIENCE && h->enable_er) {
532  }
533 
534  for (i = 0; i < 16; i++) {
535  h->block_offset[i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 4 * pic->f->linesize[0] * ((scan8[i] - scan8[0]) >> 3);
536  h->block_offset[48 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 8 * pic->f->linesize[0] * ((scan8[i] - scan8[0]) >> 3);
537  }
538  for (i = 0; i < 16; i++) {
539  h->block_offset[16 + i] =
540  h->block_offset[32 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 4 * pic->f->linesize[1] * ((scan8[i] - scan8[0]) >> 3);
541  h->block_offset[48 + 16 + i] =
542  h->block_offset[48 + 32 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 8 * pic->f->linesize[1] * ((scan8[i] - scan8[0]) >> 3);
543  }
544 
545  /* We mark the current picture as non-reference after allocating it, so
546  * that if we break out due to an error it can be released automatically
547  * in the next ff_mpv_frame_start().
548  */
549  h->cur_pic_ptr->reference = 0;
550 
551  h->cur_pic_ptr->field_poc[0] = h->cur_pic_ptr->field_poc[1] = INT_MAX;
552 
553  h->next_output_pic = NULL;
554 
555  h->postpone_filter = 0;
556 
558 
559  if (h->sei.unregistered.x264_build >= 0)
561 
562  assert(h->cur_pic_ptr->long_ref == 0);
563 
564  return 0;
565 }
566 
568  uint8_t *src_y,
569  uint8_t *src_cb, uint8_t *src_cr,
570  int linesize, int uvlinesize,
571  int simple)
572 {
573  uint8_t *top_border;
574  int top_idx = 1;
575  const int pixel_shift = h->pixel_shift;
576  int chroma444 = CHROMA444(h);
577  int chroma422 = CHROMA422(h);
578 
579  src_y -= linesize;
580  src_cb -= uvlinesize;
581  src_cr -= uvlinesize;
582 
583  if (!simple && FRAME_MBAFF(h)) {
584  if (sl->mb_y & 1) {
585  if (!MB_MBAFF(sl)) {
586  top_border = sl->top_borders[0][sl->mb_x];
587  AV_COPY128(top_border, src_y + 15 * linesize);
588  if (pixel_shift)
589  AV_COPY128(top_border + 16, src_y + 15 * linesize + 16);
590  if (simple || !CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
591  if (chroma444) {
592  if (pixel_shift) {
593  AV_COPY128(top_border + 32, src_cb + 15 * uvlinesize);
594  AV_COPY128(top_border + 48, src_cb + 15 * uvlinesize + 16);
595  AV_COPY128(top_border + 64, src_cr + 15 * uvlinesize);
596  AV_COPY128(top_border + 80, src_cr + 15 * uvlinesize + 16);
597  } else {
598  AV_COPY128(top_border + 16, src_cb + 15 * uvlinesize);
599  AV_COPY128(top_border + 32, src_cr + 15 * uvlinesize);
600  }
601  } else if (chroma422) {
602  if (pixel_shift) {
603  AV_COPY128(top_border + 32, src_cb + 15 * uvlinesize);
604  AV_COPY128(top_border + 48, src_cr + 15 * uvlinesize);
605  } else {
606  AV_COPY64(top_border + 16, src_cb + 15 * uvlinesize);
607  AV_COPY64(top_border + 24, src_cr + 15 * uvlinesize);
608  }
609  } else {
610  if (pixel_shift) {
611  AV_COPY128(top_border + 32, src_cb + 7 * uvlinesize);
612  AV_COPY128(top_border + 48, src_cr + 7 * uvlinesize);
613  } else {
614  AV_COPY64(top_border + 16, src_cb + 7 * uvlinesize);
615  AV_COPY64(top_border + 24, src_cr + 7 * uvlinesize);
616  }
617  }
618  }
619  }
620  } else if (MB_MBAFF(sl)) {
621  top_idx = 0;
622  } else
623  return;
624  }
625 
626  top_border = sl->top_borders[top_idx][sl->mb_x];
627  /* There are two lines saved, the line above the top macroblock
628  * of a pair, and the line above the bottom macroblock. */
629  AV_COPY128(top_border, src_y + 16 * linesize);
630  if (pixel_shift)
631  AV_COPY128(top_border + 16, src_y + 16 * linesize + 16);
632 
633  if (simple || !CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
634  if (chroma444) {
635  if (pixel_shift) {
636  AV_COPY128(top_border + 32, src_cb + 16 * linesize);
637  AV_COPY128(top_border + 48, src_cb + 16 * linesize + 16);
638  AV_COPY128(top_border + 64, src_cr + 16 * linesize);
639  AV_COPY128(top_border + 80, src_cr + 16 * linesize + 16);
640  } else {
641  AV_COPY128(top_border + 16, src_cb + 16 * linesize);
642  AV_COPY128(top_border + 32, src_cr + 16 * linesize);
643  }
644  } else if (chroma422) {
645  if (pixel_shift) {
646  AV_COPY128(top_border + 32, src_cb + 16 * uvlinesize);
647  AV_COPY128(top_border + 48, src_cr + 16 * uvlinesize);
648  } else {
649  AV_COPY64(top_border + 16, src_cb + 16 * uvlinesize);
650  AV_COPY64(top_border + 24, src_cr + 16 * uvlinesize);
651  }
652  } else {
653  if (pixel_shift) {
654  AV_COPY128(top_border + 32, src_cb + 8 * uvlinesize);
655  AV_COPY128(top_border + 48, src_cr + 8 * uvlinesize);
656  } else {
657  AV_COPY64(top_border + 16, src_cb + 8 * uvlinesize);
658  AV_COPY64(top_border + 24, src_cr + 8 * uvlinesize);
659  }
660  }
661  }
662 }
663 
664 /**
665  * Initialize implicit_weight table.
666  * @param field 0/1 initialize the weight for interlaced MBAFF
667  * -1 initializes the rest
668  */
670 {
671  int ref0, ref1, i, cur_poc, ref_start, ref_count0, ref_count1;
672 
673  for (i = 0; i < 2; i++) {
674  sl->pwt.luma_weight_flag[i] = 0;
675  sl->pwt.chroma_weight_flag[i] = 0;
676  }
677 
678  if (field < 0) {
679  if (h->picture_structure == PICT_FRAME) {
680  cur_poc = h->cur_pic_ptr->poc;
681  } else {
682  cur_poc = h->cur_pic_ptr->field_poc[h->picture_structure - 1];
683  }
684  if (sl->ref_count[0] == 1 && sl->ref_count[1] == 1 && !FRAME_MBAFF(h) &&
685  sl->ref_list[0][0].poc + (int64_t)sl->ref_list[1][0].poc == 2LL * cur_poc) {
686  sl->pwt.use_weight = 0;
687  sl->pwt.use_weight_chroma = 0;
688  return;
689  }
690  ref_start = 0;
691  ref_count0 = sl->ref_count[0];
692  ref_count1 = sl->ref_count[1];
693  } else {
694  cur_poc = h->cur_pic_ptr->field_poc[field];
695  ref_start = 16;
696  ref_count0 = 16 + 2 * sl->ref_count[0];
697  ref_count1 = 16 + 2 * sl->ref_count[1];
698  }
699 
700  sl->pwt.use_weight = 2;
701  sl->pwt.use_weight_chroma = 2;
702  sl->pwt.luma_log2_weight_denom = 5;
704 
705  for (ref0 = ref_start; ref0 < ref_count0; ref0++) {
706  int64_t poc0 = sl->ref_list[0][ref0].poc;
707  for (ref1 = ref_start; ref1 < ref_count1; ref1++) {
708  int w = 32;
709  if (!sl->ref_list[0][ref0].parent->long_ref && !sl->ref_list[1][ref1].parent->long_ref) {
710  int poc1 = sl->ref_list[1][ref1].poc;
711  int td = av_clip_int8(poc1 - poc0);
712  if (td) {
713  int tb = av_clip_int8(cur_poc - poc0);
714  int tx = (16384 + (FFABS(td) >> 1)) / td;
715  int dist_scale_factor = (tb * tx + 32) >> 8;
716  if (dist_scale_factor >= -64 && dist_scale_factor <= 128)
717  w = 64 - dist_scale_factor;
718  }
719  }
720  if (field < 0) {
721  sl->pwt.implicit_weight[ref0][ref1][0] =
722  sl->pwt.implicit_weight[ref0][ref1][1] = w;
723  } else {
724  sl->pwt.implicit_weight[ref0][ref1][field] = w;
725  }
726  }
727  }
728 }
729 
730 /**
731  * initialize scan tables
732  */
734 {
735  int i;
736  for (i = 0; i < 16; i++) {
737 #define TRANSPOSE(x) ((x) >> 2) | (((x) << 2) & 0xF)
739  h->field_scan[i] = TRANSPOSE(field_scan[i]);
740 #undef TRANSPOSE
741  }
742  for (i = 0; i < 64; i++) {
743 #define TRANSPOSE(x) ((x) >> 3) | (((x) & 7) << 3)
748 #undef TRANSPOSE
749  }
750  if (h->ps.sps->transform_bypass) { // FIXME same ugly
751  memcpy(h->zigzag_scan_q0 , ff_zigzag_scan , sizeof(h->zigzag_scan_q0 ));
752  memcpy(h->zigzag_scan8x8_q0 , ff_zigzag_direct , sizeof(h->zigzag_scan8x8_q0 ));
754  memcpy(h->field_scan_q0 , field_scan , sizeof(h->field_scan_q0 ));
755  memcpy(h->field_scan8x8_q0 , field_scan8x8 , sizeof(h->field_scan8x8_q0 ));
757  } else {
758  memcpy(h->zigzag_scan_q0 , h->zigzag_scan , sizeof(h->zigzag_scan_q0 ));
759  memcpy(h->zigzag_scan8x8_q0 , h->zigzag_scan8x8 , sizeof(h->zigzag_scan8x8_q0 ));
761  memcpy(h->field_scan_q0 , h->field_scan , sizeof(h->field_scan_q0 ));
762  memcpy(h->field_scan8x8_q0 , h->field_scan8x8 , sizeof(h->field_scan8x8_q0 ));
764  }
765 }
766 
767 static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
768 {
769 #define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
770  (CONFIG_H264_D3D11VA_HWACCEL * 2) + \
771  CONFIG_H264_NVDEC_HWACCEL + \
772  CONFIG_H264_VAAPI_HWACCEL + \
773  CONFIG_H264_VIDEOTOOLBOX_HWACCEL + \
774  CONFIG_H264_VDPAU_HWACCEL)
775  enum AVPixelFormat pix_fmts[HWACCEL_MAX + 2], *fmt = pix_fmts;
776  const enum AVPixelFormat *choices = pix_fmts;
777  int i;
778 
779  switch (h->ps.sps->bit_depth_luma) {
780  case 9:
781  if (CHROMA444(h)) {
782  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
783  *fmt++ = AV_PIX_FMT_GBRP9;
784  } else
785  *fmt++ = AV_PIX_FMT_YUV444P9;
786  } else if (CHROMA422(h))
787  *fmt++ = AV_PIX_FMT_YUV422P9;
788  else
789  *fmt++ = AV_PIX_FMT_YUV420P9;
790  break;
791  case 10:
792  if (CHROMA444(h)) {
793  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
794  *fmt++ = AV_PIX_FMT_GBRP10;
795  } else
796  *fmt++ = AV_PIX_FMT_YUV444P10;
797  } else if (CHROMA422(h))
798  *fmt++ = AV_PIX_FMT_YUV422P10;
799  else
800  *fmt++ = AV_PIX_FMT_YUV420P10;
801  break;
802  case 12:
803  if (CHROMA444(h)) {
804  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
805  *fmt++ = AV_PIX_FMT_GBRP12;
806  } else
807  *fmt++ = AV_PIX_FMT_YUV444P12;
808  } else if (CHROMA422(h))
809  *fmt++ = AV_PIX_FMT_YUV422P12;
810  else
811  *fmt++ = AV_PIX_FMT_YUV420P12;
812  break;
813  case 14:
814  if (CHROMA444(h)) {
815  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
816  *fmt++ = AV_PIX_FMT_GBRP14;
817  } else
818  *fmt++ = AV_PIX_FMT_YUV444P14;
819  } else if (CHROMA422(h))
820  *fmt++ = AV_PIX_FMT_YUV422P14;
821  else
822  *fmt++ = AV_PIX_FMT_YUV420P14;
823  break;
824  case 8:
825 #if CONFIG_H264_VDPAU_HWACCEL
826  *fmt++ = AV_PIX_FMT_VDPAU;
827 #endif
828 #if CONFIG_H264_NVDEC_HWACCEL
829  *fmt++ = AV_PIX_FMT_CUDA;
830 #endif
831  if (CHROMA444(h)) {
832  if (h->avctx->colorspace == AVCOL_SPC_RGB)
833  *fmt++ = AV_PIX_FMT_GBRP;
834  else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
835  *fmt++ = AV_PIX_FMT_YUVJ444P;
836  else
837  *fmt++ = AV_PIX_FMT_YUV444P;
838  } else if (CHROMA422(h)) {
840  *fmt++ = AV_PIX_FMT_YUVJ422P;
841  else
842  *fmt++ = AV_PIX_FMT_YUV422P;
843  } else {
844 #if CONFIG_H264_DXVA2_HWACCEL
845  *fmt++ = AV_PIX_FMT_DXVA2_VLD;
846 #endif
847 #if CONFIG_H264_D3D11VA_HWACCEL
848  *fmt++ = AV_PIX_FMT_D3D11VA_VLD;
849  *fmt++ = AV_PIX_FMT_D3D11;
850 #endif
851 #if CONFIG_H264_VAAPI_HWACCEL
852  *fmt++ = AV_PIX_FMT_VAAPI;
853 #endif
854 #if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
855  *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
856 #endif
857  if (h->avctx->codec->pix_fmts)
858  choices = h->avctx->codec->pix_fmts;
859  else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
860  *fmt++ = AV_PIX_FMT_YUVJ420P;
861  else
862  *fmt++ = AV_PIX_FMT_YUV420P;
863  }
864  break;
865  default:
867  "Unsupported bit depth %d\n", h->ps.sps->bit_depth_luma);
868  return AVERROR_INVALIDDATA;
869  }
870 
871  *fmt = AV_PIX_FMT_NONE;
872 
873  for (i=0; choices[i] != AV_PIX_FMT_NONE; i++)
874  if (choices[i] == h->avctx->pix_fmt && !force_callback)
875  return choices[i];
876  return ff_thread_get_format(h->avctx, choices);
877 }
878 
879 /* export coded and cropped frame dimensions to AVCodecContext */
881 {
882  const SPS *sps = (const SPS*)h->ps.sps;
883  int cr = sps->crop_right;
884  int cl = sps->crop_left;
885  int ct = sps->crop_top;
886  int cb = sps->crop_bottom;
887  int width = h->width - (cr + cl);
888  int height = h->height - (ct + cb);
889  av_assert0(sps->crop_right + sps->crop_left < (unsigned)h->width);
890  av_assert0(sps->crop_top + sps->crop_bottom < (unsigned)h->height);
891 
892  /* handle container cropping */
893  if (h->width_from_caller > 0 && h->height_from_caller > 0 &&
894  !sps->crop_top && !sps->crop_left &&
895  FFALIGN(h->width_from_caller, 16) == FFALIGN(width, 16) &&
896  FFALIGN(h->height_from_caller, 16) == FFALIGN(height, 16) &&
897  h->width_from_caller <= width &&
898  h->height_from_caller <= height) {
900  height = h->height_from_caller;
901  cl = 0;
902  ct = 0;
903  cr = h->width - width;
904  cb = h->height - height;
905  } else {
906  h->width_from_caller = 0;
907  h->height_from_caller = 0;
908  }
909 
910  h->avctx->coded_width = h->width;
911  h->avctx->coded_height = h->height;
912  h->avctx->width = width;
913  h->avctx->height = height;
914  h->crop_right = cr;
915  h->crop_left = cl;
916  h->crop_top = ct;
917  h->crop_bottom = cb;
918 }
919 
921 {
922  const SPS *sps = h->ps.sps;
923  int i, ret;
924 
925  ff_set_sar(h->avctx, sps->sar);
927  &h->chroma_x_shift, &h->chroma_y_shift);
928 
929  if (sps->timing_info_present_flag) {
930  int64_t den = sps->time_scale;
931  if (h->x264_build < 44U)
932  den *= 2;
934  sps->num_units_in_tick * h->avctx->ticks_per_frame, den, 1 << 30);
935  }
936 
938 
939  h->first_field = 0;
940  h->prev_interlaced_frame = 1;
941 
942  init_scan_tables(h);
943  ret = ff_h264_alloc_tables(h);
944  if (ret < 0) {
945  av_log(h->avctx, AV_LOG_ERROR, "Could not allocate memory\n");
946  goto fail;
947  }
948 
949  if (sps->bit_depth_luma < 8 || sps->bit_depth_luma > 14 ||
950  sps->bit_depth_luma == 11 || sps->bit_depth_luma == 13
951  ) {
952  av_log(h->avctx, AV_LOG_ERROR, "Unsupported bit depth %d\n",
953  sps->bit_depth_luma);
954  ret = AVERROR_INVALIDDATA;
955  goto fail;
956  }
957 
958  h->cur_bit_depth_luma =
961  h->pixel_shift = sps->bit_depth_luma > 8;
963  h->bit_depth_luma = sps->bit_depth_luma;
964 
966  sps->chroma_format_idc);
970  sps->chroma_format_idc);
972 
973  if (!HAVE_THREADS || !(h->avctx->active_thread_type & FF_THREAD_SLICE)) {
974  ret = ff_h264_slice_context_init(h, &h->slice_ctx[0]);
975  if (ret < 0) {
976  av_log(h->avctx, AV_LOG_ERROR, "context_init() failed.\n");
977  goto fail;
978  }
979  } else {
980  for (i = 0; i < h->nb_slice_ctx; i++) {
981  H264SliceContext *sl = &h->slice_ctx[i];
982 
983  sl->h264 = h;
984  sl->intra4x4_pred_mode = h->intra4x4_pred_mode + i * 8 * 2 * h->mb_stride;
985  sl->mvd_table[0] = h->mvd_table[0] + i * 8 * 2 * h->mb_stride;
986  sl->mvd_table[1] = h->mvd_table[1] + i * 8 * 2 * h->mb_stride;
987 
988  if ((ret = ff_h264_slice_context_init(h, sl)) < 0) {
989  av_log(h->avctx, AV_LOG_ERROR, "context_init() failed.\n");
990  goto fail;
991  }
992  }
993  }
994 
995  h->context_initialized = 1;
996 
997  return 0;
998 fail:
1000  h->context_initialized = 0;
1001  return ret;
1002 }
1003 
1005 {
1006  switch (a) {
1010  default:
1011  return a;
1012  }
1013 }
1014 
/**
 * Activate the PPS/SPS referenced by the current slice and reinitialize the
 * decoder context if the active parameter sets changed in a way that affects
 * buffer layout (dimensions, bit depth, pixel format, SAR, colorspace).
 *
 * NOTE(review): several lines are missing from this extraction (orig. lines
 * 1034-1035, 1040, 1049, 1054, 1082, 1085, 1092-1097, 1103, 1115, 1125),
 * including parts of the reinit conditions and the alternative-transfer SEI
 * handling — the conditions below are therefore incomplete as rendered.
 *
 * @param h           decoder context (updated in place)
 * @param sl          slice context carrying the parsed pps_id
 * @param first_slice nonzero for the first slice of a picture; only then is
 *                    the PPS reference re-resolved from pps_list
 * @return 0 on success, a negative AVERROR code on failure
 */
1015 static int h264_init_ps(H264Context *h, const H264SliceContext *sl, int first_slice)
1016 {
1017  const SPS *sps;
1018  int needs_reinit = 0, must_reinit, ret;
1019 
 /* Re-resolve the active PPS from the slice's pps_id on the first slice only;
  * later slices of the same picture must keep using the same PPS. */
1020  if (first_slice) {
1021  av_buffer_unref(&h->ps.pps_ref);
1022  h->ps.pps = NULL;
1023  h->ps.pps_ref = av_buffer_ref(h->ps.pps_list[sl->pps_id]);
1024  if (!h->ps.pps_ref)
1025  return AVERROR(ENOMEM);
1026  h->ps.pps = (const PPS*)h->ps.pps_ref->data;
1027  }
1028 
 /* SPS changed underneath the active PPS: check whether the change requires
  * a full context reinit (geometry / bit depth mismatch). */
1029  if (h->ps.sps != h->ps.pps->sps) {
1030  h->ps.sps = (const SPS*)h->ps.pps->sps;
1031 
1032  if (h->mb_width != h->ps.sps->mb_width ||
1033  h->mb_height != h->ps.sps->mb_height ||
1036  )
1037  needs_reinit = 1;
1038 
1039  if (h->bit_depth_luma != h->ps.sps->bit_depth_luma ||
1041  needs_reinit = 1;
1042  }
1043  sps = h->ps.sps;
1044 
 /* Hard reinit if an already-initialized context disagrees with the new SPS
  * on coded size, luma bit depth, or macroblock grid. */
1045  must_reinit = (h->context_initialized &&
1046  ( 16*sps->mb_width != h->avctx->coded_width
1047  || 16*sps->mb_height != h->avctx->coded_height
1048  || h->cur_bit_depth_luma != sps->bit_depth_luma
1050  || h->mb_width != sps->mb_width
1051  || h->mb_height != sps->mb_height
1052  ));
1053  if (h->avctx->pix_fmt == AV_PIX_FMT_NONE
1055  must_reinit = 1;
1056 
1057  if (first_slice && av_cmp_q(sps->sar, h->avctx->sample_aspect_ratio))
1058  must_reinit = 1;
1059 
 /* Publish stream-level properties on the AVCodecContext and derive the
  * macroblock-grid geometry; skipped once external setup is frozen. */
1060  if (!h->setup_finished) {
1061  h->avctx->profile = ff_h264_get_profile(sps);
1062  h->avctx->level = sps->level_idc;
1063  h->avctx->refs = sps->ref_frame_count;
1064 
1065  h->mb_width = sps->mb_width;
1066  h->mb_height = sps->mb_height;
1067  h->mb_num = h->mb_width * h->mb_height;
1068  h->mb_stride = h->mb_width + 1;
1069 
1070  h->b_stride = h->mb_width * 4;
1071 
1072  h->chroma_y_shift = sps->chroma_format_idc <= 1; // 400 uses yuv420p
1073 
1074  h->width = 16 * h->mb_width;
1075  h->height = 16 * h->mb_height;
1076 
1077  init_dimensions(h);
1078 
1079  if (sps->video_signal_type_present_flag) {
1080  h->avctx->color_range = sps->full_range > 0 ? AVCOL_RANGE_JPEG
1081  : AVCOL_RANGE_MPEG;
1083  if (h->avctx->colorspace != sps->colorspace)
1084  needs_reinit = 1;
1086  h->avctx->color_trc = sps->color_trc;
1087  h->avctx->colorspace = sps->colorspace;
1088  }
1089  }
1090 
 /* NOTE(review): body of the alternative-transfer SEI handling (orig. lines
  * 1092-1094) is missing here; only the condition head and closing braces
  * survived the extraction. */
1091  if (h->sei.alternative_transfer.present &&
1095  }
1096  }
1098 
 /* Perform the (re)initialization: pick a pixel format, then rebuild the
  * per-context tables. Mid-stream size changes are only allowed on the
  * first slice context. */
1099  if (!h->context_initialized || must_reinit || needs_reinit) {
1100  int flush_changes = h->context_initialized;
1101  h->context_initialized = 0;
1102  if (sl != h->slice_ctx) {
1104  "changing width %d -> %d / height %d -> %d on "
1105  "slice %d\n",
1106  h->width, h->avctx->coded_width,
1107  h->height, h->avctx->coded_height,
1108  h->current_slice + 1);
1109  return AVERROR_INVALIDDATA;
1110  }
1111 
1112  av_assert1(first_slice);
1113 
1114  if (flush_changes)
1116 
1117  if ((ret = get_pixel_format(h, 1)) < 0)
1118  return ret;
1119  h->avctx->pix_fmt = ret;
1120 
1121  av_log(h->avctx, AV_LOG_VERBOSE, "Reinit context to %dx%d, "
1122  "pix_fmt: %s\n", h->width, h->height, av_get_pix_fmt_name(h->avctx->pix_fmt));
1123 
1124  if ((ret = h264_slice_header_init(h)) < 0) {
1126  "h264_slice_header_init() failed\n");
1127  return ret;
1128  }
1129  }
1130 
1131  return 0;
1132 }
1133 
/* Export per-frame properties and side data onto the output AVFrame:
 * interlacing/repeat flags (from picture-timing SEI or the decoding
 * process), top_field_first, stereo 3D frame packing, display orientation,
 * AFD, A53 closed captions, unregistered user data, and SMPTE timecodes.
 *
 * NOTE(review): the signature line (orig. 1134) is missing from this
 * extraction — the call site below (h264_export_frame_props(h)) shows the
 * name and that it takes the H264Context. Many case labels and side-data
 * allocation lines inside are also missing (e.g. orig. 1147, 1157-1175
 * ranges), so the switch bodies below are incomplete as rendered. */
1135 {
1136  const SPS *sps = h->ps.sps;
1137  H264Picture *cur = h->cur_pic_ptr;
1138  AVFrame *out = cur->f;
1139 
1140  out->interlaced_frame = 0;
1141  out->repeat_pict = 0;
1142 
1143  /* Signal interlacing information externally. */
1144  /* Prioritize picture timing SEI information over used
1145  * decoding process if it exists. */
1146  if (h->sei.picture_timing.present) {
1148  h->avctx);
1149  if (ret < 0) {
1150  av_log(h->avctx, AV_LOG_ERROR, "Error processing a picture timing SEI\n");
1152  return ret;
1153  h->sei.picture_timing.present = 0;
1154  }
1155  }
1156 
 /* Map SEI pic_struct to interlaced_frame / repeat_pict; the case labels
  * themselves are missing from this extraction. */
1159  switch (pt->pic_struct) {
1161  break;
1164  out->interlaced_frame = 1;
1165  break;
1168  if (FIELD_OR_MBAFF_PICTURE(h))
1169  out->interlaced_frame = 1;
1170  else
1171  // try to flag soft telecine progressive
1173  break;
1176  /* Signal the possibility of telecined film externally
1177  * (pic_struct 5,6). From these hints, let the applications
1178  * decide if they apply deinterlacing. */
1179  out->repeat_pict = 1;
1180  break;
1182  out->repeat_pict = 2;
1183  break;
1185  out->repeat_pict = 4;
1186  break;
1187  }
1188 
 /* ct_type bit 1 distinguishes interlaced from progressive source timing. */
1189  if ((pt->ct_type & 3) &&
1191  out->interlaced_frame = (pt->ct_type & (1 << 1)) != 0;
1192  } else {
1193  /* Derive interlacing flag from used decoding process. */
1195  }
1197 
1198  if (cur->field_poc[0] != cur->field_poc[1]) {
1199  /* Derive top_field_first from field pocs. */
1200  out->top_field_first = cur->field_poc[0] < cur->field_poc[1];
1201  } else {
1203  /* Use picture timing SEI information. Even if it is a
1204  * information of a past frame, better than nothing. */
1207  out->top_field_first = 1;
1208  else
1209  out->top_field_first = 0;
1210  } else if (out->interlaced_frame) {
1211  /* Default to top field first when pic_struct_present_flag
1212  * is not set but interlaced frame detected */
1213  out->top_field_first = 1;
1214  } else {
1215  /* Most likely progressive */
1216  out->top_field_first = 0;
1217  }
1218  }
1219 
 /* Frame-packing SEI -> AV_STEREO3D side data. */
1220  if (h->sei.frame_packing.present &&
1226  if (stereo) {
1227  switch (fp->arrangement_type) {
1229  stereo->type = AV_STEREO3D_CHECKERBOARD;
1230  break;
1232  stereo->type = AV_STEREO3D_COLUMNS;
1233  break;
1235  stereo->type = AV_STEREO3D_LINES;
1236  break;
1238  if (fp->quincunx_sampling_flag)
1240  else
1241  stereo->type = AV_STEREO3D_SIDEBYSIDE;
1242  break;
1244  stereo->type = AV_STEREO3D_TOPBOTTOM;
1245  break;
1247  stereo->type = AV_STEREO3D_FRAMESEQUENCE;
1248  break;
1249  case H264_SEI_FPA_TYPE_2D:
1250  stereo->type = AV_STEREO3D_2D;
1251  break;
1252  }
1253 
1254  if (fp->content_interpretation_type == 2)
1255  stereo->flags = AV_STEREO3D_FLAG_INVERT;
1256 
1259  stereo->view = AV_STEREO3D_VIEW_LEFT;
1260  else
1261  stereo->view = AV_STEREO3D_VIEW_RIGHT;
1262  }
1263  }
1264  }
1265 
 /* Display-orientation SEI -> 3x3 rotation matrix side data; the SEI's
  * anticlockwise_rotation is a 16.16 fixed-point fraction of a turn. */
1266  if (h->sei.display_orientation.present &&
1271  double angle = o->anticlockwise_rotation * 360 / (double) (1 << 16);
1272  AVFrameSideData *rotation = av_frame_new_side_data(out,
1274  sizeof(int32_t) * 9);
1275  if (rotation) {
1276  av_display_rotation_set((int32_t *)rotation->data, angle);
1277  av_display_matrix_flip((int32_t *)rotation->data,
1278  o->hflip, o->vflip);
1279  }
1280  }
1281 
1282  if (h->sei.afd.present) {
1284  sizeof(uint8_t));
1285 
1286  if (sd) {
1288  h->sei.afd.present = 0;
1289  }
1290  }
1291 
 /* A53 captions: side data takes ownership of the buffer on success;
  * on failure the buffer is dropped. Either way the ref is cleared. */
1292  if (h->sei.a53_caption.buf_ref) {
1293  H264SEIA53Caption *a53 = &h->sei.a53_caption;
1294 
1296  if (!sd)
1297  av_buffer_unref(&a53->buf_ref);
1298  a53->buf_ref = NULL;
1299 
1301  }
1302 
1303  for (int i = 0; i < h->sei.unregistered.nb_buf_ref; i++) {
1304  H264SEIUnregistered *unreg = &h->sei.unregistered;
1305 
1306  if (unreg->buf_ref[i]) {
1309  unreg->buf_ref[i]);
1310  if (!sd)
1311  av_buffer_unref(&unreg->buf_ref[i]);
1312  unreg->buf_ref[i] = NULL;
1313  }
1314  }
1315  h->sei.unregistered.nb_buf_ref = 0;
1316 
 /* Timecode SEI -> S12M timecode side data (tc_sd[0] = count, then one
  * packed SMPTE word per timecode) plus a "timecode" metadata string. */
1317  if (h->sei.picture_timing.timecode_cnt > 0) {
1318  uint32_t *tc_sd;
1319  char tcbuf[AV_TIMECODE_STR_SIZE];
1320 
1323  sizeof(uint32_t)*4);
1324  if (!tcside)
1325  return AVERROR(ENOMEM);
1326 
1327  tc_sd = (uint32_t*)tcside->data;
1328  tc_sd[0] = h->sei.picture_timing.timecode_cnt;
1329 
1330  for (int i = 0; i < tc_sd[0]; i++) {
1331  int drop = h->sei.picture_timing.timecode[i].dropframe;
1332  int hh = h->sei.picture_timing.timecode[i].hours;
1333  int mm = h->sei.picture_timing.timecode[i].minutes;
1334  int ss = h->sei.picture_timing.timecode[i].seconds;
1335  int ff = h->sei.picture_timing.timecode[i].frame;
1336 
1337  tc_sd[i + 1] = av_timecode_get_smpte(h->avctx->framerate, drop, hh, mm, ss, ff);
1338  av_timecode_make_smpte_tc_string2(tcbuf, h->avctx->framerate, tc_sd[i + 1], 0, 0);
1339  av_dict_set(&out->metadata, "timecode", tcbuf, 0);
1340  }
1342  }
1343 
1344  return 0;
1345 }
1346 
/* Select which delayed picture (if any) to output next, implementing POC
 * reordering: maintain the sliding last_pocs[] window, grow has_b_frames
 * when out-of-order output is detected, insert the current picture into
 * delayed_pic[], and pick the lowest-POC candidate for output.
 *
 * NOTE(review): the signature line (orig. 1347) is missing from this
 * extraction — the call site below (h264_select_output_frame(h)) shows the
 * name. A few interior lines (orig. 1358-1359, 1391, 1427-1429, 1433) are
 * also missing, truncating some conditions. */
1348 {
1349  const SPS *sps = h->ps.sps;
1350  H264Picture *out = h->cur_pic_ptr;
1351  H264Picture *cur = h->cur_pic_ptr;
1352  int i, pics, out_of_order, out_idx;
1353 
 /* Latch and clear the pending MMCO-reset flag onto the current picture. */
1354  cur->mmco_reset = h->mmco_reset;
1355  h->mmco_reset = 0;
1356 
1357  if (sps->bitstream_restriction_flag ||
1360  }
1361 
 /* Insert cur->poc into the sorted last_pocs[] window, shifting smaller
  * entries down; i ends up as the insertion depth. */
1362  for (i = 0; 1; i++) {
1363  if(i == MAX_DELAYED_PIC_COUNT || cur->poc < h->last_pocs[i]){
1364  if(i)
1365  h->last_pocs[i-1] = cur->poc;
1366  break;
1367  } else if(i) {
1368  h->last_pocs[i-1]= h->last_pocs[i];
1369  }
1370  }
1371  out_of_order = MAX_DELAYED_PIC_COUNT - i;
1372  if( cur->f->pict_type == AV_PICTURE_TYPE_B
1373  || (h->last_pocs[MAX_DELAYED_PIC_COUNT-2] > INT_MIN && h->last_pocs[MAX_DELAYED_PIC_COUNT-1] - (int64_t)h->last_pocs[MAX_DELAYED_PIC_COUNT-2] > 2))
1374  out_of_order = FFMAX(out_of_order, 1);
1375  if (out_of_order == MAX_DELAYED_PIC_COUNT) {
1376  av_log(h->avctx, AV_LOG_VERBOSE, "Invalid POC %d<%d\n", cur->poc, h->last_pocs[0]);
1377  for (i = 1; i < MAX_DELAYED_PIC_COUNT; i++)
1378  h->last_pocs[i] = INT_MIN;
1379  h->last_pocs[0] = cur->poc;
1380  cur->mmco_reset = 1;
1381  } else if(h->avctx->has_b_frames < out_of_order && !sps->bitstream_restriction_flag){
1382  int loglevel = h->avctx->frame_number > 1 ? AV_LOG_WARNING : AV_LOG_VERBOSE;
1383  av_log(h->avctx, loglevel, "Increasing reorder buffer to %d\n", out_of_order);
1384  h->avctx->has_b_frames = out_of_order;
1385  }
1386 
1387  pics = 0;
1388  while (h->delayed_pic[pics])
1389  pics++;
1390 
1392 
 /* Queue the current picture; keep a DELAYED_PIC_REF on non-reference
  * pictures so they survive until output. */
1393  h->delayed_pic[pics++] = cur;
1394  if (cur->reference == 0)
1395  cur->reference = DELAYED_PIC_REF;
1396 
 /* Pick the lowest-POC delayed picture, stopping the scan at the first
  * keyframe / MMCO-reset boundary. */
1397  out = h->delayed_pic[0];
1398  out_idx = 0;
1399  for (i = 1; h->delayed_pic[i] &&
1400  !h->delayed_pic[i]->f->key_frame &&
1401  !h->delayed_pic[i]->mmco_reset;
1402  i++)
1403  if (h->delayed_pic[i]->poc < out->poc) {
1404  out = h->delayed_pic[i];
1405  out_idx = i;
1406  }
1407  if (h->avctx->has_b_frames == 0 &&
1408  (h->delayed_pic[0]->f->key_frame || h->delayed_pic[0]->mmco_reset))
1409  h->next_outputed_poc = INT_MIN;
1410  out_of_order = out->poc < h->next_outputed_poc;
1411 
1412  if (out_of_order || pics > h->avctx->has_b_frames) {
1413  out->reference &= ~DELAYED_PIC_REF;
1414  for (i = out_idx; h->delayed_pic[i]; i++)
1415  h->delayed_pic[i] = h->delayed_pic[i + 1];
1416  }
1417  if (!out_of_order && pics > h->avctx->has_b_frames) {
1418  h->next_output_pic = out;
1419  if (out_idx == 0 && h->delayed_pic[0] && (h->delayed_pic[0]->f->key_frame || h->delayed_pic[0]->mmco_reset)) {
1420  h->next_outputed_poc = INT_MIN;
1421  } else
1422  h->next_outputed_poc = out->poc;
1423 
1424  if (out->recovered) {
1425  // We have reached an recovery point and all frames after it in
1426  // display order are "recovered".
1428  }
1430 
 /* Unrecovered frames are either suppressed or flagged corrupt,
  * depending on AV_CODEC_FLAG_OUTPUT_CORRUPT (condition partly missing
  * from this extraction). */
1431  if (!out->recovered) {
1432  if (!(h->avctx->flags & AV_CODEC_FLAG_OUTPUT_CORRUPT) &&
1434  h->next_output_pic = NULL;
1435  } else {
1436  out->f->flags |= AV_FRAME_FLAG_CORRUPT;
1437  }
1438  }
1439  } else {
1440  av_log(h->avctx, AV_LOG_DEBUG, "no picture %s\n", out_of_order ? "ooo" : "");
1441  }
1442 
1443  return 0;
1444 }
1445 
1446 /* This function is called right after decoding the slice header for a first
1447  * slice in a field (or a frame). It decides whether we are decoding a new frame
1448  * or a second field in a pair and does the necessary setup.
1449  */
/* NOTE(review): the first signature line (orig. 1450, presumably
 * "static int h264_field_start(H264Context *h, H264SliceContext *sl," —
 * see the call h264_field_start(h, sl, nal, first_slice) below) and a
 * number of interior lines are missing from this extraction; some
 * conditions and calls below are truncated as a result. */
1451  const H2645NAL *nal, int first_slice)
1452 {
1453  int i;
1454  const SPS *sps;
1455 
1456  int last_pic_structure, last_pic_droppable, ret;
1457 
 /* Activate parameter sets first; this may reinitialize the context. */
1458  ret = h264_init_ps(h, sl, first_slice);
1459  if (ret < 0)
1460  return ret;
1461 
1462  sps = h->ps.sps;
1463 
1464  if (sps && sps->bitstream_restriction_flag &&
1465  h->avctx->has_b_frames < sps->num_reorder_frames) {
1467  }
1468 
 /* Remember the previous field's state so it can be restored if this
  * field turns out to be an invalid pairing. */
1469  last_pic_droppable = h->droppable;
1470  last_pic_structure = h->picture_structure;
1471  h->droppable = (nal->ref_idc == 0);
1473 
1474  h->poc.frame_num = sl->frame_num;
1475  h->poc.poc_lsb = sl->poc_lsb;
1477  h->poc.delta_poc[0] = sl->delta_poc[0];
1478  h->poc.delta_poc[1] = sl->delta_poc[1];
1479 
1480  /* Shorten frame num gaps so we don't have to allocate reference
1481  * frames just to throw them away */
1482  if (h->poc.frame_num != h->poc.prev_frame_num) {
1483  int unwrap_prev_frame_num = h->poc.prev_frame_num;
1484  int max_frame_num = 1 << sps->log2_max_frame_num;
1485 
1486  if (unwrap_prev_frame_num > h->poc.frame_num)
1487  unwrap_prev_frame_num -= max_frame_num;
1488 
1489  if ((h->poc.frame_num - unwrap_prev_frame_num) > sps->ref_frame_count) {
1490  unwrap_prev_frame_num = (h->poc.frame_num - sps->ref_frame_count) - 1;
1491  if (unwrap_prev_frame_num < 0)
1492  unwrap_prev_frame_num += max_frame_num;
1493 
1494  h->poc.prev_frame_num = unwrap_prev_frame_num;
1495  }
1496  }
1497 
1498  /* See if we have a decoded first field looking for a pair...
1499  * Here, we're using that to see if we should mark previously
1500  * decode frames as "finished".
1501  * We have to do that before the "dummy" in-between frame allocation,
1502  * since that can modify h->cur_pic_ptr. */
1503  if (h->first_field) {
1504  int last_field = last_pic_structure == PICT_BOTTOM_FIELD;
1505  av_assert0(h->cur_pic_ptr);
1506  av_assert0(h->cur_pic_ptr->f->buf[0]);
1507  assert(h->cur_pic_ptr->reference != DELAYED_PIC_REF);
1508 
1509  /* Mark old field/frame as completed */
1510  if (h->cur_pic_ptr->tf.owner[last_field] == h->avctx) {
1511  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, last_field);
1512  }
1513 
1514  /* figure out if we have a complementary field pair */
1515  if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
1516  /* Previous field is unmatched. Don't display it, but let it
1517  * remain for reference if marked as such. */
1518  if (last_pic_structure != PICT_FRAME) {
1520  last_pic_structure == PICT_TOP_FIELD);
1521  }
1522  } else {
1523  if (h->cur_pic_ptr->frame_num != h->poc.frame_num) {
1524  /* This and previous field were reference, but had
1525  * different frame_nums. Consider this field first in
1526  * pair. Throw away previous field except for reference
1527  * purposes. */
1528  if (last_pic_structure != PICT_FRAME) {
1530  last_pic_structure == PICT_TOP_FIELD);
1531  }
1532  } else {
1533  /* Second field in complementary pair */
1534  if (!((last_pic_structure == PICT_TOP_FIELD &&
1536  (last_pic_structure == PICT_BOTTOM_FIELD &&
1539  "Invalid field mode combination %d/%d\n",
1540  last_pic_structure, h->picture_structure);
1541  h->picture_structure = last_pic_structure;
1542  h->droppable = last_pic_droppable;
1543  return AVERROR_INVALIDDATA;
1544  } else if (last_pic_droppable != h->droppable) {
1546  "Found reference and non-reference fields in the same frame, which");
1547  h->picture_structure = last_pic_structure;
1548  h->droppable = last_pic_droppable;
1549  return AVERROR_PATCHWELCOME;
1550  }
1551  }
1552  }
1553  }
1554 
 /* Frame-num gap handling: synthesize one dummy frame per missing
  * frame_num, concealing it by copying (or clearing) the newest short-term
  * reference. */
1555  while (h->poc.frame_num != h->poc.prev_frame_num && !h->first_field &&
1556  h->poc.frame_num != (h->poc.prev_frame_num + 1) % (1 << sps->log2_max_frame_num)) {
1557  H264Picture *prev = h->short_ref_count ? h->short_ref[0] : NULL;
1558  av_log(h->avctx, AV_LOG_DEBUG, "Frame num gap %d %d\n",
1559  h->poc.frame_num, h->poc.prev_frame_num);
1561  for(i=0; i<FF_ARRAY_ELEMS(h->last_pocs); i++)
1562  h->last_pocs[i] = INT_MIN;
1563  ret = h264_frame_start(h);
1564  if (ret < 0) {
1565  h->first_field = 0;
1566  return ret;
1567  }
1568 
1569  h->poc.prev_frame_num++;
1570  h->poc.prev_frame_num %= 1 << sps->log2_max_frame_num;
1573  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 0);
1574  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 1);
1575 
1576  h->explicit_ref_marking = 0;
1578  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
1579  return ret;
1580  /* Error concealment: If a ref is missing, copy the previous ref
1581  * in its place.
1582  * FIXME: Avoiding a memcpy would be nice, but ref handling makes
1583  * many assumptions about there being no actual duplicates.
1584  * FIXME: This does not copy padding for out-of-frame motion
1585  * vectors. Given we are concealing a lost frame, this probably
1586  * is not noticeable by comparison, but it should be fixed. */
1587  if (h->short_ref_count) {
 /* Mid-grey fill values per plane for the fallback color fill. */
1588  int c[4] = {
1589  1<<(h->ps.sps->bit_depth_luma-1),
1590  1<<(h->ps.sps->bit_depth_chroma-1),
1591  1<<(h->ps.sps->bit_depth_chroma-1),
1592  -1
1593  };
1594 
1595  if (prev &&
1596  h->short_ref[0]->f->width == prev->f->width &&
1597  h->short_ref[0]->f->height == prev->f->height &&
1598  h->short_ref[0]->f->format == prev->f->format) {
1599  ff_thread_await_progress(&prev->tf, INT_MAX, 0);
1600  if (prev->field_picture)
1601  ff_thread_await_progress(&prev->tf, INT_MAX, 1);
1602  av_image_copy(h->short_ref[0]->f->data,
1603  h->short_ref[0]->f->linesize,
1604  (const uint8_t **)prev->f->data,
1605  prev->f->linesize,
1606  prev->f->format,
1607  prev->f->width,
1608  prev->f->height);
1609  h->short_ref[0]->poc = prev->poc + 2U;
1610  } else if (!h->frame_recovered && !h->avctx->hwaccel)
1611  ff_color_frame(h->short_ref[0]->f, c);
1612  h->short_ref[0]->frame_num = h->poc.prev_frame_num;
1613  }
1614  }
1615 
1616  /* See if we have a decoded first field looking for a pair...
1617  * We're using that to see whether to continue decoding in that
1618  * frame, or to allocate a new one. */
1619  if (h->first_field) {
1620  av_assert0(h->cur_pic_ptr);
1621  av_assert0(h->cur_pic_ptr->f->buf[0]);
1622  assert(h->cur_pic_ptr->reference != DELAYED_PIC_REF);
1623 
1624  /* figure out if we have a complementary field pair */
1625  if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
1626  /* Previous field is unmatched. Don't display it, but let it
1627  * remain for reference if marked as such. */
1628  h->missing_fields ++;
1629  h->cur_pic_ptr = NULL;
1630  h->first_field = FIELD_PICTURE(h);
1631  } else {
1632  h->missing_fields = 0;
1633  if (h->cur_pic_ptr->frame_num != h->poc.frame_num) {
1636  /* This and the previous field had different frame_nums.
1637  * Consider this field first in pair. Throw away previous
1638  * one except for reference purposes. */
1639  h->first_field = 1;
1640  h->cur_pic_ptr = NULL;
1641  } else if (h->cur_pic_ptr->reference & DELAYED_PIC_REF) {
1642  /* This frame was already output, we cannot draw into it
1643  * anymore.
1644  */
1645  h->first_field = 1;
1646  h->cur_pic_ptr = NULL;
1647  } else {
1648  /* Second field in complementary pair */
1649  h->first_field = 0;
1650  }
1651  }
1652  } else {
1653  /* Frame or first field in a potentially complementary pair */
1654  h->first_field = FIELD_PICTURE(h);
1655  }
1656 
 /* New frame (or first field): allocate it; second field: take ownership
  * of the corresponding field of the existing picture. */
1657  if (!FIELD_PICTURE(h) || h->first_field) {
1658  if (h264_frame_start(h) < 0) {
1659  h->first_field = 0;
1660  return AVERROR_INVALIDDATA;
1661  }
1662  } else {
1665  h->cur_pic_ptr->tf.owner[field] = h->avctx;
1666  }
1667  /* Some macroblocks can be accessed before they're available in case
1668  * of lost slices, MBAFF or threading. */
1669  if (FIELD_PICTURE(h)) {
1670  for(i = (h->picture_structure == PICT_BOTTOM_FIELD); i<h->mb_height; i++)
1671  memset(h->slice_table + i*h->mb_stride, -1, (h->mb_stride - (i+1==h->mb_height)) * sizeof(*h->slice_table));
1672  } else {
1673  memset(h->slice_table, -1,
1674  (h->mb_height * h->mb_stride - 1) * sizeof(*h->slice_table));
1675  }
1676 
 /* POC computation call (head of the call is missing from this
  * extraction; only the trailing argument line survived). */
1678  h->ps.sps, &h->poc, h->picture_structure, nal->ref_idc);
1679  if (ret < 0)
1680  return ret;
1681 
1682  memcpy(h->mmco, sl->mmco, sl->nb_mmco * sizeof(*h->mmco));
1683  h->nb_mmco = sl->nb_mmco;
1685 
1686  h->picture_idr = nal->type == H264_NAL_IDR_SLICE;
1687 
 /* Recovery-point SEI: track the frame_num at which output becomes
  * trustworthy again after a seek/broken stream. */
1688  if (h->sei.recovery_point.recovery_frame_cnt >= 0) {
1689  const int sei_recovery_frame_cnt = h->sei.recovery_point.recovery_frame_cnt;
1690 
1691  if (h->poc.frame_num != sei_recovery_frame_cnt || sl->slice_type_nos != AV_PICTURE_TYPE_I)
1692  h->valid_recovery_point = 1;
1693 
1694  if ( h->recovery_frame < 0
1695  || av_mod_uintp2(h->recovery_frame - h->poc.frame_num, h->ps.sps->log2_max_frame_num) > sei_recovery_frame_cnt) {
1696  h->recovery_frame = av_mod_uintp2(h->poc.frame_num + sei_recovery_frame_cnt, h->ps.sps->log2_max_frame_num);
1697 
1698  if (!h->valid_recovery_point)
1699  h->recovery_frame = h->poc.frame_num;
1700  }
1701  }
1702 
1703  h->cur_pic_ptr->f->key_frame |= (nal->type == H264_NAL_IDR_SLICE);
1704 
1705  if (nal->type == H264_NAL_IDR_SLICE ||
1706  (h->recovery_frame == h->poc.frame_num && nal->ref_idc)) {
1707  h->recovery_frame = -1;
1708  h->cur_pic_ptr->recovered = 1;
1709  }
1710  // If we have an IDR, all frames after it in decoded order are
1711  // "recovered".
1712  if (nal->type == H264_NAL_IDR_SLICE)
1714 #if 1
1716 #else
1718 #endif
1719 
1720  /* Set the frame properties/side data. Only done for the second field in
1721  * field coded frames, since some SEI information is present for each field
1722  * and is merged by the SEI parsing code. */
1723  if (!FIELD_PICTURE(h) || !h->first_field || h->missing_fields > 1) {
1724  ret = h264_export_frame_props(h);
1725  if (ret < 0)
1726  return ret;
1727 
1728  ret = h264_select_output_frame(h);
1729  if (ret < 0)
1730  return ret;
1731  }
1732 
1733  return 0;
1734 }
1735 
/* Parse the slice header bitstream into the slice context: slice type,
 * pps_id, frame_num, field/frame structure, POC fields, reference counts,
 * prediction weights, ref-pic marking, CABAC init, QP and deblocking
 * parameters. Pure parsing — decoder-global state is not modified here.
 *
 * NOTE(review): the first signature line (orig. 1736, presumably
 * "static int h264_slice_header_parse(H264Context *h, H264SliceContext *sl," —
 * see the call h264_slice_header_parse(h, sl, nal) below) and several
 * interior lines are missing from this extraction; some conditions and
 * call heads below are truncated as a result. */
1737  const H2645NAL *nal)
1738 {
1739  const SPS *sps;
1740  const PPS *pps;
1741  int ret;
1742  unsigned int slice_type, tmp, i;
1743  int field_pic_flag, bottom_field_flag;
1744  int first_slice = sl == h->slice_ctx && !h->current_slice;
1745  int picture_structure;
1746 
1747  if (first_slice)
1749 
1750  sl->first_mb_addr = get_ue_golomb_long(&sl->gb);
1751 
 /* slice_type: values 5-9 are the "fixed" variants of 0-4 (all slices of
  * the picture share the type). */
1752  slice_type = get_ue_golomb_31(&sl->gb);
1753  if (slice_type > 9) {
1755  "slice type %d too large at %d\n",
1756  slice_type, sl->first_mb_addr);
1757  return AVERROR_INVALIDDATA;
1758  }
1759  if (slice_type > 4) {
1760  slice_type -= 5;
1761  sl->slice_type_fixed = 1;
1762  } else
1763  sl->slice_type_fixed = 0;
1764 
1765  slice_type = ff_h264_golomb_to_pict_type[slice_type];
1766  sl->slice_type = slice_type;
1767  sl->slice_type_nos = slice_type & 3;
1768 
1769  if (nal->type == H264_NAL_IDR_SLICE &&
1771  av_log(h->avctx, AV_LOG_ERROR, "A non-intra slice in an IDR NAL unit.\n");
1772  return AVERROR_INVALIDDATA;
1773  }
1774 
1775  sl->pps_id = get_ue_golomb(&sl->gb);
1776  if (sl->pps_id >= MAX_PPS_COUNT) {
1777  av_log(h->avctx, AV_LOG_ERROR, "pps_id %u out of range\n", sl->pps_id);
1778  return AVERROR_INVALIDDATA;
1779  }
1780  if (!h->ps.pps_list[sl->pps_id]) {
1782  "non-existing PPS %u referenced\n",
1783  sl->pps_id);
1784  return AVERROR_INVALIDDATA;
1785  }
 /* Parse against the referenced PPS/SPS directly; they are activated on
  * the decoder context later, not here. */
1786  pps = (const PPS*)h->ps.pps_list[sl->pps_id]->data;
1787  sps = pps->sps;
1788 
1789  sl->frame_num = get_bits(&sl->gb, sps->log2_max_frame_num);
1790  if (!first_slice) {
1791  if (h->poc.frame_num != sl->frame_num) {
1792  av_log(h->avctx, AV_LOG_ERROR, "Frame num change from %d to %d\n",
1793  h->poc.frame_num, sl->frame_num);
1794  return AVERROR_INVALIDDATA;
1795  }
1796  }
1797 
1798  sl->mb_mbaff = 0;
1799 
1800  if (sps->frame_mbs_only_flag) {
1801  picture_structure = PICT_FRAME;
1802  } else {
1803  if (!sps->direct_8x8_inference_flag && slice_type == AV_PICTURE_TYPE_B) {
1804  av_log(h->avctx, AV_LOG_ERROR, "This stream was generated by a broken encoder, invalid 8x8 inference\n");
1805  return -1;
1806  }
1807  field_pic_flag = get_bits1(&sl->gb);
1808  if (field_pic_flag) {
1809  bottom_field_flag = get_bits1(&sl->gb);
1810  picture_structure = PICT_TOP_FIELD + bottom_field_flag;
1811  } else {
1812  picture_structure = PICT_FRAME;
1813  }
1814  }
1815  sl->picture_structure = picture_structure;
1816  sl->mb_field_decoding_flag = picture_structure != PICT_FRAME;
1817 
 /* Field pictures double the picture-number space (one per field). */
1818  if (picture_structure == PICT_FRAME) {
1819  sl->curr_pic_num = sl->frame_num;
1820  sl->max_pic_num = 1 << sps->log2_max_frame_num;
1821  } else {
1822  sl->curr_pic_num = 2 * sl->frame_num + 1;
1823  sl->max_pic_num = 1 << (sps->log2_max_frame_num + 1);
1824  }
1825 
1826  if (nal->type == H264_NAL_IDR_SLICE)
1827  get_ue_golomb_long(&sl->gb); /* idr_pic_id */
1828 
1829  if (sps->poc_type == 0) {
1830  sl->poc_lsb = get_bits(&sl->gb, sps->log2_max_poc_lsb);
1831 
1832  if (pps->pic_order_present == 1 && picture_structure == PICT_FRAME)
1833  sl->delta_poc_bottom = get_se_golomb(&sl->gb);
1834  }
1835 
1836  if (sps->poc_type == 1 && !sps->delta_pic_order_always_zero_flag) {
1837  sl->delta_poc[0] = get_se_golomb(&sl->gb);
1838 
1839  if (pps->pic_order_present == 1 && picture_structure == PICT_FRAME)
1840  sl->delta_poc[1] = get_se_golomb(&sl->gb);
1841  }
1842 
1843  sl->redundant_pic_count = 0;
1844  if (pps->redundant_pic_cnt_present)
1845  sl->redundant_pic_count = get_ue_golomb(&sl->gb);
1846 
1847  if (sl->slice_type_nos == AV_PICTURE_TYPE_B)
1848  sl->direct_spatial_mv_pred = get_bits1(&sl->gb);
1849 
 /* Reference-count parsing call (head of the call is missing from this
  * extraction; only the trailing argument lines survived). */
1851  &sl->gb, pps, sl->slice_type_nos,
1852  picture_structure, h->avctx);
1853  if (ret < 0)
1854  return ret;
1855 
1856  if (sl->slice_type_nos != AV_PICTURE_TYPE_I) {
1858  if (ret < 0) {
1859  sl->ref_count[1] = sl->ref_count[0] = 0;
1860  return ret;
1861  }
1862  }
1863 
1864  sl->pwt.use_weight = 0;
1865  for (i = 0; i < 2; i++) {
1866  sl->pwt.luma_weight_flag[i] = 0;
1867  sl->pwt.chroma_weight_flag[i] = 0;
1868  }
 /* Explicit weighted prediction tables: P slices with weighted_pred, or
  * B slices with weighted_bipred_idc == 1 (B-slice clause truncated here). */
1869  if ((pps->weighted_pred && sl->slice_type_nos == AV_PICTURE_TYPE_P) ||
1870  (pps->weighted_bipred_idc == 1 &&
1872  ret = ff_h264_pred_weight_table(&sl->gb, sps, sl->ref_count,
1873  sl->slice_type_nos, &sl->pwt,
1874  picture_structure, h->avctx);
1875  if (ret < 0)
1876  return ret;
1877  }
1878 
1879  sl->explicit_ref_marking = 0;
1880  if (nal->ref_idc) {
1881  ret = ff_h264_decode_ref_pic_marking(sl, &sl->gb, nal, h->avctx);
1882  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
1883  return AVERROR_INVALIDDATA;
1884  }
1885 
1886  if (sl->slice_type_nos != AV_PICTURE_TYPE_I && pps->cabac) {
1887  tmp = get_ue_golomb_31(&sl->gb);
1888  if (tmp > 2) {
1889  av_log(h->avctx, AV_LOG_ERROR, "cabac_init_idc %u overflow\n", tmp);
1890  return AVERROR_INVALIDDATA;
1891  }
1892  sl->cabac_init_idc = tmp;
1893  }
1894 
 /* slice_qp_delta: unsigned addition so a bitstream-induced negative
  * overflow is caught by the range check rather than being UB. */
1895  sl->last_qscale_diff = 0;
1896  tmp = pps->init_qp + (unsigned)get_se_golomb(&sl->gb);
1897  if (tmp > 51 + 6 * (sps->bit_depth_luma - 8)) {
1898  av_log(h->avctx, AV_LOG_ERROR, "QP %u out of range\n", tmp);
1899  return AVERROR_INVALIDDATA;
1900  }
1901  sl->qscale = tmp;
1902  sl->chroma_qp[0] = get_chroma_qp(pps, 0, sl->qscale);
1903  sl->chroma_qp[1] = get_chroma_qp(pps, 1, sl->qscale);
1904  // FIXME qscale / qp ... stuff
1905  if (sl->slice_type == AV_PICTURE_TYPE_SP)
1906  get_bits1(&sl->gb); /* sp_for_switch_flag */
1907  if (sl->slice_type == AV_PICTURE_TYPE_SP ||
1909  get_se_golomb(&sl->gb); /* slice_qs_delta */
1910 
1911  sl->deblocking_filter = 1;
1912  sl->slice_alpha_c0_offset = 0;
1913  sl->slice_beta_offset = 0;
 /* Deblocking-control block (its guarding condition, orig. line 1914, is
  * missing from this extraction). */
1915  tmp = get_ue_golomb_31(&sl->gb);
1916  if (tmp > 2) {
1918  "deblocking_filter_idc %u out of range\n", tmp);
1919  return AVERROR_INVALIDDATA;
1920  }
1921  sl->deblocking_filter = tmp;
1922  if (sl->deblocking_filter < 2)
1923  sl->deblocking_filter ^= 1; // 1<->0
1924 
1925  if (sl->deblocking_filter) {
1926  int slice_alpha_c0_offset_div2 = get_se_golomb(&sl->gb);
1927  int slice_beta_offset_div2 = get_se_golomb(&sl->gb);
1928  if (slice_alpha_c0_offset_div2 > 6 ||
1929  slice_alpha_c0_offset_div2 < -6 ||
1930  slice_beta_offset_div2 > 6 ||
1931  slice_beta_offset_div2 < -6) {
1933  "deblocking filter parameters %d %d out of range\n",
1934  slice_alpha_c0_offset_div2, slice_beta_offset_div2);
1935  return AVERROR_INVALIDDATA;
1936  }
1937  sl->slice_alpha_c0_offset = slice_alpha_c0_offset_div2 * 2;
1938  sl->slice_beta_offset = slice_beta_offset_div2 * 2;
1939  }
1940  }
1941 
1942  return 0;
1943 }
1944 
1945 /* do all the per-slice initialization needed before we can start decoding the
1946  * actual MBs */
/* NOTE(review): the first signature line (orig. 1947, presumably
 * "static int h264_slice_init(H264Context *h, H264SliceContext *sl," — see
 * the call h264_slice_init(h, sl, nal) below) and a few interior lines are
 * missing from this extraction. Returns 0 on success or a negative
 * AVERROR code. */
1948  const H2645NAL *nal)
1949 {
1950  int i, j, ret = 0;
1951 
1952  if (h->picture_idr && nal->type != H264_NAL_IDR_SLICE) {
1953  av_log(h->avctx, AV_LOG_ERROR, "Invalid mix of IDR and non-IDR slices\n");
1954  return AVERROR_INVALIDDATA;
1955  }
1956 
1957  av_assert1(h->mb_num == h->mb_width * h->mb_height);
1958  if (sl->first_mb_addr << FIELD_OR_MBAFF_PICTURE(h) >= h->mb_num ||
1959  sl->first_mb_addr >= h->mb_num) {
1960  av_log(h->avctx, AV_LOG_ERROR, "first_mb_in_slice overflow\n");
1961  return AVERROR_INVALIDDATA;
1962  }
 /* Translate first_mb_addr into mb_x/mb_y resync coordinates (the shift
  * amount on the mb_y line, orig. 1965-1966, is missing here). */
1963  sl->resync_mb_x = sl->mb_x = sl->first_mb_addr % h->mb_width;
1964  sl->resync_mb_y = sl->mb_y = (sl->first_mb_addr / h->mb_width) <<
1967  sl->resync_mb_y = sl->mb_y = sl->mb_y + 1;
1968  av_assert1(sl->mb_y < h->mb_height);
1969 
1970  ret = ff_h264_build_ref_list(h, sl);
1971  if (ret < 0)
1972  return ret;
1973 
 /* Implicit weighted prediction for B slices (weighted_bipred_idc == 2);
  * MBAFF needs separate tables for each field parity. */
1974  if (h->ps.pps->weighted_bipred_idc == 2 &&
1976  implicit_weight_table(h, sl, -1);
1977  if (FRAME_MBAFF(h)) {
1978  implicit_weight_table(h, sl, 0);
1979  implicit_weight_table(h, sl, 1);
1980  }
1981  }
1982 
1985  if (!h->setup_finished)
1987 
 /* Honor skip_loop_filter settings (the intermediate AVDISCARD clauses,
  * orig. 1989-1995, are missing from this extraction). */
1988  if (h->avctx->skip_loop_filter >= AVDISCARD_ALL ||
1996  nal->ref_idc == 0))
1997  sl->deblocking_filter = 0;
1998 
1999  if (sl->deblocking_filter == 1 && h->nb_slice_ctx > 1) {
2000  if (h->avctx->flags2 & AV_CODEC_FLAG2_FAST) {
2001  /* Cheat slightly for speed:
2002  * Do not bother to deblock across slices. */
2003  sl->deblocking_filter = 2;
2004  } else {
2005  h->postpone_filter = 1;
2006  }
2007  }
 /* QP threshold below which deblocking can be skipped entirely. */
2008  sl->qp_thresh = 15 -
2010  FFMAX3(0,
2011  h->ps.pps->chroma_qp_index_offset[0],
2012  h->ps.pps->chroma_qp_index_offset[1]) +
2013  6 * (h->ps.sps->bit_depth_luma - 8);
2014 
2015  sl->slice_num = ++h->current_slice;
2016 
2017  if (sl->slice_num)
2018  h->slice_row[(sl->slice_num-1)&(MAX_SLICES-1)]= sl->resync_mb_y;
2019  if ( h->slice_row[sl->slice_num&(MAX_SLICES-1)] + 3 >= sl->resync_mb_y
2020  && h->slice_row[sl->slice_num&(MAX_SLICES-1)] <= sl->resync_mb_y
2021  && sl->slice_num >= MAX_SLICES) {
2022  //in case of ASO this check needs to be updated depending on how we decide to assign slice numbers in this case
2023  av_log(h->avctx, AV_LOG_WARNING, "Possibly too many slices (%d >= %d), increase MAX_SLICES and recompile if there are artifacts\n", sl->slice_num, MAX_SLICES);
2024  }
2025 
 /* Build the ref2frm table mapping reference-list indices to frame ids
  * (4 * id + reference flags); 60 marks "not found". */
2026  for (j = 0; j < 2; j++) {
2027  int id_list[16];
2028  int *ref2frm = h->ref2frm[sl->slice_num & (MAX_SLICES - 1)][j];
2029  for (i = 0; i < 16; i++) {
2030  id_list[i] = 60;
2031  if (j < sl->list_count && i < sl->ref_count[j] &&
2032  sl->ref_list[j][i].parent->f->buf[0]) {
2033  int k;
2034  AVBuffer *buf = sl->ref_list[j][i].parent->f->buf[0]->buffer;
2035  for (k = 0; k < h->short_ref_count; k++)
2036  if (h->short_ref[k]->f->buf[0]->buffer == buf) {
2037  id_list[i] = k;
2038  break;
2039  }
2040  for (k = 0; k < h->long_ref_count; k++)
2041  if (h->long_ref[k] && h->long_ref[k]->f->buf[0]->buffer == buf) {
2042  id_list[i] = h->short_ref_count + k;
2043  break;
2044  }
2045  }
2046  }
2047 
2048  ref2frm[0] =
2049  ref2frm[1] = -1;
2050  for (i = 0; i < 16; i++)
2051  ref2frm[i + 2] = 4 * id_list[i] + (sl->ref_list[j][i].reference & 3);
2052  ref2frm[18 + 0] =
2053  ref2frm[18 + 1] = -1;
2054  for (i = 16; i < 48; i++)
2055  ref2frm[i + 4] = 4 * id_list[(i - 16) >> 1] +
2056  (sl->ref_list[j][i].reference & 3);
2057  }
2058 
2059  if (h->avctx->debug & FF_DEBUG_PICT_INFO) {
2061  "slice:%d %s mb:%d %c%s%s frame:%d poc:%d/%d ref:%d/%d qp:%d loop:%d:%d:%d weight:%d%s %s\n",
2062  sl->slice_num,
2063  (h->picture_structure == PICT_FRAME ? "F" : h->picture_structure == PICT_TOP_FIELD ? "T" : "B"),
2064  sl->mb_y * h->mb_width + sl->mb_x,
2066  sl->slice_type_fixed ? " fix" : "",
2067  nal->type == H264_NAL_IDR_SLICE ? " IDR" : "",
2068  h->poc.frame_num,
2069  h->cur_pic_ptr->field_poc[0],
2070  h->cur_pic_ptr->field_poc[1],
2071  sl->ref_count[0], sl->ref_count[1],
2072  sl->qscale,
2073  sl->deblocking_filter,
2075  sl->pwt.use_weight,
2076  sl->pwt.use_weight == 1 && sl->pwt.use_weight_chroma ? "c" : "",
2077  sl->slice_type == AV_PICTURE_TYPE_B ? (sl->direct_spatial_mv_pred ? "SPAT" : "TEMP") : "");
2078  }
2079 
2080  return 0;
2081 }
2082 
/* Top-level per-slice entry: parse the slice header, handle
 * field/frame boundaries (flushing queued slices and closing fields as
 * needed), validate PPS/SPS consistency across slices of one picture,
 * start the field/picture on the first slice, initialize the slice, and
 * queue it for decoding.
 *
 * NOTE(review): the signature line (orig. 2083) and the declaration of
 * 'sl' (orig. 2085) are missing from this extraction — presumably this is
 * ff_h264_queue_decode_slice(H264Context *h, const H2645NAL *nal) with
 * sl drawn from h->slice_ctx; confirm against the full source. */
2084 {
2086  int first_slice = sl == h->slice_ctx && !h->current_slice;
2087  int ret;
2088 
2089  sl->gb = nal->gb;
2090 
2091  ret = h264_slice_header_parse(h, sl, nal);
2092  if (ret < 0)
2093  return ret;
2094 
2095  // discard redundant pictures
2096  if (sl->redundant_pic_count > 0) {
2097  sl->ref_count[0] = sl->ref_count[1] = 0;
2098  return 0;
2099  }
2100 
2101  if (sl->first_mb_addr == 0 || !h->current_slice) {
2102  if (h->setup_finished) {
2103  av_log(h->avctx, AV_LOG_ERROR, "Too many fields\n");
2104  return AVERROR_INVALIDDATA;
2105  }
2106  }
2107 
2108  if (sl->first_mb_addr == 0) { // FIXME better field boundary detection
2109  if (h->current_slice) {
2110  // this slice starts a new field
2111  // first decode any pending queued slices
2112  if (h->nb_slice_ctx_queued) {
2113  H264SliceContext tmp_ctx;
2114 
 /* Decode call (orig. 2115) is missing from this extraction; its
  * return is checked below. */
2116  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
2117  return ret;
2118 
 /* Swap the just-parsed slice context into slot 0 so decoding
  * continues from h->slice_ctx. */
2119  memcpy(&tmp_ctx, h->slice_ctx, sizeof(tmp_ctx));
2120  memcpy(h->slice_ctx, sl, sizeof(tmp_ctx));
2121  memcpy(sl, &tmp_ctx, sizeof(tmp_ctx));
2122  sl = h->slice_ctx;
2123  }
2124 
2125  if (h->cur_pic_ptr && FIELD_PICTURE(h) && h->first_field) {
2126  ret = ff_h264_field_end(h, h->slice_ctx, 1);
2127  if (ret < 0)
2128  return ret;
2129  } else if (h->cur_pic_ptr && !FIELD_PICTURE(h) && !h->first_field && h->nal_unit_type == H264_NAL_IDR_SLICE) {
2130  av_log(h, AV_LOG_WARNING, "Broken frame packetizing\n");
2131  ret = ff_h264_field_end(h, h->slice_ctx, 1);
2132  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 0);
2133  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 1);
2134  h->cur_pic_ptr = NULL;
2135  if (ret < 0)
2136  return ret;
2137  } else
2138  return AVERROR_INVALIDDATA;
2139  }
2140 
2141  if (!h->first_field) {
2142  if (h->cur_pic_ptr && !h->droppable) {
2145  }
2146  h->cur_pic_ptr = NULL;
2147  }
2148  }
2149 
2150  if (!h->current_slice)
2151  av_assert0(sl == h->slice_ctx);
2152 
 /* Frame-skipping: bail out early per skip_frame policy (intermediate
  * clauses, orig. 2156-2158, are missing from this extraction). */
2153  if (h->current_slice == 0 && !h->first_field) {
2154  if (
2155  (h->avctx->skip_frame >= AVDISCARD_NONREF && !h->nal_ref_idc) ||
2159  h->avctx->skip_frame >= AVDISCARD_ALL) {
2160  return 0;
2161  }
2162  }
2163 
 /* All slices of one picture must reference a compatible PPS/SPS. */
2164  if (!first_slice) {
2165  const PPS *pps = (const PPS*)h->ps.pps_list[sl->pps_id]->data;
2166 
2167  if (h->ps.pps->sps_id != pps->sps_id ||
2168  h->ps.pps->transform_8x8_mode != pps->transform_8x8_mode /*||
2169  (h->setup_finished && h->ps.pps != pps)*/) {
2170  av_log(h->avctx, AV_LOG_ERROR, "PPS changed between slices\n");
2171  return AVERROR_INVALIDDATA;
2172  }
2173  if (h->ps.sps != pps->sps) {
2175  "SPS changed in the middle of the frame\n");
2176  return AVERROR_INVALIDDATA;
2177  }
2178  }
2179 
2180  if (h->current_slice == 0) {
2181  ret = h264_field_start(h, sl, nal, first_slice);
2182  if (ret < 0)
2183  return ret;
2184  } else {
2185  if (h->picture_structure != sl->picture_structure ||
2186  h->droppable != (nal->ref_idc == 0)) {
2188  "Changing field mode (%d -> %d) between slices is not allowed\n",
2190  return AVERROR_INVALIDDATA;
2191  } else if (!h->cur_pic_ptr) {
2193  "unset cur_pic_ptr on slice %d\n",
2194  h->current_slice + 1);
2195  return AVERROR_INVALIDDATA;
2196  }
2197  }
2198 
2199  ret = h264_slice_init(h, sl, nal);
2200  if (ret < 0)
2201  return ret;
2202 
2203  h->nb_slice_ctx_queued++;
2204 
2205  return 0;
2206 }
2207 
/* Body of a slice-type mapping function (its signature, doc line 2208, is
 * missing from this listing). Maps an AVPictureType to a small integer code
 * (P=0, B=1, I=2, SP=3, SI=4) — presumably the H.264 slice_type syntax
 * element values (spec Table 7-6); confirm against upstream. Returns
 * AVERROR_INVALIDDATA for any other picture type. */
2209 {
2210  switch (sl->slice_type) {
2211  case AV_PICTURE_TYPE_P:
2212  return 0;
2213  case AV_PICTURE_TYPE_B:
2214  return 1;
2215  case AV_PICTURE_TYPE_I:
2216  return 2;
2217  case AV_PICTURE_TYPE_SP:
2218  return 3;
2219  case AV_PICTURE_TYPE_SI:
2220  return 4;
2221  default:
2222  return AVERROR_INVALIDDATA;
2223  }
2224 }
2225 
/* fill_filter_caches_inter(): the first line of the signature (doc line 2226)
 * is missing from this listing. Populates sl->mv_cache / sl->ref_cache for
 * the given reference `list` with the motion vectors and reference indices of
 * the current macroblock and its top/left neighbours, as consumed by the
 * deblocking filter. */
2227  H264SliceContext *sl,
2228  int mb_type, int top_xy,
2229  int left_xy[LEFT_MBS],
2230  int top_type,
2231  int left_type[LEFT_MBS],
2232  int mb_xy, int list)
2233 {
2234  int b_stride = h->b_stride;
2235  int16_t(*mv_dst)[2] = &sl->mv_cache[list][scan8[0]];
2236  int8_t *ref_cache = &sl->ref_cache[list][scan8[0]];
2237  if (IS_INTER(mb_type) || IS_DIRECT(mb_type)) {
/* Top neighbour: cache row -1 (offset -8 in the 8-wide scan8 layout). */
2238  if (USES_LIST(top_type, list)) {
2239  const int b_xy = h->mb2b_xy[top_xy] + 3 * b_stride;
2240  const int b8_xy = 4 * top_xy + 2;
2241  const int *ref2frm = &h->ref2frm[h->slice_table[top_xy] & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2242  AV_COPY128(mv_dst - 1 * 8, h->cur_pic.motion_val[list][b_xy + 0]);
2243  ref_cache[0 - 1 * 8] =
2244  ref_cache[1 - 1 * 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 0]];
2245  ref_cache[2 - 1 * 8] =
2246  ref_cache[3 - 1 * 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 1]];
2247  } else {
/* Top neighbour does not use this list: zero MVs, mark refs unused. */
2248  AV_ZERO128(mv_dst - 1 * 8);
2249  AV_WN32A(&ref_cache[0 - 1 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2250  }
2251 
/* Left neighbour: only when current and left MB share the same field/frame
 * coding (interlace flags equal). */
2252  if (!IS_INTERLACED(mb_type ^ left_type[LTOP])) {
2253  if (USES_LIST(left_type[LTOP], list)) {
2254  const int b_xy = h->mb2b_xy[left_xy[LTOP]] + 3;
2255  const int b8_xy = 4 * left_xy[LTOP] + 1;
2256  const int *ref2frm = &h->ref2frm[h->slice_table[left_xy[LTOP]] & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2257  AV_COPY32(mv_dst - 1 + 0, h->cur_pic.motion_val[list][b_xy + b_stride * 0]);
2258  AV_COPY32(mv_dst - 1 + 8, h->cur_pic.motion_val[list][b_xy + b_stride * 1]);
2259  AV_COPY32(mv_dst - 1 + 16, h->cur_pic.motion_val[list][b_xy + b_stride * 2]);
2260  AV_COPY32(mv_dst - 1 + 24, h->cur_pic.motion_val[list][b_xy + b_stride * 3]);
2261  ref_cache[-1 + 0] =
2262  ref_cache[-1 + 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 2 * 0]];
2263  ref_cache[-1 + 16] =
2264  ref_cache[-1 + 24] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 2 * 1]];
2265  } else {
2266  AV_ZERO32(mv_dst - 1 + 0);
2267  AV_ZERO32(mv_dst - 1 + 8);
2268  AV_ZERO32(mv_dst - 1 + 16);
2269  AV_ZERO32(mv_dst - 1 + 24);
2270  ref_cache[-1 + 0] =
2271  ref_cache[-1 + 8] =
2272  ref_cache[-1 + 16] =
2273  ref_cache[-1 + 24] = LIST_NOT_USED;
2274  }
2275  }
2276  }
2277 
/* Current MB does not use this list: zero the 4x4 MV block and mark all four
 * cache rows as unused, then we are done. */
2278  if (!USES_LIST(mb_type, list)) {
2279  fill_rectangle(mv_dst, 4, 4, 8, pack16to32(0, 0), 4);
2280  AV_WN32A(&ref_cache[0 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2281  AV_WN32A(&ref_cache[1 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2282  AV_WN32A(&ref_cache[2 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2283  AV_WN32A(&ref_cache[3 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2284  return;
2285  }
2286 
/* Broadcast the current MB's four 8x8 reference indices across the cache
 * (each byte of ref01/ref23 duplicated via the 0x0101 multiply). */
2287  {
2288  int8_t *ref = &h->cur_pic.ref_index[list][4 * mb_xy];
2289  const int *ref2frm = &h->ref2frm[sl->slice_num & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2290  uint32_t ref01 = (pack16to32(ref2frm[ref[0]], ref2frm[ref[1]]) & 0x00FF00FF) * 0x0101;
2291  uint32_t ref23 = (pack16to32(ref2frm[ref[2]], ref2frm[ref[3]]) & 0x00FF00FF) * 0x0101;
2292  AV_WN32A(&ref_cache[0 * 8], ref01);
2293  AV_WN32A(&ref_cache[1 * 8], ref01);
2294  AV_WN32A(&ref_cache[2 * 8], ref23);
2295  AV_WN32A(&ref_cache[3 * 8], ref23);
2296  }
2297 
/* Copy the current MB's 4 rows of motion vectors into the cache. */
2298  {
2299  int16_t(*mv_src)[2] = &h->cur_pic.motion_val[list][4 * sl->mb_x + 4 * sl->mb_y * b_stride];
2300  AV_COPY128(mv_dst + 8 * 0, mv_src + 0 * b_stride);
2301  AV_COPY128(mv_dst + 8 * 1, mv_src + 1 * b_stride);
2302  AV_COPY128(mv_dst + 8 * 2, mv_src + 2 * b_stride);
2303  AV_COPY128(mv_dst + 8 * 3, mv_src + 3 * b_stride);
2304  }
2305 }
2306 
2307 /**
2308  * @return non zero if the loop filter can be skipped
2309  */
/* Prepare the per-MB caches (neighbour types, MV/ref caches, non-zero-count
 * cache) the deblocking filter reads for macroblock sl->mb_xy.
 * Returns non-zero if filtering can be skipped entirely for this MB
 * (sufficiently low qp on the MB and its neighbours). */
2310 static int fill_filter_caches(const H264Context *h, H264SliceContext *sl, int mb_type)
2311 {
2312  const int mb_xy = sl->mb_xy;
2313  int top_xy, left_xy[LEFT_MBS];
2314  int top_type, left_type[LEFT_MBS];
2315  uint8_t *nnz;
2316  uint8_t *nnz_cache;
2317 
2318  top_xy = mb_xy - (h->mb_stride << MB_FIELD(sl));
2319 
2320  left_xy[LBOT] = left_xy[LTOP] = mb_xy - 1;
/* In MBAFF frames the top/left neighbour indices depend on the field coding
 * of the current vs. neighbouring MB pair. */
2321  if (FRAME_MBAFF(h)) {
2322  const int left_mb_field_flag = IS_INTERLACED(h->cur_pic.mb_type[mb_xy - 1]);
2323  const int curr_mb_field_flag = IS_INTERLACED(mb_type);
2324  if (sl->mb_y & 1) {
2325  if (left_mb_field_flag != curr_mb_field_flag)
2326  left_xy[LTOP] -= h->mb_stride;
2327  } else {
2328  if (curr_mb_field_flag)
2329  top_xy += h->mb_stride &
2330  (((h->cur_pic.mb_type[top_xy] >> 7) & 1) - 1);
2331  if (left_mb_field_flag != curr_mb_field_flag)
2332  left_xy[LBOT] += h->mb_stride;
2333  }
2334  }
2335 
2336  sl->top_mb_xy = top_xy;
2337  sl->left_mb_xy[LTOP] = left_xy[LTOP];
2338  sl->left_mb_xy[LBOT] = left_xy[LBOT];
2339  {
2340  /* For sufficiently low qp, filtering wouldn't do anything.
2341  * This is a conservative estimate: could also check beta_offset
2342  * and more accurate chroma_qp. */
2343  int qp_thresh = sl->qp_thresh; // FIXME strictly we should store qp_thresh for each mb of a slice
2344  int qp = h->cur_pic.qscale_table[mb_xy];
2345  if (qp <= qp_thresh &&
2346  (left_xy[LTOP] < 0 ||
2347  ((qp + h->cur_pic.qscale_table[left_xy[LTOP]] + 1) >> 1) <= qp_thresh) &&
2348  (top_xy < 0 ||
2349  ((qp + h->cur_pic.qscale_table[top_xy] + 1) >> 1) <= qp_thresh)) {
2350  if (!FRAME_MBAFF(h))
2351  return 1;
2352  if ((left_xy[LTOP] < 0 ||
2353  ((qp + h->cur_pic.qscale_table[left_xy[LBOT]] + 1) >> 1) <= qp_thresh) &&
2354  (top_xy < h->mb_stride ||
2355  ((qp + h->cur_pic.qscale_table[top_xy - h->mb_stride] + 1) >> 1) <= qp_thresh))
2356  return 1;
2357  }
2358  }
2359 
2360  top_type = h->cur_pic.mb_type[top_xy];
2361  left_type[LTOP] = h->cur_pic.mb_type[left_xy[LTOP]];
2362  left_type[LBOT] = h->cur_pic.mb_type[left_xy[LBOT]];
/* deblocking_filter == 2 filters within the current slice only, so mask out
 * neighbours from other slices; otherwise mask only unavailable neighbours
 * (slice_table entry 0xFFFF). */
2363  if (sl->deblocking_filter == 2) {
2364  if (h->slice_table[top_xy] != sl->slice_num)
2365  top_type = 0;
2366  if (h->slice_table[left_xy[LBOT]] != sl->slice_num)
2367  left_type[LTOP] = left_type[LBOT] = 0;
2368  } else {
2369  if (h->slice_table[top_xy] == 0xFFFF)
2370  top_type = 0;
2371  if (h->slice_table[left_xy[LBOT]] == 0xFFFF)
2372  left_type[LTOP] = left_type[LBOT] = 0;
2373  }
2374  sl->top_type = top_type;
2375  sl->left_type[LTOP] = left_type[LTOP];
2376  sl->left_type[LBOT] = left_type[LBOT];
2377 
/* Intra MBs need no MV/ref caches; the filter derives strengths from the
 * intra flag alone. */
2378  if (IS_INTRA(mb_type))
2379  return 0;
2380 
2381  fill_filter_caches_inter(h, sl, mb_type, top_xy, left_xy,
2382  top_type, left_type, mb_xy, 0);
2383  if (sl->list_count == 2)
2384  fill_filter_caches_inter(h, sl, mb_type, top_xy, left_xy,
2385  top_type, left_type, mb_xy, 1);
2386 
/* Load the non-zero-coefficient counts of the current MB and its top/left
 * neighbours into the cache layout used by the filter. */
2387  nnz = h->non_zero_count[mb_xy];
2388  nnz_cache = sl->non_zero_count_cache;
2389  AV_COPY32(&nnz_cache[4 + 8 * 1], &nnz[0]);
2390  AV_COPY32(&nnz_cache[4 + 8 * 2], &nnz[4]);
2391  AV_COPY32(&nnz_cache[4 + 8 * 3], &nnz[8]);
2392  AV_COPY32(&nnz_cache[4 + 8 * 4], &nnz[12]);
2393  sl->cbp = h->cbp_table[mb_xy];
2394 
2395  if (top_type) {
2396  nnz = h->non_zero_count[top_xy];
2397  AV_COPY32(&nnz_cache[4 + 8 * 0], &nnz[3 * 4]);
2398  }
2399 
2400  if (left_type[LTOP]) {
2401  nnz = h->non_zero_count[left_xy[LTOP]];
2402  nnz_cache[3 + 8 * 1] = nnz[3 + 0 * 4];
2403  nnz_cache[3 + 8 * 2] = nnz[3 + 1 * 4];
2404  nnz_cache[3 + 8 * 3] = nnz[3 + 2 * 4];
2405  nnz_cache[3 + 8 * 4] = nnz[3 + 3 * 4];
2406  }
2407 
2408  /* CAVLC 8x8dct requires NNZ values for residual decoding that differ
2409  * from what the loop filter needs */
2410  if (!CABAC(h) && h->ps.pps->transform_8x8_mode) {
2411  if (IS_8x8DCT(top_type)) {
2412  nnz_cache[4 + 8 * 0] =
2413  nnz_cache[5 + 8 * 0] = (h->cbp_table[top_xy] & 0x4000) >> 12;
2414  nnz_cache[6 + 8 * 0] =
2415  nnz_cache[7 + 8 * 0] = (h->cbp_table[top_xy] & 0x8000) >> 12;
2416  }
2417  if (IS_8x8DCT(left_type[LTOP])) {
2418  nnz_cache[3 + 8 * 1] =
2419  nnz_cache[3 + 8 * 2] = (h->cbp_table[left_xy[LTOP]] & 0x2000) >> 12; // FIXME check MBAFF
2420  }
2421  if (IS_8x8DCT(left_type[LBOT])) {
2422  nnz_cache[3 + 8 * 3] =
2423  nnz_cache[3 + 8 * 4] = (h->cbp_table[left_xy[LBOT]] & 0x8000) >> 12; // FIXME check MBAFF
2424  }
2425 
/* For an 8x8-transform MB, the cbp bit of each 8x8 block is replicated to
 * all four of its 4x4 cache positions. */
2426  if (IS_8x8DCT(mb_type)) {
2427  nnz_cache[scan8[0]] =
2428  nnz_cache[scan8[1]] =
2429  nnz_cache[scan8[2]] =
2430  nnz_cache[scan8[3]] = (sl->cbp & 0x1000) >> 12;
2431 
2432  nnz_cache[scan8[0 + 4]] =
2433  nnz_cache[scan8[1 + 4]] =
2434  nnz_cache[scan8[2 + 4]] =
2435  nnz_cache[scan8[3 + 4]] = (sl->cbp & 0x2000) >> 12;
2436 
2437  nnz_cache[scan8[0 + 8]] =
2438  nnz_cache[scan8[1 + 8]] =
2439  nnz_cache[scan8[2 + 8]] =
2440  nnz_cache[scan8[3 + 8]] = (sl->cbp & 0x4000) >> 12;
2441 
2442  nnz_cache[scan8[0 + 12]] =
2443  nnz_cache[scan8[1 + 12]] =
2444  nnz_cache[scan8[2 + 12]] =
2445  nnz_cache[scan8[3 + 12]] = (sl->cbp & 0x8000) >> 12;
2446  }
2447  }
2448 
2449  return 0;
2450 }
2451 
/* Run the in-loop deblocking filter over macroblock columns
 * [start_x, end_x) of the current row (two MB rows when FRAME_MBAFF).
 * No-op when h->postpone_filter is set or the slice disables filtering.
 * Mutates sl (mb_x/mb_y/chroma_qp etc.) while iterating and restores the
 * caller-visible fields at the end. */
2452 static void loop_filter(const H264Context *h, H264SliceContext *sl, int start_x, int end_x)
2453 {
2454  uint8_t *dest_y, *dest_cb, *dest_cr;
2455  int linesize, uvlinesize, mb_x, mb_y;
2456  const int end_mb_y = sl->mb_y + FRAME_MBAFF(h);
2457  const int old_slice_type = sl->slice_type;
2458  const int pixel_shift = h->pixel_shift;
2459  const int block_h = 16 >> h->chroma_y_shift;
2460 
2461  if (h->postpone_filter)
2462  return;
2463 
2464  if (sl->deblocking_filter) {
2465  for (mb_x = start_x; mb_x < end_x; mb_x++)
2466  for (mb_y = end_mb_y - FRAME_MBAFF(h); mb_y <= end_mb_y; mb_y++) {
2467  int mb_xy, mb_type;
2468  mb_xy = sl->mb_xy = mb_x + mb_y * h->mb_stride;
2469  mb_type = h->cur_pic.mb_type[mb_xy];
2470 
2471  if (FRAME_MBAFF(h))
2472  sl->mb_mbaff =
2473  sl->mb_field_decoding_flag = !!IS_INTERLACED(mb_type);
2474 
2475  sl->mb_x = mb_x;
2476  sl->mb_y = mb_y;
/* Compute the luma/chroma destination pointers for this MB. */
2477  dest_y = h->cur_pic.f->data[0] +
2478  ((mb_x << pixel_shift) + mb_y * sl->linesize) * 16;
2479  dest_cb = h->cur_pic.f->data[1] +
2480  (mb_x << pixel_shift) * (8 << CHROMA444(h)) +
2481  mb_y * sl->uvlinesize * block_h;
2482  dest_cr = h->cur_pic.f->data[2] +
2483  (mb_x << pixel_shift) * (8 << CHROMA444(h)) +
2484  mb_y * sl->uvlinesize * block_h;
2485  // FIXME simplify above
2486 
/* Field MBs use doubled line strides; odd rows step back to the top field
 * line of the pair. */
2487  if (MB_FIELD(sl)) {
2488  linesize = sl->mb_linesize = sl->linesize * 2;
2489  uvlinesize = sl->mb_uvlinesize = sl->uvlinesize * 2;
2490  if (mb_y & 1) { // FIXME move out of this function?
2491  dest_y -= sl->linesize * 15;
2492  dest_cb -= sl->uvlinesize * (block_h - 1);
2493  dest_cr -= sl->uvlinesize * (block_h - 1);
2494  }
2495  } else {
2496  linesize = sl->mb_linesize = sl->linesize;
2497  uvlinesize = sl->mb_uvlinesize = sl->uvlinesize;
2498  }
2499  backup_mb_border(h, sl, dest_y, dest_cb, dest_cr, linesize,
2500  uvlinesize, 0);
/* fill_filter_caches() returning non-zero means filtering is a no-op for
 * this MB (low-qp shortcut). */
2501  if (fill_filter_caches(h, sl, mb_type))
2502  continue;
2503  sl->chroma_qp[0] = get_chroma_qp(h->ps.pps, 0, h->cur_pic.qscale_table[mb_xy]);
2504  sl->chroma_qp[1] = get_chroma_qp(h->ps.pps, 1, h->cur_pic.qscale_table[mb_xy]);
2505 
2506  if (FRAME_MBAFF(h)) {
2507  ff_h264_filter_mb(h, sl, mb_x, mb_y, dest_y, dest_cb, dest_cr,
2508  linesize, uvlinesize);
2509  } else {
2510  ff_h264_filter_mb_fast(h, sl, mb_x, mb_y, dest_y, dest_cb,
2511  dest_cr, linesize, uvlinesize);
2512  }
2513  }
2514  }
/* Restore slice-level state the loop above clobbered. */
2515  sl->slice_type = old_slice_type;
2516  sl->mb_x = end_x;
2517  sl->mb_y = end_mb_y - FRAME_MBAFF(h);
2518  sl->chroma_qp[0] = get_chroma_qp(h->ps.pps, 0, sl->qscale);
2519  sl->chroma_qp[1] = get_chroma_qp(h->ps.pps, 1, sl->qscale);
2520 }
2521 
/* Body of predict_field_decoding_flag() (its signature, doc line 2522, is
 * missing from this listing). Predicts the MBAFF field decoding flag of the
 * current MB from the left neighbour if it belongs to this slice, else from
 * the top neighbour, else defaults to frame (0). */
2523 {
2524  const int mb_xy = sl->mb_x + sl->mb_y * h->mb_stride;
2525  int mb_type = (h->slice_table[mb_xy - 1] == sl->slice_num) ?
2526  h->cur_pic.mb_type[mb_xy - 1] :
2527  (h->slice_table[mb_xy - h->mb_stride] == sl->slice_num) ?
2528  h->cur_pic.mb_type[mb_xy - h->mb_stride] : 0;
2529  sl->mb_mbaff = sl->mb_field_decoding_flag = IS_INTERLACED(mb_type) ? 1 : 0;
2530 }
2531 
2532 /**
2533  * Draw edges and report progress for the last MB row.
2534  */
/* Body of decode_finish_row() — draws edges and reports decoding progress for
 * the just-finished MB row (signature, doc line 2535, is missing from this
 * listing). When deblocking is enabled the reported region shrinks/extends by
 * deblock_border to account for pixels the filter may still modify. */
2536 {
2537  int top = 16 * (sl->mb_y >> FIELD_PICTURE(h));
2538  int pic_height = 16 * h->mb_height >> FIELD_PICTURE(h);
2539  int height = 16 << FRAME_MBAFF(h);
2540  int deblock_border = (16 + 4) << FRAME_MBAFF(h);
2541 
2542  if (sl->deblocking_filter) {
2543  if ((top + height) >= pic_height)
2544  height += deblock_border;
2545  top -= deblock_border;
2546  }
2547 
2548  if (top >= pic_height || (top + height) < 0)
2549  return;
2550 
/* Clip the band to the picture. */
2551  height = FFMIN(height, pic_height - top);
2552  if (top < 0) {
2553  height = top + height;
2554  top = 0;
2555  }
2556 
2557  ff_h264_draw_horiz_band(h, sl, top, height);
2558 
2559  if (h->droppable || sl->h264->slice_ctx[0].er.error_occurred)
2560  return;
2561 
/* NOTE(review): the second argument of this call (doc line 2563, presumably
 * the field index) is missing from this extracted listing. */
2562  ff_thread_report_progress(&h->cur_pic_ptr->tf, top + height - 1,
2564 }
2565 
/* er_add_slice(): first line of the signature (doc line 2566) is missing from
 * this listing. Thin wrapper that forwards a decoded-slice region to the
 * error-resilience module of slice_ctx[0] when error concealment is enabled
 * and compiled in. */
2567  int startx, int starty,
2568  int endx, int endy, int status)
2569 {
2570  if (!sl->h264->enable_er)
2571  return;
2572 
2573  if (CONFIG_ERROR_RESILIENCE) {
2574  ERContext *er = &sl->h264->slice_ctx[0].er;
2575 
2576  ff_er_add_slice(er, startx, starty, endx, endy, status);
2577  }
2578 }
2579 
2580 static int decode_slice(struct AVCodecContext *avctx, void *arg)
2581 {
2582  H264SliceContext *sl = arg;
2583  const H264Context *h = sl->h264;
2584  int lf_x_start = sl->mb_x;
2585  int orig_deblock = sl->deblocking_filter;
2586  int ret;
2587 
2588  sl->linesize = h->cur_pic_ptr->f->linesize[0];
2589  sl->uvlinesize = h->cur_pic_ptr->f->linesize[1];
2590 
2591  ret = alloc_scratch_buffers(sl, sl->linesize);
2592  if (ret < 0)
2593  return ret;
2594 
2595  sl->mb_skip_run = -1;
2596 
2597  av_assert0(h->block_offset[15] == (4 * ((scan8[15] - scan8[0]) & 7) << h->pixel_shift) + 4 * sl->linesize * ((scan8[15] - scan8[0]) >> 3));
2598 
2599  if (h->postpone_filter)
2600  sl->deblocking_filter = 0;
2601 
2602  sl->is_complex = FRAME_MBAFF(h) || h->picture_structure != PICT_FRAME ||
2603  (CONFIG_GRAY && (h->flags & AV_CODEC_FLAG_GRAY));
2604 
2606  const int start_i = av_clip(sl->resync_mb_x + sl->resync_mb_y * h->mb_width, 0, h->mb_num - 1);
2607  if (start_i) {
2608  int prev_status = h->slice_ctx[0].er.error_status_table[h->slice_ctx[0].er.mb_index2xy[start_i - 1]];
2609  prev_status &= ~ VP_START;
2610  if (prev_status != (ER_MV_END | ER_DC_END | ER_AC_END))
2611  h->slice_ctx[0].er.error_occurred = 1;
2612  }
2613  }
2614 
2615  if (h->ps.pps->cabac) {
2616  /* realign */
2617  align_get_bits(&sl->gb);
2618 
2619  /* init cabac */
2620  ret = ff_init_cabac_decoder(&sl->cabac,
2621  sl->gb.buffer + get_bits_count(&sl->gb) / 8,
2622  (get_bits_left(&sl->gb) + 7) / 8);
2623  if (ret < 0)
2624  return ret;
2625 
2627 
2628  for (;;) {
2629  int ret, eos;
2630  if (sl->mb_x + sl->mb_y * h->mb_width >= sl->next_slice_idx) {
2631  av_log(h->avctx, AV_LOG_ERROR, "Slice overlaps with next at %d\n",
2632  sl->next_slice_idx);
2633  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2634  sl->mb_y, ER_MB_ERROR);
2635  return AVERROR_INVALIDDATA;
2636  }
2637 
2638  ret = ff_h264_decode_mb_cabac(h, sl);
2639 
2640  if (ret >= 0)
2641  ff_h264_hl_decode_mb(h, sl);
2642 
2643  // FIXME optimal? or let mb_decode decode 16x32 ?
2644  if (ret >= 0 && FRAME_MBAFF(h)) {
2645  sl->mb_y++;
2646 
2647  ret = ff_h264_decode_mb_cabac(h, sl);
2648 
2649  if (ret >= 0)
2650  ff_h264_hl_decode_mb(h, sl);
2651  sl->mb_y--;
2652  }
2653  eos = get_cabac_terminate(&sl->cabac);
2654 
2655  if ((h->workaround_bugs & FF_BUG_TRUNCATED) &&
2656  sl->cabac.bytestream > sl->cabac.bytestream_end + 2) {
2657  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x - 1,
2658  sl->mb_y, ER_MB_END);
2659  if (sl->mb_x >= lf_x_start)
2660  loop_filter(h, sl, lf_x_start, sl->mb_x + 1);
2661  goto finish;
2662  }
2663  if (sl->cabac.bytestream > sl->cabac.bytestream_end + 2 )
2664  av_log(h->avctx, AV_LOG_DEBUG, "bytestream overread %"PTRDIFF_SPECIFIER"\n", sl->cabac.bytestream_end - sl->cabac.bytestream);
2665  if (ret < 0 || sl->cabac.bytestream > sl->cabac.bytestream_end + 4) {
2667  "error while decoding MB %d %d, bytestream %"PTRDIFF_SPECIFIER"\n",
2668  sl->mb_x, sl->mb_y,
2669  sl->cabac.bytestream_end - sl->cabac.bytestream);
2670  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2671  sl->mb_y, ER_MB_ERROR);
2672  return AVERROR_INVALIDDATA;
2673  }
2674 
2675  if (++sl->mb_x >= h->mb_width) {
2676  loop_filter(h, sl, lf_x_start, sl->mb_x);
2677  sl->mb_x = lf_x_start = 0;
2678  decode_finish_row(h, sl);
2679  ++sl->mb_y;
2680  if (FIELD_OR_MBAFF_PICTURE(h)) {
2681  ++sl->mb_y;
2682  if (FRAME_MBAFF(h) && sl->mb_y < h->mb_height)
2684  }
2685  }
2686 
2687  if (eos || sl->mb_y >= h->mb_height) {
2688  ff_tlog(h->avctx, "slice end %d %d\n",
2689  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2690  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x - 1,
2691  sl->mb_y, ER_MB_END);
2692  if (sl->mb_x > lf_x_start)
2693  loop_filter(h, sl, lf_x_start, sl->mb_x);
2694  goto finish;
2695  }
2696  }
2697  } else {
2698  for (;;) {
2699  int ret;
2700 
2701  if (sl->mb_x + sl->mb_y * h->mb_width >= sl->next_slice_idx) {
2702  av_log(h->avctx, AV_LOG_ERROR, "Slice overlaps with next at %d\n",
2703  sl->next_slice_idx);
2704  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2705  sl->mb_y, ER_MB_ERROR);
2706  return AVERROR_INVALIDDATA;
2707  }
2708 
2709  ret = ff_h264_decode_mb_cavlc(h, sl);
2710 
2711  if (ret >= 0)
2712  ff_h264_hl_decode_mb(h, sl);
2713 
2714  // FIXME optimal? or let mb_decode decode 16x32 ?
2715  if (ret >= 0 && FRAME_MBAFF(h)) {
2716  sl->mb_y++;
2717  ret = ff_h264_decode_mb_cavlc(h, sl);
2718 
2719  if (ret >= 0)
2720  ff_h264_hl_decode_mb(h, sl);
2721  sl->mb_y--;
2722  }
2723 
2724  if (ret < 0) {
2726  "error while decoding MB %d %d\n", sl->mb_x, sl->mb_y);
2727  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2728  sl->mb_y, ER_MB_ERROR);
2729  return ret;
2730  }
2731 
2732  if (++sl->mb_x >= h->mb_width) {
2733  loop_filter(h, sl, lf_x_start, sl->mb_x);
2734  sl->mb_x = lf_x_start = 0;
2735  decode_finish_row(h, sl);
2736  ++sl->mb_y;
2737  if (FIELD_OR_MBAFF_PICTURE(h)) {
2738  ++sl->mb_y;
2739  if (FRAME_MBAFF(h) && sl->mb_y < h->mb_height)
2741  }
2742  if (sl->mb_y >= h->mb_height) {
2743  ff_tlog(h->avctx, "slice end %d %d\n",
2744  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2745 
2746  if ( get_bits_left(&sl->gb) == 0
2747  || get_bits_left(&sl->gb) > 0 && !(h->avctx->err_recognition & AV_EF_AGGRESSIVE)) {
2748  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2749  sl->mb_x - 1, sl->mb_y, ER_MB_END);
2750 
2751  goto finish;
2752  } else {
2753  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2754  sl->mb_x, sl->mb_y, ER_MB_END);
2755 
2756  return AVERROR_INVALIDDATA;
2757  }
2758  }
2759  }
2760 
2761  if (get_bits_left(&sl->gb) <= 0 && sl->mb_skip_run <= 0) {
2762  ff_tlog(h->avctx, "slice end %d %d\n",
2763  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2764 
2765  if (get_bits_left(&sl->gb) == 0) {
2766  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2767  sl->mb_x - 1, sl->mb_y, ER_MB_END);
2768  if (sl->mb_x > lf_x_start)
2769  loop_filter(h, sl, lf_x_start, sl->mb_x);
2770 
2771  goto finish;
2772  } else {
2773  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2774  sl->mb_y, ER_MB_ERROR);
2775 
2776  return AVERROR_INVALIDDATA;
2777  }
2778  }
2779  }
2780  }
2781 
2782 finish:
2783  sl->deblocking_filter = orig_deblock;
2784  return 0;
2785 }
2786 
2787 /**
2788  * Call decode_slice() for each context.
2789  *
2790  * @param h h264 master context
2791  */
/* Body of ff_h264_execute_decode_slices() — runs decode_slice() for every
 * queued slice context (directly for one context, via avctx->execute for
 * several), then pulls per-slice state back into the master context and, if
 * filtering was postponed, runs the loop filter over each slice's region.
 * NOTE(review): the signature line (doc line 2792) and doc line 2848 (the
 * per-context error_count accumulation inside the loop below) are missing
 * from this extracted listing. */
2793 {
2794  AVCodecContext *const avctx = h->avctx;
2795  H264SliceContext *sl;
2796  int context_count = h->nb_slice_ctx_queued;
2797  int ret = 0;
2798  int i, j;
2799 
2800  h->slice_ctx[0].next_slice_idx = INT_MAX;
2801 
/* Hardware acceleration decodes elsewhere; nothing to execute here. */
2802  if (h->avctx->hwaccel || context_count < 1)
2803  return 0;
2804 
2805  av_assert0(context_count && h->slice_ctx[context_count - 1].mb_y < h->mb_height);
2806 
2807  if (context_count == 1) {
2808 
2809  h->slice_ctx[0].next_slice_idx = h->mb_width * h->mb_height;
2810  h->postpone_filter = 0;
2811 
2812  ret = decode_slice(avctx, &h->slice_ctx[0]);
2813  h->mb_y = h->slice_ctx[0].mb_y;
2814  if (ret < 0)
2815  goto finish;
2816  } else {
2817  av_assert0(context_count > 0);
2818  for (i = 0; i < context_count; i++) {
2819  int next_slice_idx = h->mb_width * h->mb_height;
2820  int slice_idx;
2821 
2822  sl = &h->slice_ctx[i];
2823  if (CONFIG_ERROR_RESILIENCE) {
2824  sl->er.error_count = 0;
2825  }
2826 
2827  /* make sure none of those slices overlap */
2828  slice_idx = sl->mb_y * h->mb_width + sl->mb_x;
2829  for (j = 0; j < context_count; j++) {
2830  H264SliceContext *sl2 = &h->slice_ctx[j];
2831  int slice_idx2 = sl2->mb_y * h->mb_width + sl2->mb_x;
2832 
2833  if (i == j || slice_idx2 < slice_idx)
2834  continue;
2835  next_slice_idx = FFMIN(next_slice_idx, slice_idx2);
2836  }
2837  sl->next_slice_idx = next_slice_idx;
2838  }
2839 
/* Decode all slice contexts in parallel. */
2840  avctx->execute(avctx, decode_slice, h->slice_ctx,
2841  NULL, context_count, sizeof(h->slice_ctx[0]));
2842 
2843  /* pull back stuff from slices to master context */
2844  sl = &h->slice_ctx[context_count - 1];
2845  h->mb_y = sl->mb_y;
2846  if (CONFIG_ERROR_RESILIENCE) {
2847  for (i = 1; i < context_count; i++)
/* NOTE(review): loop body (doc line 2848, the error_count accumulation into
 * slice_ctx[0]) is missing from this listing. */
2849  }
2850 
/* Deferred deblocking: filter every slice's rows now that all slices of the
 * picture are decoded. */
2851  if (h->postpone_filter) {
2852  h->postpone_filter = 0;
2853 
2854  for (i = 0; i < context_count; i++) {
2855  int y_end, x_end;
2856 
2857  sl = &h->slice_ctx[i];
2858  y_end = FFMIN(sl->mb_y + 1, h->mb_height);
2859  x_end = (sl->mb_y >= h->mb_height) ? h->mb_width : sl->mb_x;
2860 
2861  for (j = sl->resync_mb_y; j < y_end; j += 1 + FIELD_OR_MBAFF_PICTURE(h)) {
2862  sl->mb_y = j;
2863  loop_filter(h, sl, j > sl->resync_mb_y ? 0 : sl->resync_mb_x,
2864  j == y_end - 1 ? x_end : h->mb_width);
2865  }
2866  }
2867  }
2868  }
2869 
2870 finish:
2871  h->nb_slice_ctx_queued = 0;
2872  return ret;
2873 }
int chroma_format_idc
Definition: h264_ps.h:48
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
Definition: stereo3d.h:167
int video_signal_type_present_flag
Definition: h264_ps.h:74
struct H264Context * h264
Definition: h264dec.h:184
#define AV_EF_AGGRESSIVE
consider things that a sane encoder should not do as an error
Definition: avcodec.h:1676
#define ff_tlog(ctx,...)
Definition: internal.h:86
av_cold void ff_videodsp_init(VideoDSPContext *ctx, int bpc)
Definition: videodsp.c:38
#define NULL
Definition: coverity.c:32
int ff_thread_can_start_frame(AVCodecContext *avctx)
const struct AVCodec * codec
Definition: avcodec.h:540
AVRational framerate
Definition: avcodec.h:2081
discard all frames except keyframes
Definition: avcodec.h:235
static void init_dimensions(H264Context *h)
Definition: h264_slice.c:880
int nb_mmco
Definition: h264dec.h:480
int workaround_bugs
Definition: h264dec.h:373
int long_ref
1->long term reference 0->short term reference
Definition: h264dec.h:154
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:59
int sei_recovery_frame_cnt
Definition: h264dec.h:163
int ff_h264_queue_decode_slice(H264Context *h, const H2645NAL *nal)
Submit a slice for decoding.
Definition: h264_slice.c:2083
H264POCContext poc
Definition: h264dec.h:466
int mb_num
Definition: h264dec.h:443
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it...
Definition: buffer.c:125
This structure describes decoded (raw) audio or video data.
Definition: frame.h:314
int mb_aff_frame
Definition: h264dec.h:412
int recovery_frame_cnt
recovery_frame_cnt
Definition: h264_sei.h:141
int16_t mv_cache[2][5 *8][2]
Motion vector cache.
Definition: h264dec.h:305
enum AVStereo3DView view
Determines which views are packed.
Definition: stereo3d.h:190
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:409
static int get_se_golomb(GetBitContext *gb)
read signed exp golomb code.
Definition: golomb.h:241
int edge_emu_buffer_allocated
Definition: h264dec.h:293
int coded_width
Bitstream width / height, may be different from width/height e.g.
Definition: avcodec.h:719
static void decode_finish_row(const H264Context *h, H264SliceContext *sl)
Draw edges and report progress for the last MB row.
Definition: h264_slice.c:2535
int first_field
Definition: h264dec.h:414
uint8_t field_scan8x8_q0[64]
Definition: h264dec.h:437
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
misc image utilities
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:379
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:200
#define ER_MB_END
AVFrame * f
Definition: thread.h:35
int weighted_bipred_idc
Definition: h264_ps.h:119
int ff_h264_build_ref_list(H264Context *h, H264SliceContext *sl)
Definition: h264_refs.c:299
int left_mb_xy[LEFT_MBS]
Definition: h264dec.h:217
int chroma_qp_index_offset[2]
Definition: h264_ps.h:122
AVBufferRef * sps_list[MAX_SPS_COUNT]
Definition: h264_ps.h:144
const uint8_t * bytestream_end
Definition: cabac.h:49
static av_always_inline int get_chroma_qp(const PPS *pps, int t, int qscale)
Get the chroma qp.
Definition: h264dec.h:687
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:505
hardware decoding through Videotoolbox
Definition: pixfmt.h:282
H264ChromaContext h264chroma
Definition: h264dec.h:348
uint16_t * cbp_table
Definition: h264dec.h:419
int luma_weight_flag[2]
7.4.3.2 luma_weight_lX_flag
Definition: h264_parse.h:35
MMCO mmco[MAX_MMCO_COUNT]
memory management control operations buffer.
Definition: h264dec.h:479
static void implicit_weight_table(const H264Context *h, H264SliceContext *sl, int field)
Initialize implicit_weight table.
Definition: h264_slice.c:669
#define avpriv_request_sample(...)
Sequence parameter set.
Definition: h264_ps.h:44
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:1166
int mb_y
Definition: h264dec.h:440
int coded_picture_number
Definition: h264dec.h:369
int bitstream_restriction_flag
Definition: h264_ps.h:87
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:168
H264SEIAlternativeTransfer alternative_transfer
Definition: h264_sei.h:194
int num
Numerator.
Definition: rational.h:59
Timecode which conforms to SMPTE ST 12-1.
Definition: frame.h:168
AVBufferRef * mb_type_buf
Definition: h264dec.h:138
int repeat_pict
When decoding, this signals how much the picture must be delayed.
Definition: frame.h:456
int bipred_scratchpad_allocated
Definition: h264dec.h:292
Views are next to each other, but when upscaling apply a checkerboard pattern.
Definition: stereo3d.h:117
#define DELAYED_PIC_REF
Value of Picture.reference when Picture is not a reference picture, but is held for delayed output...
Definition: diracdec.c:67
Frame contains only the right view.
Definition: stereo3d.h:161
#define AV_PIX_FMT_GBRP10
Definition: pixfmt.h:415
#define VP_START
< current MB is the first after a resync marker
AVBufferPool * mb_type_pool
Definition: h264dec.h:556
int ff_h264_init_poc(int pic_field_poc[2], int *pic_poc, const SPS *sps, H264POCContext *pc, int picture_structure, int nal_ref_idc)
Definition: h264_parse.c:277
int chroma_x_shift
Definition: h264dec.h:366
const uint8_t * buffer
Definition: get_bits.h:62
Picture parameter set.
Definition: h264_ps.h:111
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel...
Definition: avcodec.h:910
int16_t(*[2] motion_val)[2]
Definition: h264dec.h:136
int flags
Definition: h264dec.h:372
void ff_h264_flush_change(H264Context *h)
Definition: h264dec.c:439
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:36
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:741
int frame_mbs_only_flag
Definition: h264_ps.h:62
int mb_height
Definition: h264dec.h:441
H264Picture * delayed_pic[MAX_DELAYED_PIC_COUNT+2]
Definition: h264dec.h:471
int is_avc
Used to parse AVC variant of H.264.
Definition: h264dec.h:456
av_cold void ff_h264_pred_init(H264PredContext *h, int codec_id, const int bit_depth, int chroma_format_idc)
Set the intra prediction function pointers.
Definition: h264pred.c:411
AVBufferPool * ref_index_pool
Definition: h264dec.h:558
int height_from_caller
Definition: h264dec.h:549
uint8_t zigzag_scan8x8_cavlc[64]
Definition: h264dec.h:429
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:403
av_cold void ff_h264chroma_init(H264ChromaContext *c, int bit_depth)
Definition: h264chroma.c:41
ERPicture last_pic
H264SEIDisplayOrientation display_orientation
Definition: h264_sei.h:192
mpegvideo header.
int current_frame_is_frame0_flag
Definition: h264_sei.h:157
int next_slice_idx
Definition: h264dec.h:242
static const uint8_t zigzag_scan8x8_cavlc[64+1]
Definition: h264_slice.c:96
H264Context.
Definition: h264dec.h:343
discard all non intra frames
Definition: avcodec.h:234
discard all
Definition: avcodec.h:236
AVFrame * f
Definition: h264dec.h:129
const PPS * pps
Definition: h264dec.h:166
Views are next to each other.
Definition: stereo3d.h:67
size_t crop_bottom
Definition: frame.h:675
int bits_per_raw_sample
Bits per sample/pixel of internal libavcodec pixel/sample format.
Definition: avcodec.h:1762
uint32_t num_units_in_tick
Definition: h264_ps.h:83
static const uint8_t field_scan[16+1]
Definition: h264_slice.c:50
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:1695
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before ff_thread_await_progress() has been called on them.reget_buffer() and buffer age optimizations no longer work.*The contents of buffers must not be written to after ff_thread_report_progress() has been called on them.This includes draw_edges().Porting codecs to frame threading
H264Picture * long_ref[32]
Definition: h264dec.h:470
int profile
profile
Definition: avcodec.h:1871
int picture_structure
Definition: h264dec.h:413
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB)
Definition: pixfmt.h:513
#define AV_WN32A(p, v)
Definition: intreadwrite.h:538
#define AV_COPY32(d, s)
Definition: intreadwrite.h:601
void av_display_matrix_flip(int32_t matrix[9], int hflip, int vflip)
Flip the input matrix horizontally and/or vertically.
Definition: display.c:65
unsigned int ref_count[2]
num_ref_idx_l0/1_active_minus1 + 1
Definition: h264dec.h:273
#define IN_RANGE(a, b, size)
Definition: h264_slice.c:273
#define REBASE_PICTURE(pic, new_ctx, old_ctx)
Definition: h264_slice.c:275
MMCO mmco[MAX_MMCO_COUNT]
Definition: h264dec.h:328
void av_display_rotation_set(int32_t matrix[9], double angle)
Initialize a transformation matrix describing a pure counterclockwise rotation by the specified angle...
Definition: display.c:50
Frame contains only the left view.
Definition: stereo3d.h:156
int ff_h264_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
Definition: h264_slice.c:296
Switching Intra.
Definition: avutil.h:278
int setup_finished
Definition: h264dec.h:540
enum AVDiscard skip_frame
Skip decoding for selected frames.
Definition: avcodec.h:2016
int ff_h264_execute_decode_slices(H264Context *h)
Call decode_slice() for each context.
Definition: h264_slice.c:2792
H264SEIContext sei
Definition: h264dec.h:553
AVBufferRef * buf_ref
Definition: h264_sei.h:124
int ff_h264_sei_process_picture_timing(H264SEIPictureTiming *h, const SPS *sps, void *logctx)
Parse the contents of a picture timing message given an active SPS.
Definition: h264_sei.c:62
unsigned int crop_top
frame_cropping_rect_top_offset
Definition: h264_ps.h:70
#define USES_LIST(a, list)
Definition: mpegutils.h:99
void ff_color_frame(AVFrame *frame, const int color[4])
Definition: utils.c:414
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
const uint8_t * bytestream
Definition: cabac.h:48
int ref2frm[MAX_SLICES][2][64]
reference to frame number lists, used in the loop filter, the first 2 are for -2,-1 ...
Definition: h264dec.h:559
int deblocking_filter_parameters_present
deblocking_filter_parameters_present_flag
Definition: h264_ps.h:123
static double cb(void *priv, double x, double y)
Definition: vf_geq.c:215
const PPS * pps
Definition: h264_ps.h:149
4: bottom field, top field, in that order
Definition: h264_sei.h:51
static enum AVPixelFormat non_j_pixfmt(enum AVPixelFormat a)
Definition: h264_slice.c:1004
uint8_t
int full_range
Definition: h264_ps.h:75
unsigned int crop_left
frame_cropping_rect_left_offset
Definition: h264_ps.h:68
int gaps_in_frame_num_allowed_flag
Definition: h264_ps.h:58
#define MB_MBAFF(h)
Definition: h264dec.h:71
int slice_alpha_c0_offset
Definition: h264dec.h:200
Stereo 3D type: this structure describes how two videos are packed within a single video surface...
Definition: stereo3d.h:176
int poc
Definition: h264dec.h:177
void ff_h264_set_erpic(ERPicture *dst, H264Picture *src)
Definition: h264_picture.c:136
int field_picture
whether or not picture was encoded in separate fields
Definition: h264dec.h:158
int bit_depth_chroma
bit_depth_chroma_minus8 + 8
Definition: h264_ps.h:101
void ff_h264_hl_decode_mb(const H264Context *h, H264SliceContext *sl)
Definition: h264_mb.c:799
#define FF_DEBUG_PICT_INFO
Definition: avcodec.h:1617
size_t crop_left
Definition: frame.h:676
enum AVColorPrimaries color_primaries
Definition: h264_ps.h:77
int poc
frame POC
Definition: h264dec.h:148
int frame_num_offset
for POC type 2
Definition: h264_parse.h:51
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
int chroma_weight_flag[2]
7.4.3.2 chroma_weight_lX_flag
Definition: h264_parse.h:36
Multithreading support functions.
#define ER_MB_ERROR
int cabac
entropy_coding_mode_flag
Definition: h264_ps.h:113
#define MB_FIELD(sl)
Definition: h264dec.h:72
const char * from
Definition: jacosubdec.c:65
unsigned int crop_right
frame_cropping_rect_right_offset
Definition: h264_ps.h:69
#define AV_PIX_FMT_GBRP9
Definition: pixfmt.h:414
uint8_t(*[2] top_borders)[(16 *3)*2]
Definition: h264dec.h:291
int invalid_gap
Definition: h264dec.h:162
ERPicture cur_pic
int frame_recovered
Initial frame has been completely recovered.
Definition: h264dec.h:530
Structure to hold side data for an AVFrame.
Definition: frame.h:220
int height
Definition: h264dec.h:365
#define PICT_BOTTOM_FIELD
Definition: mpegutils.h:38
#define height
#define MAX_PPS_COUNT
Definition: h264_ps.h:38
int pt
Definition: rtp.c:35
int transform_bypass
qpprime_y_zero_transform_bypass_flag
Definition: h264_ps.h:49
static void finish(void)
Definition: movenc.c:345
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:219
void ff_h264_filter_mb(const H264Context *h, H264SliceContext *sl, int mb_x, int mb_y, uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr, unsigned int linesize, unsigned int uvlinesize)
char av_get_picture_type_char(enum AVPictureType pict_type)
Return a single letter to describe the given picture type pict_type.
Definition: utils.c:88
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:79
#define ER_MV_END
int ff_set_sar(AVCodecContext *avctx, AVRational sar)
Check that the provided sample aspect ratio is valid and set it on the codec context.
Definition: utils.c:121
int redundant_pic_cnt_present
redundant_pic_cnt_present_flag
Definition: h264_ps.h:125
int picture_structure
Definition: h264dec.h:246
int chroma_y_shift
Definition: h264dec.h:366
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:210
AVDictionary * metadata
metadata.
Definition: frame.h:600
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:461
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:55
AVBufferRef * qscale_table_buf
Definition: h264dec.h:132
static int h264_export_frame_props(H264Context *h)
Definition: h264_slice.c:1134
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:404
H264Picture * parent
Definition: h264dec.h:180
#define FRAME_RECOVERED_SEI
Sufficient number of frames have been decoded since a SEI recovery point, so all the following frames...
Definition: h264dec.h:528
H264SEIAFD afd
Definition: h264_sei.h:186
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:308
int recovered
picture at IDR or recovery point + recovery count
Definition: h264dec.h:161
Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVAc...
Definition: frame.h:89
#define AV_COPY64(d, s)
Definition: intreadwrite.h:605
int ff_h264_decode_ref_pic_list_reordering(H264SliceContext *sl, void *logctx)
Definition: h264_refs.c:423
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
Definition: avcodec.h:1173
#define FFALIGN(x, a)
Definition: macros.h:48
int chroma_qp[2]
Definition: h264dec.h:194
#define av_log(a,...)
int last_pocs[MAX_DELAYED_PIC_COUNT]
Definition: h264dec.h:472
const char * to
Definition: webvttdec.c:34
void ff_h264_direct_ref_list_init(const H264Context *const h, H264SliceContext *sl)
Definition: h264_direct.c:121
int width
Definition: h264dec.h:365
static int h264_frame_start(H264Context *h)
Definition: h264_slice.c:468
H.264 common definitions.
void ff_h264_draw_horiz_band(const H264Context *h, H264SliceContext *sl, int y, int height)
Definition: h264dec.c:103
#define U(x)
Definition: vp56_arith.h:37
#define src
Definition: vp8dsp.c:254
int timecode_cnt
Number of timecode in use.
Definition: h264_sei.h:115
#define HWACCEL_MAX
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:849
H.264 parameter set handling.
H264Picture DPB[H264_MAX_PICTURE_COUNT]
Definition: h264dec.h:351
enum AVColorTransferCharacteristic color_trc
Definition: h264_ps.h:78
int mb_aff
mb_adaptive_frame_field_flag
Definition: h264_ps.h:63
H264PredContext hpc
Definition: h264dec.h:392
int chroma_log2_weight_denom
Definition: h264_parse.h:34
int width
Definition: frame.h:372
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
int has_b_frames
Size of the frame reordering buffer in the decoder.
Definition: avcodec.h:821
#define td
Definition: regdef.h:70
int flags
Additional information about the frame packing.
Definition: stereo3d.h:185
static int get_ue_golomb(GetBitContext *gb)
Read an unsigned Exp-Golomb code in the range 0 to 8190.
Definition: golomb.h:55
static int alloc_scratch_buffers(H264SliceContext *sl, int linesize)
Definition: h264_slice.c:128
int poc_type
pic_order_cnt_type
Definition: h264_ps.h:51
void ff_er_add_slice(ERContext *s, int startx, int starty, int endx, int endy, int status)
Add a slice.
int context_initialized
Definition: h264dec.h:371
#define PTRDIFF_SPECIFIER
Definition: internal.h:228
ERContext er
Definition: h264dec.h:186
int nal_unit_type
Definition: h264dec.h:449
int ff_h264_decode_ref_pic_marking(H264SliceContext *sl, GetBitContext *gb, const H2645NAL *nal, void *logctx)
Definition: h264_refs.c:834
int ff_h264_get_profile(const SPS *sps)
Compute profile from profile_idc and constraint_set?_flags.
Definition: h264_parse.c:529
int num_reorder_frames
Definition: h264_ps.h:88
discard all bidirectional frames
Definition: avcodec.h:233
H264_SEI_FpaType arrangement_type
Definition: h264_sei.h:153
void * hwaccel_picture_private
hardware accelerator private data
Definition: h264dec.h:142
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:2601
Display matrix.
Views are packed per line, as if interlaced.
Definition: stereo3d.h:129
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:1809
static const uint8_t field_scan8x8[64+1]
Definition: h264_slice.c:57
const uint8_t ff_zigzag_scan[16+1]
Definition: mathtables.c:109
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:215
ATSC A53 Part 4 Closed Captions.
Definition: frame.h:58
#define FIELD_PICTURE(h)
Definition: h264dec.h:74
int picture_idr
Definition: h264dec.h:384
const char * arg
Definition: jacosubdec.c:66
int deblocking_filter
disable_deblocking_filter_idc with 1 <-> 0
Definition: h264dec.h:199
uint8_t(*[2] mvd_table)[2]
Definition: h264dec.h:423
int prev_interlaced_frame
Complement sei_pic_struct SEI_PIC_STRUCT_TOP_BOTTOM and SEI_PIC_STRUCT_BOTTOM_TOP indicate interlaced...
Definition: h264dec.h:504
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:611
static int fill_filter_caches(const H264Context *h, H264SliceContext *sl, int mb_type)
Definition: h264_slice.c:2310
ThreadFrame tf
Definition: h264dec.h:130
simple assert() macros that are a bit more flexible than ISO C assert().
int weighted_pred
weighted_pred_flag
Definition: h264_ps.h:118
#define PICT_TOP_FIELD
Definition: mpegutils.h:37
H264QpelContext h264qpel
Definition: h264dec.h:349
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:402
int direct_spatial_mv_pred
Definition: h264dec.h:257
H264SEIUnregistered unregistered
Definition: h264_sei.h:188
int frame_num
frame_num (raw frame_num from slice header)
Definition: h264dec.h:149
const uint8_t ff_h264_golomb_to_pict_type[5]
Definition: h264data.c:37
#define MAX_SLICES
Definition: dxva2_hevc.c:29
int valid_recovery_point
Are the SEI recovery points looking valid.
Definition: h264dec.h:509
GLsizei count
Definition: opengl_enc.c:108
int ff_h264_get_slice_type(const H264SliceContext *sl)
Reconstruct bitstream slice_type.
Definition: h264_slice.c:2208
#define FFMAX(a, b)
Definition: common.h:94
#define fail()
Definition: checkasm.h:123
uint8_t active_format_description
Definition: h264_sei.h:120
int delta_pic_order_always_zero_flag
Definition: h264_ps.h:53
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4], const uint8_t *src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
Definition: imgutils.c:422
int * mb_index2xy
int slice_type_nos
S free slice type (SI/SP are remapped to I/P)
Definition: h264dec.h:190
uint8_t zigzag_scan8x8[64]
Definition: h264dec.h:428
AVBufferRef * hwaccel_priv_buf
Definition: h264dec.h:141
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
int av_reallocp_array(void *ptr, size_t nmemb, size_t size)
Allocate, reallocate, or free an array through a pointer to a pointer.
Definition: mem.c:206
AVStereo3D * av_stereo3d_create_side_data(AVFrame *frame)
Allocate a complete AVFrameSideData and add it to the frame.
Definition: stereo3d.c:33
int crop_bottom
Definition: h264dec.h:389
uint8_t * error_status_table
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before as well as code calling up to before the decode process starts Call have so the codec calls ff_thread_report set FF_CODEC_CAP_ALLOCATE_PROGRESS in AVCodec caps_internal and use ff_thread_get_buffer() to allocate frames.The frames must then be freed with ff_thread_release_buffer().Otherwise decode directly into the user-supplied frames.Call ff_thread_report_progress() after some part of the current picture has decoded.A good place to put this is where draw_horiz_band() is called-add this if it isn't called anywhere
size_t crop_top
Definition: frame.h:674
Views are alternated temporally.
Definition: stereo3d.h:92
void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size)
Allocate a buffer, reusing the given one if large enough.
Definition: mem.c:502
int ff_h264_parse_ref_count(int *plist_count, int ref_count[2], GetBitContext *gb, const PPS *pps, int slice_type_nos, int picture_structure, void *logctx)
Definition: h264_parse.c:219
int nal_length_size
Number of bytes used for nal length (1, 2 or 4)
Definition: h264dec.h:457
useful rectangle filling function
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:397
int refs
number of reference frames
Definition: avcodec.h:1119
int prev_poc_msb
poc_msb of the last reference pic for POC type 0
Definition: h264_parse.h:49
#define ss(width, name, subs,...)
Definition: cbs_vp9.c:261
AVBufferRef * motion_val_buf[2]
Definition: h264dec.h:135
int ref_frame_count
num_ref_frames
Definition: h264_ps.h:57
enum AVPixelFormat * pix_fmts
array of supported pixel formats, or NULL if unknown, array is terminated by -1
Definition: codec.h:211
enum AVPictureType pict_type
Picture type of the frame.
Definition: frame.h:397
int flags
Frame flags, a combination of AV_FRAME_FLAGS.
Definition: frame.h:551
H264_SEI_PicStructType pic_struct
Definition: h264_sei.h:88
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:1660
#define av_assert1(cond)
assert() equivalent, that does not lie in speed critical code.
Definition: avassert.h:53
int x264_build
Definition: h264dec.h:374
int ct_type
Bit set of clock types for fields/frames in picture timing SEI message.
Definition: h264_sei.h:95
void av_fast_mallocz(void *ptr, unsigned int *size, size_t min_size)
Allocate and clear a buffer, reusing the given one if large enough.
Definition: mem.c:507
#define FFMIN(a, b)
Definition: common.h:96
uint16_t * slice_table
slice_table_base + 2*mb_stride + 1
Definition: h264dec.h:409
static void copy_picture_range(H264Picture **to, H264Picture **from, int count, H264Context *new_base, H264Context *old_base)
Definition: h264_slice.c:280
static int h264_field_start(H264Context *h, const H264SliceContext *sl, const H2645NAL *nal, int first_slice)
Definition: h264_slice.c:1450
uint8_t field_scan8x8_cavlc[64]
Definition: h264dec.h:432
#define IS_DIRECT(a)
Definition: mpegutils.h:84
CABACContext cabac
Cabac.
Definition: h264dec.h:324
int colour_description_present_flag
Definition: h264_ps.h:76
unsigned int first_mb_addr
Definition: h264dec.h:240
int reference
Definition: h264dec.h:160
static void er_add_slice(H264SliceContext *sl, int startx, int starty, int endx, int endy, int status)
Definition: h264_slice.c:2566
#define LEFT_MBS
Definition: h264dec.h:75
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:78
AVRational sar
Definition: h264_ps.h:73
#define width
AVFrameSideData * av_frame_new_side_data_from_buf(AVFrame *frame, enum AVFrameSideDataType type, AVBufferRef *buf)
Add a new side data to a frame from an existing AVBufferRef.
Definition: frame.c:694
int width
picture width / height.
Definition: avcodec.h:704
int redundant_pic_count
Definition: h264dec.h:250
int nb_slice_ctx
Definition: h264dec.h:357
uint8_t w
Definition: llviddspenc.c:38
H264PredWeightTable pwt
Definition: h264dec.h:203
int long_ref_count
number of actual long term references
Definition: h264dec.h:484
#define ER_DC_END
uint32_t * mb_type
Definition: h264dec.h:139
#define AV_FRAME_FLAG_CORRUPT
The frame data may be corrupted, e.g.
Definition: frame.h:539
void ff_thread_report_progress(ThreadFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
static int h264_init_ps(H264Context *h, const H264SliceContext *sl, int first_slice)
Definition: h264_slice.c:1015
int size_in_bits
Definition: get_bits.h:68
int32_t
int ff_init_cabac_decoder(CABACContext *c, const uint8_t *buf, int buf_size)
Definition: cabac.c:176
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
Definition: avcodec.h:1145
char * av_timecode_make_smpte_tc_string2(char *buf, AVRational rate, uint32_t tcsmpte, int prevent_df, int skip_field)
Get the timecode string from the SMPTE timecode format.
Definition: timecode.c:139
#define FFABS(a)
Absolute value, Note, INT_MIN / INT64_MIN result in undefined behavior as they are not representable ...
Definition: common.h:72
#define FF_THREAD_SLICE
Decode more than one part of a single frame at once.
Definition: avcodec.h:1802
Context Adaptive Binary Arithmetic Coder inline functions.
int level
level
Definition: avcodec.h:1994
int init_qp
pic_init_qp_minus26 + 26
Definition: h264_ps.h:120
H.264 / AVC / MPEG-4 part10 codec.
enum AVChromaLocation chroma_location
Definition: h264_ps.h:80
int mmco_reset
Definition: h264dec.h:481
H264SliceContext * slice_ctx
Definition: h264dec.h:356
int direct_8x8_inference_flag
Definition: h264_ps.h:64
static int h264_select_output_frame(H264Context *h)
Definition: h264_slice.c:1347
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: avcodec.h:1671
int reference
Definition: h264dec.h:176
int ticks_per_frame
For some codecs, the time base is closer to the field rate than the frame rate.
Definition: avcodec.h:663
int top_borders_allocated[2]
Definition: h264dec.h:294
static void fill_rectangle(int x, int y, int w, int h)
Definition: ffplay.c:831
#define FIELD_OR_MBAFF_PICTURE(h)
Definition: h264dec.h:91
int ref_idc
H.264 only, nal_ref_idc.
Definition: h2645_parse.h:70
static void init_scan_tables(H264Context *h)
initialize scan tables
Definition: h264_slice.c:733
static int av_unused get_cabac_terminate(CABACContext *c)
int quincunx_sampling_flag
Definition: h264_sei.h:156
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:398
#define AV_PIX_FMT_GBRP14
Definition: pixfmt.h:417
HW acceleration through CUDA.
Definition: pixfmt.h:235
int type
NAL unit type.
Definition: h2645_parse.h:52
#define FF_ARRAY_ELEMS(a)
Full range content.
Definition: pixfmt.h:586
static int init_table_pools(H264Context *h)
Definition: h264_slice.c:160
uint8_t * edge_emu_buffer
Definition: h264dec.h:290
if(ret)
static unsigned get_ue_golomb_long(GetBitContext *gb)
Read an unsigned Exp-Golomb code in the range 0 to UINT32_MAX-1.
Definition: golomb.h:106
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:62
int pic_order_present
pic_order_present_flag
Definition: h264_ps.h:114
uint8_t zigzag_scan_q0[16]
Definition: h264dec.h:433
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:387
int bit_depth_luma
luma bit depth from sps to detect changes
Definition: h264dec.h:459
int chroma_format_idc
chroma format from sps to detect changes
Definition: h264dec.h:460
VideoDSPContext vdsp
Definition: h264dec.h:346
int timing_info_present_flag
Definition: h264_ps.h:82
int coded_picture_number
picture number in bitstream order
Definition: frame.h:428
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:180
int mb_stride
Definition: h264dec.h:442
Views are packed in a checkerboard-like structure per pixel.
Definition: stereo3d.h:104
int postpone_filter
Definition: h264dec.h:379
#define IS_INTERLACED(a)
Definition: mpegutils.h:83
AVCodecContext * avctx
Definition: h264dec.h:345
uint8_t zigzag_scan8x8_q0[64]
Definition: h264dec.h:434
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:407
5: top field, bottom field, top field repeated, in that order
Definition: h264_sei.h:52
Libavcodec external API header.
#define MAX_DELAYED_PIC_COUNT
Definition: h264dec.h:56
Views are on top of each other.
Definition: stereo3d.h:79
int last_qscale_diff
Definition: h264dec.h:196
This side data contains a 3x3 transformation matrix describing an affine transformation that needs to...
Definition: frame.h:84
AVBufferRef * pps_list[MAX_PPS_COUNT]
Definition: h264_ps.h:145
enum AVCodecID codec_id
Definition: avcodec.h:541
static int get_ue_golomb_31(GetBitContext *gb)
read unsigned exp golomb code, constraint to a max of 31.
Definition: golomb.h:122
int crop_left
Definition: h264dec.h:386
int delta_poc_bottom
Definition: h264_parse.h:46
ERPicture next_pic
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:345
H264Picture * short_ref[32]
Definition: h264dec.h:469
int next_outputed_poc
Definition: h264dec.h:474
int ff_h264_decode_mb_cabac(const H264Context *h, H264SliceContext *sl)
Decode a macroblock.
Definition: h264_cabac.c:1911
int explicit_ref_marking
Definition: h264dec.h:482
#define AV_CODEC_FLAG2_FAST
Allow non spec compliant speedup tricks.
Definition: avcodec.h:348
int field_poc[2]
top/bottom POC
Definition: h264dec.h:147
int debug
debug
Definition: avcodec.h:1616
int recovery_frame
recovery_frame is the frame_num at which the next frame should be fully constructed.
Definition: h264dec.h:517
main external API structure.
Definition: avcodec.h:531
User data unregistered metadata associated with a video frame.
Definition: frame.h:194
int qp_thresh
QP threshold to skip loopfilter.
Definition: h264dec.h:195
int explicit_ref_marking
Definition: h264dec.h:330
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining list
uint8_t * data
The data buffer.
Definition: buffer.h:89
H264SEITimeCode timecode[3]
Maximum three timecodes in a pic_timing SEI.
Definition: h264_sei.h:110
#define fp
Definition: regdef.h:44
uint8_t * data
Definition: frame.h:222
int mb_height
Definition: h264dec.h:168
static int h264_slice_header_parse(const H264Context *h, H264SliceContext *sl, const H2645NAL *nal)
Definition: h264_slice.c:1736
H264SEIA53Caption a53_caption
Definition: h264_sei.h:187
int implicit_weight[48][48][2]
Definition: h264_parse.h:40
size_t crop_right
Definition: frame.h:677
int8_t * qscale_table
Definition: h264dec.h:133
static const uint8_t scan8[16 *3+3]
Definition: h264dec.h:650
#define CABAC(h)
Definition: h264_cabac.c:28
AVBufferRef * av_buffer_allocz(int size)
Same as av_buffer_alloc(), except the returned buffer will be initialized to zero.
Definition: buffer.c:83
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
AVBuffer * buffer
Definition: buffer.h:82
static const uint8_t field_scan8x8_cavlc[64+1]
Definition: h264_slice.c:76
static unsigned int get_bits1(GetBitContext *s)
Definition: get_bits.h:498
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:399
AVCodecContext * owner[2]
Definition: thread.h:36
int coded_height
Definition: avcodec.h:719
Switching Predicted.
Definition: avutil.h:279
int prev_frame_num
frame_num of the last pic for POC type 1/2
Definition: h264_parse.h:53
#define FF_CODEC_PROPERTY_CLOSED_CAPTIONS
Definition: avcodec.h:2205
static int FUNC() pps(CodedBitstreamContext *ctx, RWContext *rw, H264RawPPS *current)
AVFrameSideData * av_frame_new_side_data(AVFrame *frame, enum AVFrameSideDataType type, int size)
Add a new side data to a frame.
Definition: frame.c:726
uint8_t non_zero_count_cache[15 *8]
non zero coeff count cache.
Definition: h264dec.h:300
void av_buffer_pool_uninit(AVBufferPool **ppool)
Mark the pool as being available for freeing.
Definition: buffer.c:303
#define FRAME_MBAFF(h)
Definition: h264dec.h:73
enum AVColorSpace colorspace
YUV colorspace type.
Definition: avcodec.h:1159
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
Definition: avcodec.h:1152
#define LBOT
Definition: h264dec.h:77
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:197
static av_always_inline uint32_t pack16to32(unsigned a, unsigned b)
Definition: h264dec.h:666
int8_t * ref_index[2]
Definition: h264dec.h:145
int ff_h264_ref_picture(H264Context *h, H264Picture *dst, H264Picture *src)
Definition: h264_picture.c:66
A reference counted buffer type.
int pixel_shift
0 for 8-bit H.264, 1 for high-bit-depth H.264
Definition: h264dec.h:362
they must not be accessed directly The fifo field contains the frames that are queued in the input for processing by the filter The status_in and status_out fields contains the queued status(EOF or error) of the link
int mmco_reset
MMCO_RESET set this 1.
Definition: h264dec.h:150
int content_interpretation_type
Definition: h264_sei.h:155
H264Picture * cur_pic_ptr
Definition: h264dec.h:352
#define LIST_NOT_USED
Definition: h264dec.h:396
const uint8_t ff_zigzag_direct[64]
Definition: mathtables.c:98
ptrdiff_t mb_uvlinesize
Definition: h264dec.h:234
static int h264_slice_header_init(H264Context *h)
Definition: h264_slice.c:920
static int FUNC() sps(CodedBitstreamContext *ctx, RWContext *rw, H264RawSPS *current)
int mb_mbaff
mb_aff_frame && mb_field_decoding_flag
Definition: h264dec.h:248
int enable_er
Definition: h264dec.h:551
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:396
#define IS_INTER(a)
Definition: mpegutils.h:79
#define FF_COMPLIANCE_STRICT
Strictly conform to all the things in the spec no matter what consequences.
Definition: avcodec.h:1596
const SPS * sps
Definition: h264_ps.h:150
unsigned int sps_id
Definition: h264_ps.h:112
#define TRANSPOSE(x)
H264SEIPictureTiming picture_timing
Definition: h264_sei.h:185
int width_from_caller
Definition: h264dec.h:548
int log2_max_poc_lsb
log2_max_pic_order_cnt_lsb_minus4
Definition: h264_ps.h:52
H264SEIRecoveryPoint recovery_point
Definition: h264_sei.h:189
ptrdiff_t mb_linesize
may be equal to s->linesize or s->linesize * 2, for mbaff
Definition: h264dec.h:233
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer. ...
Definition: pixfmt.h:137
int16_t slice_row[MAX_SLICES]
to detect when MAX_SLICES is too low
Definition: h264dec.h:544
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:300
3: top field, bottom field, in that order
Definition: h264_sei.h:50
static int alloc_picture(H264Context *h, H264Picture *pic)
Definition: h264_slice.c:187
ptrdiff_t linesize
Definition: h264dec.h:232
int block_offset[2 *(16 *3)]
block_offset[ 0..23] for frame macroblocks block_offset[24..47] for field macroblocks ...
Definition: h264dec.h:403
uint32_t time_scale
Definition: h264_ps.h:84
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:408
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:416
int transform_8x8_mode
transform_8x8_mode_flag
Definition: h264_ps.h:126
ptrdiff_t uvlinesize
Definition: h264dec.h:232
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:400
static int h264_slice_init(H264Context *h, H264SliceContext *sl, const H2645NAL *nal)
Definition: h264_slice.c:1947
int pic_struct_present_flag
Definition: h264_ps.h:94
#define CHROMA444(h)
Definition: h264dec.h:99
unsigned int list_count
Definition: h264dec.h:274
uint8_t zigzag_scan[16]
Definition: h264dec.h:427
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:406
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:328
AVBufferRef * pps_buf
Definition: h264dec.h:165
int prev_poc_lsb
poc_lsb of the last reference pic for POC type 0
Definition: h264_parse.h:50
static void release_unused_pictures(H264Context *h, int remove_current)
Definition: h264_slice.c:115
int ff_h264_alloc_tables(H264Context *h)
Allocate tables.
Definition: h264dec.c:181
#define AV_ZERO128(d)
Definition: intreadwrite.h:637
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:313
Narrow or limited range content.
Definition: pixfmt.h:569
int mb_stride
Definition: h264dec.h:169
int left_type[LEFT_MBS]
Definition: h264dec.h:222
static int av_cmp_q(AVRational a, AVRational b)
Compare two rationals.
Definition: rational.h:89
int nb_slice_ctx_queued
Definition: h264dec.h:358
discard all non reference
Definition: avcodec.h:232
int ff_h264_field_end(H264Context *h, H264SliceContext *sl, int in_setup)
Definition: h264_picture.c:159
AVBufferPool * qscale_table_pool
Definition: h264dec.h:555
H264Picture * next_output_pic
Definition: h264dec.h:473
int mb_height
Definition: h264_ps.h:61
AVBufferPool * motion_val_pool
Definition: h264dec.h:557
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
int delta_poc_bottom
Definition: h264dec.h:334
#define IS_8x8DCT(a)
Definition: h264dec.h:104
common internal api header.
AVBufferPool * av_buffer_pool_init(int size, AVBufferRef *(*alloc)(int size))
Allocate and initialize a buffer pool.
Definition: buffer.c:266
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:107
#define AV_COPY128(d, s)
Definition: intreadwrite.h:609
static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
Definition: h264_slice.c:767
AVBufferRef * pps_ref
Definition: h264_ps.h:147
int log2_max_frame_num
log2_max_frame_num_minus4 + 4
Definition: h264_ps.h:50
int missing_fields
Definition: h264dec.h:534
const char * av_color_transfer_name(enum AVColorTransferCharacteristic transfer)
Definition: pixdesc.c:2942
H264ParamSets ps
Definition: h264dec.h:462
H264SEIFramePacking frame_packing
Definition: h264_sei.h:191
H.264 / AVC / MPEG-4 part10 motion vector prediction.
AVBufferRef * av_buffer_ref(AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:93
Bi-dir predicted.
Definition: avutil.h:276
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:80
Stereoscopic video.
Views are packed per column.
Definition: stereo3d.h:141
int cur_chroma_format_idc
Definition: h264dec.h:542
int8_t * intra4x4_pred_mode
Definition: h264dec.h:212
unsigned properties
Properties of the stream that gets decoded.
Definition: avcodec.h:2203
enum AVDiscard skip_loop_filter
Skip loop filtering for selected frames.
Definition: avcodec.h:2002
int den
Denominator.
Definition: rational.h:60
static void predict_field_decoding_flag(const H264Context *h, H264SliceContext *sl)
Definition: h264_slice.c:2522
int ff_h264_decode_mb_cavlc(const H264Context *h, H264SliceContext *sl)
Decode a macroblock.
Definition: h264_cavlc.c:702
GetBitContext gb
Definition: h2645_parse.h:47
int bit_depth_luma
bit_depth_luma_minus8 + 8
Definition: h264_ps.h:100
AVBufferRef ** buf_ref
Definition: h264_sei.h:129
#define IS_INTRA(x, y)
int present
Definition: h264_sei.h:119
int delta_poc[2]
Definition: h264_parse.h:47
uint32_t av_timecode_get_smpte(AVRational rate, int drop, int hh, int mm, int ss, int ff)
Convert sei info to SMPTE 12M binary representation.
Definition: timecode.c:71
void ff_h264_free_tables(H264Context *h)
Definition: h264dec.c:138
void * priv_data
Definition: avcodec.h:558
#define LTOP
Definition: h264dec.h:76
#define PICT_FRAME
Definition: mpegutils.h:39
static av_always_inline void backup_mb_border(const H264Context *h, H264SliceContext *sl, uint8_t *src_y, uint8_t *src_cb, uint8_t *src_cr, int linesize, int uvlinesize, int simple)
Definition: h264_slice.c:567
uint8_t zigzag_scan8x8_cavlc_q0[64]
Definition: h264dec.h:435
int8_t ref_cache[2][5 *8]
Definition: h264dec.h:306
#define AV_CODEC_FLAG_OUTPUT_CORRUPT
Output even those frames that might be corrupted.
Definition: avcodec.h:283
unsigned int pps_id
Definition: h264dec.h:284
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: frame.h:466
int frame_priv_data_size
Size of per-frame hardware accelerator private data.
Definition: avcodec.h:2532
#define CHROMA422(h)
Definition: h264dec.h:98
#define FF_BUG_TRUNCATED
Definition: avcodec.h:1579
H264Picture cur_pic
Definition: h264dec.h:353
int key_frame
1 -> keyframe, 0-> not
Definition: frame.h:392
#define AV_ZERO32(d)
Definition: intreadwrite.h:629
int mb_width
Definition: h264dec.h:441
static int find_unused_picture(H264Context *h)
Definition: h264_slice.c:261
int current_slice
current slice number, used to initialize slice_num of each thread/context
Definition: h264dec.h:494
int ff_h264_execute_ref_pic_marking(H264Context *h)
Execute the reference picture marking (memory management control operations).
Definition: h264_refs.c:610
static const uint8_t * align_get_bits(GetBitContext *s)
Definition: get_bits.h:693
int ff_h264_pred_weight_table(GetBitContext *gb, const SPS *sps, const int *ref_count, int slice_type_nos, H264PredWeightTable *pwt, int picture_structure, void *logctx)
Definition: h264_parse.c:27
int mb_width
pic_width_in_mbs_minus1 + 1
Definition: h264_ps.h:59
int flags2
AV_CODEC_FLAG2_*.
Definition: avcodec.h:618
uint32_t * mb2b_xy
Definition: h264dec.h:405
H264Ref ref_list[2][48]
0..15: frame refs, 16..47: mbaff field refs.
Definition: h264dec.h:275
uint8_t field_scan8x8_cavlc_q0[64]
Definition: h264dec.h:438
int cur_bit_depth_luma
Definition: h264dec.h:543
int crop_top
Definition: h264dec.h:388
atomic_int error_count
AVBufferRef * ref_index_buf[2]
Definition: h264dec.h:144
av_cold void ff_h264dsp_init(H264DSPContext *c, const int bit_depth, const int chroma_format_idc)
Definition: h264dsp.c:67
int frame_number
Frame counter, set by libavcodec.
Definition: avcodec.h:1222
H264DSPContext h264dsp
Definition: h264dec.h:347
void ff_er_frame_start(ERContext *s)
int height
Definition: frame.h:372
#define AV_CODEC_FLAG2_SHOW_ALL
Show all frames before the first keyframe.
Definition: avcodec.h:376
FILE * out
Definition: movenc.c:54
uint8_t(*[2] mvd_table)[2]
Definition: h264dec.h:319
uint8_t field_scan8x8[64]
Definition: h264dec.h:431
int slice_type_fixed
Definition: h264dec.h:191
static av_always_inline void fill_filter_caches_inter(const H264Context *h, H264SliceContext *sl, int mb_type, int top_xy, int left_xy[LEFT_MBS], int top_type, int left_type[LEFT_MBS], int mb_xy, int list)
Definition: h264_slice.c:2226
int mb_width
Definition: h264dec.h:168
#define av_freep(p)
const SPS * sps
Definition: h264_ps.h:140
int prev_frame_num_offset
for POC type 2
Definition: h264_parse.h:52
#define av_always_inline
Definition: attributes.h:45
HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView p...
Definition: pixfmt.h:229
int slice_beta_offset
Definition: h264dec.h:201
int8_t * intra4x4_pred_mode
Definition: h264dec.h:391
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:364
#define ER_AC_END
static int decode_slice(struct AVCodecContext *avctx, void *arg)
Definition: h264_slice.c:2580
int delta_poc[2]
Definition: h264dec.h:335
void ff_h264_direct_dist_scale_factor(const H264Context *const h, H264SliceContext *sl)
Definition: h264_direct.c:62
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2489
int ff_h264_slice_context_init(H264Context *h, H264SliceContext *sl)
Init context Allocate buffers which are not shared amongst multiple threads.
Definition: h264dec.c:222
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Frame references ownership and permissions
static double cr(void *priv, double x, double y)
Definition: vf_geq.c:216
uint8_t field_scan_q0[16]
Definition: h264dec.h:436
int mb_field_decoding_flag
Definition: h264dec.h:247
uint8_t(* non_zero_count)[48]
Definition: h264dec.h:394
static void loop_filter(const H264Context *h, H264SliceContext *sl, int start_x, int end_x)
Definition: h264_slice.c:2452
int(* execute)(struct AVCodecContext *c, int(*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size)
The codec may call this to execute several independent things.
Definition: avcodec.h:1837
unsigned int crop_bottom
frame_cropping_rect_bottom_offset
Definition: h264_ps.h:71
exp golomb vlc stuff
uint8_t * bipred_scratchpad
Definition: h264dec.h:289
void ff_h264_unref_picture(H264Context *h, H264Picture *pic)
Definition: h264_picture.c:44
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
av_cold void ff_h264qpel_init(H264QpelContext *c, int bit_depth)
Definition: h264qpel.c:49
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:514
int droppable
Definition: h264dec.h:368
int level_idc
Definition: h264_ps.h:47
int strict_std_compliance
strictly follow the standard (MPEG-4, ...).
Definition: avcodec.h:1594
int crop_right
Definition: h264dec.h:387
void ff_h264_filter_mb_fast(const H264Context *h, H264SliceContext *sl, int mb_x, int mb_y, uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr, unsigned int linesize, unsigned int uvlinesize)
int nal_ref_idc
Definition: h264dec.h:448
GetBitContext gb
Definition: h264dec.h:185
uint8_t field_scan[16]
Definition: h264dec.h:430
int cabac_init_idc
Definition: h264dec.h:326
#define FRAME_RECOVERED_IDR
We have seen an IDR, so all the following frames in coded order are correctly decodable.
Definition: h264dec.h:523
for(j=16;j >0;--j)
6: bottom field, top field, bottom field repeated, in that order
Definition: h264_sei.h:53
int i
Definition: input.c:407
#define FFMAX3(a, b, c)
Definition: common.h:95
int b_stride
Definition: h264dec.h:407
Predicted.
Definition: avutil.h:275
#define tb
Definition: regdef.h:68
int av_buffer_replace(AVBufferRef **pdst, AVBufferRef *src)
Ensure dst refers to the same data as src.
Definition: buffer.c:219
Context Adaptive Binary Arithmetic Coder.
#define AV_TIMECODE_STR_SIZE
Definition: timecode.h:33
FF_DISABLE_DEPRECATION_WARNINGS enum AVPixelFormat ff_thread_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Wrapper around get_format() for frame-multithreaded codecs.
#define H264_MAX_PICTURE_COUNT
Definition: h264dec.h:52
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:58
void ff_h264_init_cabac_states(const H264Context *h, H264SliceContext *sl)
Definition: h264_cabac.c:1262
int short_ref_count
number of actual short term references
Definition: h264dec.h:485
static uint8_t tmp[11]
Definition: aes_ctr.c:26
enum AVColorSpace colorspace
Definition: h264_ps.h:79