FFmpeg
h264_slice.c
Go to the documentation of this file.
1 /*
2  * H.26L/H.264/AVC/JVT/14496-10/... decoder
3  * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * H.264 / AVC / MPEG-4 part10 codec.
25  * @author Michael Niedermayer <michaelni@gmx.at>
26  */
27 
28 #include "config_components.h"
29 
30 #include "libavutil/avassert.h"
31 #include "libavutil/display.h"
33 #include "libavutil/pixdesc.h"
34 #include "libavutil/timecode.h"
35 #include "internal.h"
36 #include "cabac.h"
37 #include "cabac_functions.h"
38 #include "decode.h"
39 #include "error_resilience.h"
40 #include "avcodec.h"
41 #include "h264.h"
42 #include "h264dec.h"
43 #include "h264data.h"
44 #include "h264chroma.h"
45 #include "h264_ps.h"
46 #include "golomb.h"
47 #include "mathops.h"
48 #include "mpegutils.h"
49 #include "rectangle.h"
50 #include "thread.h"
51 #include "threadframe.h"
52 
/* Coefficient scan order for 4x4 blocks in field (interlaced) coding.
 * Each entry encodes a position as column + row * 4.  The array is one
 * byte larger than the 16 initializers; the trailing element is
 * zero-initialized (presumably padding for a one-past read — TODO confirm
 * against the scan-table users). */
53 static const uint8_t field_scan[16+1] = {
54  0 + 0 * 4, 0 + 1 * 4, 1 + 0 * 4, 0 + 2 * 4,
55  0 + 3 * 4, 1 + 1 * 4, 1 + 2 * 4, 1 + 3 * 4,
56  2 + 0 * 4, 2 + 1 * 4, 2 + 2 * 4, 2 + 3 * 4,
57  3 + 0 * 4, 3 + 1 * 4, 3 + 2 * 4, 3 + 3 * 4,
58 };
59 
/* Coefficient scan order for 8x8 blocks in field (interlaced) coding.
 * Each entry encodes a position as column + row * 8; 64 entries plus one
 * zero-initialized padding slot. */
60 static const uint8_t field_scan8x8[64+1] = {
61  0 + 0 * 8, 0 + 1 * 8, 0 + 2 * 8, 1 + 0 * 8,
62  1 + 1 * 8, 0 + 3 * 8, 0 + 4 * 8, 1 + 2 * 8,
63  2 + 0 * 8, 1 + 3 * 8, 0 + 5 * 8, 0 + 6 * 8,
64  0 + 7 * 8, 1 + 4 * 8, 2 + 1 * 8, 3 + 0 * 8,
65  2 + 2 * 8, 1 + 5 * 8, 1 + 6 * 8, 1 + 7 * 8,
66  2 + 3 * 8, 3 + 1 * 8, 4 + 0 * 8, 3 + 2 * 8,
67  2 + 4 * 8, 2 + 5 * 8, 2 + 6 * 8, 2 + 7 * 8,
68  3 + 3 * 8, 4 + 1 * 8, 5 + 0 * 8, 4 + 2 * 8,
69  3 + 4 * 8, 3 + 5 * 8, 3 + 6 * 8, 3 + 7 * 8,
70  4 + 3 * 8, 5 + 1 * 8, 6 + 0 * 8, 5 + 2 * 8,
71  4 + 4 * 8, 4 + 5 * 8, 4 + 6 * 8, 4 + 7 * 8,
72  5 + 3 * 8, 6 + 1 * 8, 6 + 2 * 8, 5 + 4 * 8,
73  5 + 5 * 8, 5 + 6 * 8, 5 + 7 * 8, 6 + 3 * 8,
74  7 + 0 * 8, 7 + 1 * 8, 6 + 4 * 8, 6 + 5 * 8,
75  6 + 6 * 8, 6 + 7 * 8, 7 + 2 * 8, 7 + 3 * 8,
76  7 + 4 * 8, 7 + 5 * 8, 7 + 6 * 8, 7 + 7 * 8,
77 };
78 
/* CAVLC variant of the 8x8 field scan: same positions as field_scan8x8 but
 * reordered so that CAVLC decoding can walk the table in its interleaved
 * order.  Entries are column + row * 8; 64 entries plus one padding slot. */
79 static const uint8_t field_scan8x8_cavlc[64+1] = {
80  0 + 0 * 8, 1 + 1 * 8, 2 + 0 * 8, 0 + 7 * 8,
81  2 + 2 * 8, 2 + 3 * 8, 2 + 4 * 8, 3 + 3 * 8,
82  3 + 4 * 8, 4 + 3 * 8, 4 + 4 * 8, 5 + 3 * 8,
83  5 + 5 * 8, 7 + 0 * 8, 6 + 6 * 8, 7 + 4 * 8,
84  0 + 1 * 8, 0 + 3 * 8, 1 + 3 * 8, 1 + 4 * 8,
85  1 + 5 * 8, 3 + 1 * 8, 2 + 5 * 8, 4 + 1 * 8,
86  3 + 5 * 8, 5 + 1 * 8, 4 + 5 * 8, 6 + 1 * 8,
87  5 + 6 * 8, 7 + 1 * 8, 6 + 7 * 8, 7 + 5 * 8,
88  0 + 2 * 8, 0 + 4 * 8, 0 + 5 * 8, 2 + 1 * 8,
89  1 + 6 * 8, 4 + 0 * 8, 2 + 6 * 8, 5 + 0 * 8,
90  3 + 6 * 8, 6 + 0 * 8, 4 + 6 * 8, 6 + 2 * 8,
91  5 + 7 * 8, 6 + 4 * 8, 7 + 2 * 8, 7 + 6 * 8,
92  1 + 0 * 8, 1 + 2 * 8, 0 + 6 * 8, 3 + 0 * 8,
93  1 + 7 * 8, 3 + 2 * 8, 2 + 7 * 8, 4 + 2 * 8,
94  3 + 7 * 8, 5 + 2 * 8, 4 + 7 * 8, 5 + 4 * 8,
95  6 + 3 * 8, 6 + 5 * 8, 7 + 3 * 8, 7 + 7 * 8,
96 };
97 
// zigzag_scan8x8_cavlc[i] = zigzag_scan8x8[(i/4) + 16*(i%4)]
/* CAVLC-reordered 8x8 zigzag scan for progressive (frame) coding, derived
 * from the plain zigzag order per the formula above.  Entries are
 * column + row * 8; 64 entries plus one zero-initialized padding slot. */
99 static const uint8_t zigzag_scan8x8_cavlc[64+1] = {
100  0 + 0 * 8, 1 + 1 * 8, 1 + 2 * 8, 2 + 2 * 8,
101  4 + 1 * 8, 0 + 5 * 8, 3 + 3 * 8, 7 + 0 * 8,
102  3 + 4 * 8, 1 + 7 * 8, 5 + 3 * 8, 6 + 3 * 8,
103  2 + 7 * 8, 6 + 4 * 8, 5 + 6 * 8, 7 + 5 * 8,
104  1 + 0 * 8, 2 + 0 * 8, 0 + 3 * 8, 3 + 1 * 8,
105  3 + 2 * 8, 0 + 6 * 8, 4 + 2 * 8, 6 + 1 * 8,
106  2 + 5 * 8, 2 + 6 * 8, 6 + 2 * 8, 5 + 4 * 8,
107  3 + 7 * 8, 7 + 3 * 8, 4 + 7 * 8, 7 + 6 * 8,
108  0 + 1 * 8, 3 + 0 * 8, 0 + 4 * 8, 4 + 0 * 8,
109  2 + 3 * 8, 1 + 5 * 8, 5 + 1 * 8, 5 + 2 * 8,
110  1 + 6 * 8, 3 + 5 * 8, 7 + 1 * 8, 4 + 5 * 8,
111  4 + 6 * 8, 7 + 4 * 8, 5 + 7 * 8, 6 + 7 * 8,
112  0 + 2 * 8, 2 + 1 * 8, 1 + 3 * 8, 5 + 0 * 8,
113  1 + 4 * 8, 2 + 4 * 8, 6 + 0 * 8, 4 + 3 * 8,
114  0 + 7 * 8, 4 + 4 * 8, 7 + 2 * 8, 3 + 6 * 8,
115  5 + 5 * 8, 6 + 5 * 8, 6 + 6 * 8, 7 + 7 * 8,
116 };
117 
118 static void release_unused_pictures(H264Context *h, int remove_current)
119 {
120  int i;
121 
122  /* release non reference frames */
123  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
124  if (h->DPB[i].f->buf[0] && !h->DPB[i].reference &&
125  (remove_current || &h->DPB[i] != h->cur_pic_ptr)) {
126  ff_h264_unref_picture(h, &h->DPB[i]);
127  }
128  }
129 }
130 
/* Allocate (or grow) the per-slice scratch buffers sized from the picture
 * linesize: the bi-prediction scratchpad, the edge-emulation buffer and the
 * two top-border line caches.  On any allocation failure all four buffers
 * are freed and their "allocated" counters reset, returning ENOMEM.
 * Returns 0 on success.
 * NOTE(review): several original lines are missing from this extraction —
 * the av_fast_malloc calls that allocate sl->top_borders[0]/[1] (only their
 * size arguments remain) and the av_freep calls for the first two buffers
 * in the failure path.  Restore from the upstream file before building. */
131 static int alloc_scratch_buffers(H264SliceContext *sl, int linesize)
132 {
133  const H264Context *h = sl->h264;
134  int alloc_size = FFALIGN(FFABS(linesize) + 32, 32);
135 
136  av_fast_malloc(&sl->bipred_scratchpad, &sl->bipred_scratchpad_allocated, 16 * 6 * alloc_size);
137  // edge emu needs blocksize + filter length - 1
138  // (= 21x21 for H.264)
139  av_fast_malloc(&sl->edge_emu_buffer, &sl->edge_emu_buffer_allocated, alloc_size * 2 * 21);
140 
142  h->mb_width * 16 * 3 * sizeof(uint8_t) * 2);
144  h->mb_width * 16 * 3 * sizeof(uint8_t) * 2);
145 
146  if (!sl->bipred_scratchpad || !sl->edge_emu_buffer ||
147  !sl->top_borders[0] || !sl->top_borders[1]) {
150  av_freep(&sl->top_borders[0]);
151  av_freep(&sl->top_borders[1]);
152 
155  sl->top_borders_allocated[0] = 0;
156  sl->top_borders_allocated[1] = 0;
157  return AVERROR(ENOMEM);
158  }
159 
160  return 0;
161 }
162 
/* Initialize the per-frame metadata buffer pools (qscale table, mb types,
 * motion vectors, reference indices) sized from the current mb geometry.
 * On any pool-creation failure all pools are uninitialized and ENOMEM is
 * returned; returns 0 on success.
 * NOTE(review): the function signature line is missing from this extraction
 * (presumably `static int h264_init_table_pools(H264Context *h)` — confirm
 * against upstream), as is part of the qscale_table_pool init call. */
164 {
165  const int big_mb_num = h->mb_stride * (h->mb_height + 1) + 1;
166  const int mb_array_size = h->mb_stride * h->mb_height;
167  const int b4_stride = h->mb_width * 4 + 1;
168  const int b4_array_size = b4_stride * h->mb_height * 4;
169 
170  h->qscale_table_pool = av_buffer_pool_init(big_mb_num + h->mb_stride,
172  h->mb_type_pool = av_buffer_pool_init((big_mb_num + h->mb_stride) *
173  sizeof(uint32_t), av_buffer_allocz);
174  h->motion_val_pool = av_buffer_pool_init(2 * (b4_array_size + 4) *
175  sizeof(int16_t), av_buffer_allocz);
176  h->ref_index_pool = av_buffer_pool_init(4 * mb_array_size, av_buffer_allocz);
177 
178  if (!h->qscale_table_pool || !h->mb_type_pool || !h->motion_val_pool ||
179  !h->ref_index_pool) {
    /* partial success still tears everything down — pools are all-or-nothing */
180  av_buffer_pool_uninit(&h->qscale_table_pool);
181  av_buffer_pool_uninit(&h->mb_type_pool);
182  av_buffer_pool_uninit(&h->motion_val_pool);
183  av_buffer_pool_uninit(&h->ref_index_pool);
184  return AVERROR(ENOMEM);
185  }
186 
187  return 0;
188 }
189 
/* Allocate frame buffers and per-picture metadata for one DPB picture:
 * the frame itself (via the frame-threading buffer API), an optional
 * film-grain frame, hwaccel private data, the metadata pool buffers and a
 * reference to the active PPS.  Returns 0 on success, a negative error on
 * failure (the picture is fully unreferenced on the fail path).
 * NOTE(review): this extraction lost the signature line (presumably
 * `static int alloc_picture(H264Context *h, H264Picture *pic)`), the
 * hwaccel_priv_buf allocation, the av_pix_fmt_get_chroma_sub_sample call
 * opening, and the h264_init_table_pools call — restore from upstream. */
191 {
192  int i, ret = 0;
193 
194  av_assert0(!pic->f->data[0]);
195 
196  pic->tf.f = pic->f;
197  ret = ff_thread_get_ext_buffer(h->avctx, &pic->tf,
198  pic->reference ? AV_GET_BUFFER_FLAG_REF : 0);
199  if (ret < 0)
200  goto fail;
201 
    /* separate frame for film-grain synthesis output, same geometry */
202  if (pic->needs_fg) {
203  pic->f_grain->format = pic->f->format;
204  pic->f_grain->width = pic->f->width;
205  pic->f_grain->height = pic->f->height;
206  ret = ff_thread_get_buffer(h->avctx, pic->f_grain, 0);
207  if (ret < 0)
208  goto fail;
209  }
210 
211  if (h->avctx->hwaccel) {
212  const AVHWAccel *hwaccel = h->avctx->hwaccel;
214  if (hwaccel->frame_priv_data_size) {
216  if (!pic->hwaccel_priv_buf)
217  return AVERROR(ENOMEM);
219  }
220  }
    /* grayscale-only decoding: force chroma planes to neutral 0x80 */
221  if (CONFIG_GRAY && !h->avctx->hwaccel && h->flags & AV_CODEC_FLAG_GRAY && pic->f->data[2]) {
222  int h_chroma_shift, v_chroma_shift;
224  &h_chroma_shift, &v_chroma_shift);
225 
226  for(i=0; i<AV_CEIL_RSHIFT(pic->f->height, v_chroma_shift); i++) {
227  memset(pic->f->data[1] + pic->f->linesize[1]*i,
228  0x80, AV_CEIL_RSHIFT(pic->f->width, h_chroma_shift));
229  memset(pic->f->data[2] + pic->f->linesize[2]*i,
230  0x80, AV_CEIL_RSHIFT(pic->f->width, h_chroma_shift));
231  }
232  }
233 
    /* lazily create the metadata pools on first use */
234  if (!h->qscale_table_pool) {
236  if (ret < 0)
237  goto fail;
238  }
239 
240  pic->qscale_table_buf = av_buffer_pool_get(h->qscale_table_pool);
241  pic->mb_type_buf = av_buffer_pool_get(h->mb_type_pool);
242  if (!pic->qscale_table_buf || !pic->mb_type_buf)
243  goto fail;
244 
    /* offset past the padding rows so index 0 maps to the first real MB */
245  pic->mb_type = (uint32_t*)pic->mb_type_buf->data + 2 * h->mb_stride + 1;
246  pic->qscale_table = pic->qscale_table_buf->data + 2 * h->mb_stride + 1;
247 
248  for (i = 0; i < 2; i++) {
249  pic->motion_val_buf[i] = av_buffer_pool_get(h->motion_val_pool);
250  pic->ref_index_buf[i] = av_buffer_pool_get(h->ref_index_pool);
251  if (!pic->motion_val_buf[i] || !pic->ref_index_buf[i])
252  goto fail;
253 
254  pic->motion_val[i] = (int16_t (*)[2])pic->motion_val_buf[i]->data + 4;
255  pic->ref_index[i] = pic->ref_index_buf[i]->data;
256  }
257 
    /* pin the PPS the picture was decoded with */
258  pic->pps_buf = av_buffer_ref(h->ps.pps_ref);
259  if (!pic->pps_buf)
260  goto fail;
261  pic->pps = (const PPS*)pic->pps_buf->data;
262 
263  pic->mb_width = h->mb_width;
264  pic->mb_height = h->mb_height;
265  pic->mb_stride = h->mb_stride;
266 
267  return 0;
268 fail:
269  ff_h264_unref_picture(h, pic);
270  return (ret < 0) ? ret : AVERROR(ENOMEM);
271 }
272 
/* Return the index of the first DPB slot with no allocated frame buffer,
 * or AVERROR_INVALIDDATA when the DPB is full.
 * NOTE(review): the signature line is missing from this extraction
 * (presumably `static int find_unused_picture(H264Context *h)`). */
274 {
275  int i;
276 
277  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
278  if (!h->DPB[i].f->buf[0])
279  return i;
280  }
281  return AVERROR_INVALIDDATA;
282 }
283 
284 
/* True when pointer a lies within the object of `size` elements starting at b. */
285 #define IN_RANGE(a, b, size) (((void*)(a) >= (void*)(b)) && ((void*)(a) < (void*)((b) + (size))))
286 
/* Translate a picture pointer belonging to old_ctx's DPB into the matching
 * slot of new_ctx's DPB; yields NULL when pic is not a DPB entry of old_ctx. */
287 #define REBASE_PICTURE(pic, new_ctx, old_ctx) \
288  (((pic) && (pic) >= (old_ctx)->DPB && \
289  (pic) < (old_ctx)->DPB + H264_MAX_PICTURE_COUNT) ? \
290  &(new_ctx)->DPB[(pic) - (old_ctx)->DPB] : NULL)
291 
292 static void copy_picture_range(H264Picture **to, H264Picture **from, int count,
293  H264Context *new_base,
294  H264Context *old_base)
295 {
296  int i;
297 
298  for (i = 0; i < count; i++) {
299  av_assert1(!from[i] ||
300  IN_RANGE(from[i], old_base, 1) ||
301  IN_RANGE(from[i], old_base->DPB, H264_MAX_PICTURE_COUNT));
302  to[i] = REBASE_PICTURE(from[i], new_base, old_base);
303  }
304 }
305 
/* Synchronize decoder state from the previous frame thread (src/h1) into
 * this one (dst/h): parameter sets, geometry, DPB pictures, reference
 * lists, POC bookkeeping, MMCO state and SEI context.  Returns 0 on
 * success or a negative error code.
 * NOTE(review): the extraction dropped the signature line (presumably
 * `static int h264_update_thread_context(AVCodecContext *dst, ...)`) and at
 * least one line inside the !droppable block near the end — restore from
 * upstream before building. */
307 
309  const AVCodecContext *src)
310 {
311  H264Context *h = dst->priv_data, *h1 = src->priv_data;
312  int inited = h->context_initialized, err = 0;
313  int need_reinit = 0;
314  int i, ret;
315 
316  if (dst == src)
317  return 0;
318 
319  if (inited && !h1->ps.sps)
320  return AVERROR_INVALIDDATA;
321 
    /* any geometry or format change forces a re-init below */
322  if (inited &&
323  (h->width != h1->width ||
324  h->height != h1->height ||
325  h->mb_width != h1->mb_width ||
326  h->mb_height != h1->mb_height ||
327  !h->ps.sps ||
328  h->ps.sps->bit_depth_luma != h1->ps.sps->bit_depth_luma ||
329  h->ps.sps->chroma_format_idc != h1->ps.sps->chroma_format_idc ||
330  h->ps.sps->vui.matrix_coeffs != h1->ps.sps->vui.matrix_coeffs)) {
331  need_reinit = 1;
332  }
333 
334  /* copy block_offset since frame_start may not be called */
335  memcpy(h->block_offset, h1->block_offset, sizeof(h->block_offset));
336 
337  // SPS/PPS
338  for (i = 0; i < FF_ARRAY_ELEMS(h->ps.sps_list); i++) {
339  ret = av_buffer_replace(&h->ps.sps_list[i], h1->ps.sps_list[i]);
340  if (ret < 0)
341  return ret;
342  }
343  for (i = 0; i < FF_ARRAY_ELEMS(h->ps.pps_list); i++) {
344  ret = av_buffer_replace(&h->ps.pps_list[i], h1->ps.pps_list[i]);
345  if (ret < 0)
346  return ret;
347  }
348 
349  ret = av_buffer_replace(&h->ps.pps_ref, h1->ps.pps_ref);
350  if (ret < 0)
351  return ret;
    /* re-derive the active PPS/SPS pointers from our own copy of the ref */
352  h->ps.pps = NULL;
353  h->ps.sps = NULL;
354  if (h1->ps.pps_ref) {
355  h->ps.pps = (const PPS*)h->ps.pps_ref->data;
356  h->ps.sps = h->ps.pps->sps;
357  }
358 
359  if (need_reinit || !inited) {
360  h->width = h1->width;
361  h->height = h1->height;
362  h->mb_height = h1->mb_height;
363  h->mb_width = h1->mb_width;
364  h->mb_num = h1->mb_num;
365  h->mb_stride = h1->mb_stride;
366  h->b_stride = h1->b_stride;
367  h->x264_build = h1->x264_build;
368 
369  if (h->context_initialized || h1->context_initialized) {
370  if ((err = h264_slice_header_init(h)) < 0) {
371  av_log(h->avctx, AV_LOG_ERROR, "h264_slice_header_init() failed");
372  return err;
373  }
374  }
375 
376  /* copy block_offset since frame_start may not be called */
377  memcpy(h->block_offset, h1->block_offset, sizeof(h->block_offset));
378  }
379 
380  h->avctx->coded_height = h1->avctx->coded_height;
381  h->avctx->coded_width = h1->avctx->coded_width;
382  h->avctx->width = h1->avctx->width;
383  h->avctx->height = h1->avctx->height;
384  h->width_from_caller = h1->width_from_caller;
385  h->height_from_caller = h1->height_from_caller;
386  h->coded_picture_number = h1->coded_picture_number;
387  h->first_field = h1->first_field;
388  h->picture_structure = h1->picture_structure;
389  h->mb_aff_frame = h1->mb_aff_frame;
390  h->droppable = h1->droppable;
391 
    /* copy the whole DPB, then rebase all picture pointers into it */
392  for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
393  ret = ff_h264_replace_picture(h, &h->DPB[i], &h1->DPB[i]);
394  if (ret < 0)
395  return ret;
396  }
397 
398  h->cur_pic_ptr = REBASE_PICTURE(h1->cur_pic_ptr, h, h1);
399  ret = ff_h264_replace_picture(h, &h->cur_pic, &h1->cur_pic);
400  if (ret < 0)
401  return ret;
402 
403  h->enable_er = h1->enable_er;
404  h->workaround_bugs = h1->workaround_bugs;
405  h->droppable = h1->droppable;
406 
407  // extradata/NAL handling
408  h->is_avc = h1->is_avc;
409  h->nal_length_size = h1->nal_length_size;
410 
411  memcpy(&h->poc, &h1->poc, sizeof(h->poc));
412 
413  memcpy(h->short_ref, h1->short_ref, sizeof(h->short_ref));
414  memcpy(h->long_ref, h1->long_ref, sizeof(h->long_ref));
415  memcpy(h->delayed_pic, h1->delayed_pic, sizeof(h->delayed_pic));
416  memcpy(h->last_pocs, h1->last_pocs, sizeof(h->last_pocs));
417 
418  h->next_output_pic = h1->next_output_pic;
419  h->next_outputed_poc = h1->next_outputed_poc;
420  h->poc_offset = h1->poc_offset;
421 
422  memcpy(h->mmco, h1->mmco, sizeof(h->mmco));
423  h->nb_mmco = h1->nb_mmco;
424  h->mmco_reset = h1->mmco_reset;
425  h->explicit_ref_marking = h1->explicit_ref_marking;
426  h->long_ref_count = h1->long_ref_count;
427  h->short_ref_count = h1->short_ref_count;
428 
    /* the raw memcpys above copied h1's pointers; rebase them into h */
429  copy_picture_range(h->short_ref, h1->short_ref, 32, h, h1);
430  copy_picture_range(h->long_ref, h1->long_ref, 32, h, h1);
431  copy_picture_range(h->delayed_pic, h1->delayed_pic,
432  FF_ARRAY_ELEMS(h->delayed_pic), h, h1);
433 
434  h->frame_recovered = h1->frame_recovered;
435 
436  ret = ff_h264_sei_ctx_replace(&h->sei, &h1->sei);
437  if (ret < 0)
438  return ret;
439 
440  h->sei.common.unregistered.x264_build = h1->sei.common.unregistered.x264_build;
441 
442  if (!h->cur_pic_ptr)
443  return 0;
444 
    /* roll the POC state forward for the next frame */
445  if (!h->droppable) {
447  h->poc.prev_poc_msb = h->poc.poc_msb;
448  h->poc.prev_poc_lsb = h->poc.poc_lsb;
449  }
450  h->poc.prev_frame_num_offset = h->poc.frame_num_offset;
451  h->poc.prev_frame_num = h->poc.frame_num;
452 
453  h->recovery_frame = h1->recovery_frame;
454 
455  return err;
456 }
457 
/* Minimal thread-context sync for user (non-frame-threading) updates: only
 * the extradata-derived bitstream framing fields are copied.
 * NOTE(review): the first signature line is missing from this extraction
 * (presumably `static int h264_update_thread_context_for_user(AVCodecContext *dst,`). */
459  const AVCodecContext *src)
460 {
461  H264Context *h = dst->priv_data;
462  const H264Context *h1 = src->priv_data;
463 
464  h->is_avc = h1->is_avc;
465  h->nal_length_size = h1->nal_length_size;
466 
467  return 0;
468 }
469 
/* Begin decoding a new frame: pick a free DPB slot, initialize the new
 * picture's metadata and cropping, allocate its buffers, set up the
 * error-resilience state and precompute the per-block luma/chroma offset
 * tables.  Returns 0 on success or a negative error code.
 * NOTE(review): this extraction lost the signature line (presumably
 * `static int h264_frame_start(H264Context *h)`), the
 * release_unused_pictures call and the `i = find_unused_picture(h);` line
 * — restore from upstream before building. */
471 {
472  H264Picture *pic;
473  int i, ret;
474  const int pixel_shift = h->pixel_shift;
475 
476  if (!ff_thread_can_start_frame(h->avctx)) {
477  av_log(h->avctx, AV_LOG_ERROR, "Attempt to start a frame outside SETUP state\n");
478  return -1;
479  }
480 
482  h->cur_pic_ptr = NULL;
483 
485  if (i < 0) {
486  av_log(h->avctx, AV_LOG_ERROR, "no frame buffer available\n");
487  return i;
488  }
489  pic = &h->DPB[i];
490 
491  pic->reference = h->droppable ? 0 : h->picture_structure;
492  pic->f->coded_picture_number = h->coded_picture_number++;
493  pic->field_picture = h->picture_structure != PICT_FRAME;
494  pic->frame_num = h->poc.frame_num;
495  /*
496  * Zero key_frame here; IDR markings per slice in frame or fields are ORed
497  * in later.
498  * See decode_nal_units().
499  */
500  pic->f->key_frame = 0;
501  pic->mmco_reset = 0;
502  pic->recovered = 0;
503  pic->invalid_gap = 0;
504  pic->sei_recovery_frame_cnt = h->sei.recovery_point.recovery_frame_cnt;
505 
506  pic->f->pict_type = h->slice_ctx[0].slice_type;
507 
508  pic->f->crop_left = h->crop_left;
509  pic->f->crop_right = h->crop_right;
510  pic->f->crop_top = h->crop_top;
511  pic->f->crop_bottom = h->crop_bottom;
512 
    /* film grain is applied in software only when no hwaccel is active and
     * the caller did not ask for the raw side data instead */
513  pic->needs_fg = h->sei.common.film_grain_characteristics.present && !h->avctx->hwaccel &&
514  !(h->avctx->export_side_data & AV_CODEC_EXPORT_DATA_FILM_GRAIN);
515 
516  if ((ret = alloc_picture(h, pic)) < 0)
517  return ret;
518 
519  h->cur_pic_ptr = pic;
520  ff_h264_unref_picture(h, &h->cur_pic);
521  if (CONFIG_ERROR_RESILIENCE) {
522  ff_h264_set_erpic(&h->er.cur_pic, NULL);
523  }
524 
525  if ((ret = ff_h264_ref_picture(h, &h->cur_pic, h->cur_pic_ptr)) < 0)
526  return ret;
527 
528  for (i = 0; i < h->nb_slice_ctx; i++) {
529  h->slice_ctx[i].linesize = h->cur_pic_ptr->f->linesize[0];
530  h->slice_ctx[i].uvlinesize = h->cur_pic_ptr->f->linesize[1];
531  }
532 
533  if (CONFIG_ERROR_RESILIENCE && h->enable_er) {
534  ff_er_frame_start(&h->er);
535  ff_h264_set_erpic(&h->er.last_pic, NULL);
536  ff_h264_set_erpic(&h->er.next_pic, NULL);
537  }
538 
    /* byte offsets of each 4x4 block within the frame planes, for both
     * progressive (stride*4) and MBAFF (stride*8) addressing */
539  for (i = 0; i < 16; i++) {
540  h->block_offset[i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 4 * pic->f->linesize[0] * ((scan8[i] - scan8[0]) >> 3);
541  h->block_offset[48 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 8 * pic->f->linesize[0] * ((scan8[i] - scan8[0]) >> 3);
542  }
543  for (i = 0; i < 16; i++) {
544  h->block_offset[16 + i] =
545  h->block_offset[32 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 4 * pic->f->linesize[1] * ((scan8[i] - scan8[0]) >> 3);
546  h->block_offset[48 + 16 + i] =
547  h->block_offset[48 + 32 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 8 * pic->f->linesize[1] * ((scan8[i] - scan8[0]) >> 3);
548  }
549 
550  /* We mark the current picture as non-reference after allocating it, so
551  * that if we break out due to an error it can be released automatically
552  * in the next ff_mpv_frame_start().
553  */
554  h->cur_pic_ptr->reference = 0;
555 
556  h->cur_pic_ptr->field_poc[0] = h->cur_pic_ptr->field_poc[1] = INT_MAX;
557 
558  h->next_output_pic = NULL;
559 
560  h->postpone_filter = 0;
561 
562  h->mb_aff_frame = h->ps.sps->mb_aff && (h->picture_structure == PICT_FRAME);
563 
564  if (h->sei.common.unregistered.x264_build >= 0)
565  h->x264_build = h->sei.common.unregistered.x264_build;
566 
567  assert(h->cur_pic_ptr->long_ref == 0);
568 
569  return 0;
570 }
571 
/* Save the bottom line(s) of the current macroblock row into
 * sl->top_borders so the deblocking / intra prediction of the row below
 * can read them after the row is overwritten.  Handles frame-MBAFF pairs
 * and the 4:2:0 / 4:2:2 / 4:4:4 chroma layouts at 8- and high-bit depth.
 * NOTE(review): the first signature line is missing from this extraction
 * (presumably `static av_always_inline void backup_mb_border(const
 * H264Context *h, H264SliceContext *sl,` — confirm against upstream). */
573  uint8_t *src_y,
574  uint8_t *src_cb, uint8_t *src_cr,
575  int linesize, int uvlinesize,
576  int simple)
577 {
578  uint8_t *top_border;
579  int top_idx = 1;
580  const int pixel_shift = h->pixel_shift;
581  int chroma444 = CHROMA444(h);
582  int chroma422 = CHROMA422(h);
583 
    /* step back one line so the "row 16" reads below hit the MB's last line */
584  src_y -= linesize;
585  src_cb -= uvlinesize;
586  src_cr -= uvlinesize;
587 
588  if (!simple && FRAME_MBAFF(h)) {
589  if (sl->mb_y & 1) {
590  if (!MB_MBAFF(sl)) {
    /* bottom MB of a non-MBAFF pair: save its second-to-last line too */
591  top_border = sl->top_borders[0][sl->mb_x];
592  AV_COPY128(top_border, src_y + 15 * linesize);
593  if (pixel_shift)
594  AV_COPY128(top_border + 16, src_y + 15 * linesize + 16);
595  if (simple || !CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
596  if (chroma444) {
597  if (pixel_shift) {
598  AV_COPY128(top_border + 32, src_cb + 15 * uvlinesize);
599  AV_COPY128(top_border + 48, src_cb + 15 * uvlinesize + 16);
600  AV_COPY128(top_border + 64, src_cr + 15 * uvlinesize);
601  AV_COPY128(top_border + 80, src_cr + 15 * uvlinesize + 16);
602  } else {
603  AV_COPY128(top_border + 16, src_cb + 15 * uvlinesize);
604  AV_COPY128(top_border + 32, src_cr + 15 * uvlinesize);
605  }
606  } else if (chroma422) {
607  if (pixel_shift) {
608  AV_COPY128(top_border + 32, src_cb + 15 * uvlinesize);
609  AV_COPY128(top_border + 48, src_cr + 15 * uvlinesize);
610  } else {
611  AV_COPY64(top_border + 16, src_cb + 15 * uvlinesize);
612  AV_COPY64(top_border + 24, src_cr + 15 * uvlinesize);
613  }
614  } else {
615  if (pixel_shift) {
616  AV_COPY128(top_border + 32, src_cb + 7 * uvlinesize);
617  AV_COPY128(top_border + 48, src_cr + 7 * uvlinesize);
618  } else {
619  AV_COPY64(top_border + 16, src_cb + 7 * uvlinesize);
620  AV_COPY64(top_border + 24, src_cr + 7 * uvlinesize);
621  }
622  }
623  }
624  }
625  } else if (MB_MBAFF(sl)) {
626  top_idx = 0;
627  } else
628  return;
629  }
630 
631  top_border = sl->top_borders[top_idx][sl->mb_x];
632  /* There are two lines saved, the line above the top macroblock
633  * of a pair, and the line above the bottom macroblock. */
634  AV_COPY128(top_border, src_y + 16 * linesize);
635  if (pixel_shift)
636  AV_COPY128(top_border + 16, src_y + 16 * linesize + 16);
637 
638  if (simple || !CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
639  if (chroma444) {
640  if (pixel_shift) {
641  AV_COPY128(top_border + 32, src_cb + 16 * linesize);
642  AV_COPY128(top_border + 48, src_cb + 16 * linesize + 16);
643  AV_COPY128(top_border + 64, src_cr + 16 * linesize);
644  AV_COPY128(top_border + 80, src_cr + 16 * linesize + 16);
645  } else {
646  AV_COPY128(top_border + 16, src_cb + 16 * linesize);
647  AV_COPY128(top_border + 32, src_cr + 16 * linesize);
648  }
649  } else if (chroma422) {
650  if (pixel_shift) {
651  AV_COPY128(top_border + 32, src_cb + 16 * uvlinesize);
652  AV_COPY128(top_border + 48, src_cr + 16 * uvlinesize);
653  } else {
654  AV_COPY64(top_border + 16, src_cb + 16 * uvlinesize);
655  AV_COPY64(top_border + 24, src_cr + 16 * uvlinesize);
656  }
657  } else {
658  if (pixel_shift) {
659  AV_COPY128(top_border + 32, src_cb + 8 * uvlinesize);
660  AV_COPY128(top_border + 48, src_cr + 8 * uvlinesize);
661  } else {
662  AV_COPY64(top_border + 16, src_cb + 8 * uvlinesize);
663  AV_COPY64(top_border + 24, src_cr + 8 * uvlinesize);
664  }
665  }
666  }
667 }
668 
669 /**
670  * Initialize implicit_weight table.
671  * @param field 0/1 initialize the weight for interlaced MBAFF
672  * -1 initializes the rest
673  */
/* Fill sl->pwt.implicit_weight for implicit weighted bi-prediction: for
 * each (ref0, ref1) pair the weight is derived from the POC distances of
 * the two references to the current picture, clamped to a default of 32
 * when the scaling factor falls outside the allowed range or a long-term
 * reference is involved.  `field` selects frame (-1) vs per-field (0/1)
 * initialization as described in the comment above.
 * NOTE(review): the signature line is missing from this extraction
 * (presumably `static void implicit_weight_table(const H264Context *h,
 * H264SliceContext *sl, int field)`), as is the line setting
 * chroma_log2_weight_denom after luma_log2_weight_denom. */
675 {
676  int ref0, ref1, i, cur_poc, ref_start, ref_count0, ref_count1;
677 
678  for (i = 0; i < 2; i++) {
679  sl->pwt.luma_weight_flag[i] = 0;
680  sl->pwt.chroma_weight_flag[i] = 0;
681  }
682 
683  if (field < 0) {
684  if (h->picture_structure == PICT_FRAME) {
685  cur_poc = h->cur_pic_ptr->poc;
686  } else {
687  cur_poc = h->cur_pic_ptr->field_poc[h->picture_structure - 1];
688  }
    /* fast path: single ref each side, equidistant in POC → no weighting */
689  if (sl->ref_count[0] == 1 && sl->ref_count[1] == 1 && !FRAME_MBAFF(h) &&
690  sl->ref_list[0][0].poc + (int64_t)sl->ref_list[1][0].poc == 2LL * cur_poc) {
691  sl->pwt.use_weight = 0;
692  sl->pwt.use_weight_chroma = 0;
693  return;
694  }
695  ref_start = 0;
696  ref_count0 = sl->ref_count[0];
697  ref_count1 = sl->ref_count[1];
698  } else {
    /* per-field pass for MBAFF: field refs live in list entries 16..  */
699  cur_poc = h->cur_pic_ptr->field_poc[field];
700  ref_start = 16;
701  ref_count0 = 16 + 2 * sl->ref_count[0];
702  ref_count1 = 16 + 2 * sl->ref_count[1];
703  }
704 
705  sl->pwt.use_weight = 2;
706  sl->pwt.use_weight_chroma = 2;
707  sl->pwt.luma_log2_weight_denom = 5;
709 
710  for (ref0 = ref_start; ref0 < ref_count0; ref0++) {
711  int64_t poc0 = sl->ref_list[0][ref0].poc;
712  for (ref1 = ref_start; ref1 < ref_count1; ref1++) {
713  int w = 32;
714  if (!sl->ref_list[0][ref0].parent->long_ref && !sl->ref_list[1][ref1].parent->long_ref) {
715  int poc1 = sl->ref_list[1][ref1].poc;
716  int td = av_clip_int8(poc1 - poc0);
717  if (td) {
718  int tb = av_clip_int8(cur_poc - poc0);
719  int tx = (16384 + (FFABS(td) >> 1)) / td;
720  int dist_scale_factor = (tb * tx + 32) >> 8;
721  if (dist_scale_factor >= -64 && dist_scale_factor <= 128)
722  w = 64 - dist_scale_factor;
723  }
724  }
725  if (field < 0) {
726  sl->pwt.implicit_weight[ref0][ref1][0] =
727  sl->pwt.implicit_weight[ref0][ref1][1] = w;
728  } else {
729  sl->pwt.implicit_weight[ref0][ref1][field] = w;
730  }
731  }
732  }
733 }
734 
735 /**
736  * initialize scan tables
737  */
/* Build the decoder's scan tables: the static raster tables above are
 * transposed into h->* (the decoder stores scans transposed), and the
 * *_q0 variants used for lossless (transform-bypass) QP 0 blocks are
 * either the untransposed originals or copies of the transposed ones.
 * NOTE(review): the signature line is missing from this extraction
 * (presumably `static void init_scan_tables(H264Context *h)`). */
739 {
740  int i;
741  for (i = 0; i < 16; i++) {
    /* swap 4x4 row/column: (row,col) -> (col,row) */
742 #define TRANSPOSE(x) ((x) >> 2) | (((x) << 2) & 0xF)
743  h->zigzag_scan[i] = TRANSPOSE(ff_zigzag_scan[i]);
744  h->field_scan[i] = TRANSPOSE(field_scan[i]);
745 #undef TRANSPOSE
746  }
747  for (i = 0; i < 64; i++) {
    /* swap 8x8 row/column */
748 #define TRANSPOSE(x) ((x) >> 3) | (((x) & 7) << 3)
749  h->zigzag_scan8x8[i] = TRANSPOSE(ff_zigzag_direct[i]);
750  h->zigzag_scan8x8_cavlc[i] = TRANSPOSE(zigzag_scan8x8_cavlc[i]);
751  h->field_scan8x8[i] = TRANSPOSE(field_scan8x8[i]);
752  h->field_scan8x8_cavlc[i] = TRANSPOSE(field_scan8x8_cavlc[i]);
753 #undef TRANSPOSE
754  }
755  if (h->ps.sps->transform_bypass) { // FIXME same ugly
756  memcpy(h->zigzag_scan_q0 , ff_zigzag_scan , sizeof(h->zigzag_scan_q0 ));
757  memcpy(h->zigzag_scan8x8_q0 , ff_zigzag_direct , sizeof(h->zigzag_scan8x8_q0 ));
758  memcpy(h->zigzag_scan8x8_cavlc_q0 , zigzag_scan8x8_cavlc , sizeof(h->zigzag_scan8x8_cavlc_q0));
759  memcpy(h->field_scan_q0 , field_scan , sizeof(h->field_scan_q0 ));
760  memcpy(h->field_scan8x8_q0 , field_scan8x8 , sizeof(h->field_scan8x8_q0 ));
761  memcpy(h->field_scan8x8_cavlc_q0 , field_scan8x8_cavlc , sizeof(h->field_scan8x8_cavlc_q0 ));
762  } else {
763  memcpy(h->zigzag_scan_q0 , h->zigzag_scan , sizeof(h->zigzag_scan_q0 ));
764  memcpy(h->zigzag_scan8x8_q0 , h->zigzag_scan8x8 , sizeof(h->zigzag_scan8x8_q0 ));
765  memcpy(h->zigzag_scan8x8_cavlc_q0 , h->zigzag_scan8x8_cavlc , sizeof(h->zigzag_scan8x8_cavlc_q0));
766  memcpy(h->field_scan_q0 , h->field_scan , sizeof(h->field_scan_q0 ));
767  memcpy(h->field_scan8x8_q0 , h->field_scan8x8 , sizeof(h->field_scan8x8_q0 ));
768  memcpy(h->field_scan8x8_cavlc_q0 , h->field_scan8x8_cavlc , sizeof(h->field_scan8x8_cavlc_q0 ));
769  }
770 }
771 
/* Build the list of candidate pixel formats for the current SPS (bit depth,
 * chroma format, color range/space) — hwaccel formats first, then the
 * matching software format — and let the format-negotiation callback pick
 * one.  If the context's current pix_fmt is already among the candidates
 * and force_callback is 0, it is kept without re-invoking the callback.
 * Returns the chosen AVPixelFormat or AVERROR_INVALIDDATA for an
 * unsupported luma bit depth. */
772 static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
773 {
774 #define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
775  (CONFIG_H264_D3D11VA_HWACCEL * 2) + \
776  CONFIG_H264_NVDEC_HWACCEL + \
777  CONFIG_H264_VAAPI_HWACCEL + \
778  CONFIG_H264_VIDEOTOOLBOX_HWACCEL + \
779  CONFIG_H264_VDPAU_HWACCEL)
780  enum AVPixelFormat pix_fmts[HWACCEL_MAX + 2], *fmt = pix_fmts;
781  const enum AVPixelFormat *choices = pix_fmts;
782  int i;
783 
784  switch (h->ps.sps->bit_depth_luma) {
785  case 9:
786  if (CHROMA444(h)) {
787  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
788  *fmt++ = AV_PIX_FMT_GBRP9;
789  } else
790  *fmt++ = AV_PIX_FMT_YUV444P9;
791  } else if (CHROMA422(h))
792  *fmt++ = AV_PIX_FMT_YUV422P9;
793  else
794  *fmt++ = AV_PIX_FMT_YUV420P9;
795  break;
796  case 10:
797 #if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
798  if (h->avctx->colorspace != AVCOL_SPC_RGB)
799  *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
800 #endif
801  if (CHROMA444(h)) {
802  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
803  *fmt++ = AV_PIX_FMT_GBRP10;
804  } else
805  *fmt++ = AV_PIX_FMT_YUV444P10;
806  } else if (CHROMA422(h))
807  *fmt++ = AV_PIX_FMT_YUV422P10;
808  else
809  *fmt++ = AV_PIX_FMT_YUV420P10;
810  break;
811  case 12:
812  if (CHROMA444(h)) {
813  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
814  *fmt++ = AV_PIX_FMT_GBRP12;
815  } else
816  *fmt++ = AV_PIX_FMT_YUV444P12;
817  } else if (CHROMA422(h))
818  *fmt++ = AV_PIX_FMT_YUV422P12;
819  else
820  *fmt++ = AV_PIX_FMT_YUV420P12;
821  break;
822  case 14:
823  if (CHROMA444(h)) {
824  if (h->avctx->colorspace == AVCOL_SPC_RGB) {
825  *fmt++ = AV_PIX_FMT_GBRP14;
826  } else
827  *fmt++ = AV_PIX_FMT_YUV444P14;
828  } else if (CHROMA422(h))
829  *fmt++ = AV_PIX_FMT_YUV422P14;
830  else
831  *fmt++ = AV_PIX_FMT_YUV420P14;
832  break;
833  case 8:
    /* 8-bit: offer every compiled-in hwaccel before the software formats */
834 #if CONFIG_H264_VDPAU_HWACCEL
835  *fmt++ = AV_PIX_FMT_VDPAU;
836 #endif
837 #if CONFIG_H264_NVDEC_HWACCEL
838  *fmt++ = AV_PIX_FMT_CUDA;
839 #endif
840 #if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
841  if (h->avctx->colorspace != AVCOL_SPC_RGB)
842  *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
843 #endif
844  if (CHROMA444(h)) {
845  if (h->avctx->colorspace == AVCOL_SPC_RGB)
846  *fmt++ = AV_PIX_FMT_GBRP;
847  else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
848  *fmt++ = AV_PIX_FMT_YUVJ444P;
849  else
850  *fmt++ = AV_PIX_FMT_YUV444P;
851  } else if (CHROMA422(h)) {
852  if (h->avctx->color_range == AVCOL_RANGE_JPEG)
853  *fmt++ = AV_PIX_FMT_YUVJ422P;
854  else
855  *fmt++ = AV_PIX_FMT_YUV422P;
856  } else {
857 #if CONFIG_H264_DXVA2_HWACCEL
858  *fmt++ = AV_PIX_FMT_DXVA2_VLD;
859 #endif
860 #if CONFIG_H264_D3D11VA_HWACCEL
861  *fmt++ = AV_PIX_FMT_D3D11VA_VLD;
862  *fmt++ = AV_PIX_FMT_D3D11;
863 #endif
864 #if CONFIG_H264_VAAPI_HWACCEL
865  *fmt++ = AV_PIX_FMT_VAAPI;
866 #endif
    /* a codec-declared format list (e.g. a wrapper decoder) overrides ours */
867  if (h->avctx->codec->pix_fmts)
868  choices = h->avctx->codec->pix_fmts;
869  else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
870  *fmt++ = AV_PIX_FMT_YUVJ420P;
871  else
872  *fmt++ = AV_PIX_FMT_YUV420P;
873  }
874  break;
875  default:
876  av_log(h->avctx, AV_LOG_ERROR,
877  "Unsupported bit depth %d\n", h->ps.sps->bit_depth_luma);
878  return AVERROR_INVALIDDATA;
879  }
880 
881  *fmt = AV_PIX_FMT_NONE;
882 
883  for (i=0; choices[i] != AV_PIX_FMT_NONE; i++)
884  if (choices[i] == h->avctx->pix_fmt && !force_callback)
885  return choices[i];
886  return ff_thread_get_format(h->avctx, choices);
887 }
888 
889 /* export coded and cropped frame dimensions to AVCodecContext */
/* Applies SPS cropping to h->width/h->height and, when the
 * container-declared dimensions are compatible (same 16-aligned size, no
 * top/left crop, not larger than the SPS-cropped size), prefers those.
 * NOTE(review): the signature line is missing from this extraction
 * (presumably `static void init_dimensions(H264Context *h)`). */
891 {
892  const SPS *sps = (const SPS*)h->ps.sps;
893  int cr = sps->crop_right;
894  int cl = sps->crop_left;
895  int ct = sps->crop_top;
896  int cb = sps->crop_bottom;
897  int width = h->width - (cr + cl);
898  int height = h->height - (ct + cb);
899  av_assert0(sps->crop_right + sps->crop_left < (unsigned)h->width);
900  av_assert0(sps->crop_top + sps->crop_bottom < (unsigned)h->height);
901 
902  /* handle container cropping */
903  if (h->width_from_caller > 0 && h->height_from_caller > 0 &&
904  !sps->crop_top && !sps->crop_left &&
905  FFALIGN(h->width_from_caller, 16) == FFALIGN(width, 16) &&
906  FFALIGN(h->height_from_caller, 16) == FFALIGN(height, 16) &&
907  h->width_from_caller <= width &&
908  h->height_from_caller <= height) {
909  width = h->width_from_caller;
910  height = h->height_from_caller;
911  cl = 0;
912  ct = 0;
913  cr = h->width - width;
914  cb = h->height - height;
915  } else {
    /* caller dimensions unusable — drop them so they are not reused later */
916  h->width_from_caller = 0;
917  h->height_from_caller = 0;
918  }
919 
920  h->avctx->coded_width = h->width;
921  h->avctx->coded_height = h->height;
922  h->avctx->width = width;
923  h->avctx->height = height;
924  h->crop_right = cr;
925  h->crop_left = cl;
926  h->crop_top = ct;
927  h->crop_bottom = cb;
928 }
929 
/* (Re)initialize the decoder for the active SPS: sample aspect ratio,
 * framerate, dimensions, per-bit-depth DSP/chroma/qpel/pred/videodsp
 * function tables and the slice contexts.  Returns 0 on success or a
 * negative error code (the fail path tears the context back down).
 * NOTE(review): the extraction lost the signature line (presumably
 * `static int h264_slice_header_init(H264Context *h)`) plus the error-code
 * assignments before the gotos, the init_dimensions / table-allocation
 * calls and the free-tables call on the fail path — restore from upstream. */
931 {
932  const SPS *sps = h->ps.sps;
933  int i, ret;
934 
935  if (!sps) {
937  goto fail;
938  }
939 
940  ff_set_sar(h->avctx, sps->vui.sar);
941  av_pix_fmt_get_chroma_sub_sample(h->avctx->pix_fmt,
942  &h->chroma_x_shift, &h->chroma_y_shift);
943 
944  if (sps->timing_info_present_flag) {
945  int64_t den = sps->time_scale;
    /* old x264 builds (< 44) wrote half the timescale — compensate */
946  if (h->x264_build < 44U)
947  den *= 2;
948  av_reduce(&h->avctx->framerate.den, &h->avctx->framerate.num,
949  sps->num_units_in_tick * h->avctx->ticks_per_frame, den, 1 << 30);
950  }
951 
953 
954  h->first_field = 0;
955  h->prev_interlaced_frame = 1;
956 
959  if (ret < 0) {
960  av_log(h->avctx, AV_LOG_ERROR, "Could not allocate memory\n");
961  goto fail;
962  }
963 
964  if (sps->bit_depth_luma < 8 || sps->bit_depth_luma > 14 ||
965  sps->bit_depth_luma == 11 || sps->bit_depth_luma == 13
966  ) {
967  av_log(h->avctx, AV_LOG_ERROR, "Unsupported bit depth %d\n",
968  sps->bit_depth_luma);
970  goto fail;
971  }
972 
973  h->cur_bit_depth_luma =
974  h->avctx->bits_per_raw_sample = sps->bit_depth_luma;
975  h->cur_chroma_format_idc = sps->chroma_format_idc;
976  h->pixel_shift = sps->bit_depth_luma > 8;
977  h->chroma_format_idc = sps->chroma_format_idc;
978  h->bit_depth_luma = sps->bit_depth_luma;
979 
    /* re-select all bit-depth/chroma-dependent function tables */
980  ff_h264dsp_init(&h->h264dsp, sps->bit_depth_luma,
981  sps->chroma_format_idc);
982  ff_h264chroma_init(&h->h264chroma, sps->bit_depth_chroma);
983  ff_h264qpel_init(&h->h264qpel, sps->bit_depth_luma);
984  ff_h264_pred_init(&h->hpc, AV_CODEC_ID_H264, sps->bit_depth_luma,
985  sps->chroma_format_idc);
986  ff_videodsp_init(&h->vdsp, sps->bit_depth_luma);
987 
988  if (!HAVE_THREADS || !(h->avctx->active_thread_type & FF_THREAD_SLICE)) {
989  ff_h264_slice_context_init(h, &h->slice_ctx[0]);
990  } else {
991  for (i = 0; i < h->nb_slice_ctx; i++) {
992  H264SliceContext *sl = &h->slice_ctx[i];
993 
994  sl->h264 = h;
995  sl->intra4x4_pred_mode = h->intra4x4_pred_mode + i * 8 * 2 * h->mb_stride;
996  sl->mvd_table[0] = h->mvd_table[0] + i * 8 * 2 * h->mb_stride;
997  sl->mvd_table[1] = h->mvd_table[1] + i * 8 * 2 * h->mb_stride;
998 
1000  }
1001  }
1002 
1003  h->context_initialized = 1;
1004 
1005  return 0;
1006 fail:
1008  h->context_initialized = 0;
1009  return ret;
1010 }
1011 
/*
 * Normalize a pixel format for comparison purposes: presumably maps the
 * deprecated full-range "J" formats (e.g. YUVJ420P) to their limited-range
 * equivalents and returns everything else unchanged — TODO confirm, since
 * the extractor dropped the case labels (file lines 1014-1017); only the
 * default branch is visible here.  Used by h264_init_ps() so that a pure
 * range change does not force a full context reinit.
 */
1013 {
1014  switch (a) {
1018  default:
1019  return a;
1020  }
1021 }
1022 
1023 static int h264_init_ps(H264Context *h, const H264SliceContext *sl, int first_slice)
1024 {
1025  const SPS *sps;
1026  int needs_reinit = 0, must_reinit, ret;
1027 
     /* On the first slice of a picture, activate the PPS referenced by the
      * slice header (taking a new reference on it). */
1028  if (first_slice) {
1029  av_buffer_unref(&h->ps.pps_ref);
1030  h->ps.pps = NULL;
1031  h->ps.pps_ref = av_buffer_ref(h->ps.pps_list[sl->pps_id]);
1032  if (!h->ps.pps_ref)
1033  return AVERROR(ENOMEM);
1034  h->ps.pps = (const PPS*)h->ps.pps_ref->data;
1035  }
1036 
     /* If the PPS points at a different SPS than the currently active one,
      * switch to it and check whether geometry/bit depth/chroma format
      * changed in a way that requires reinitializing the context. */
1037  if (h->ps.sps != h->ps.pps->sps) {
1038  h->ps.sps = (const SPS*)h->ps.pps->sps;
1039 
1040  if (h->mb_width != h->ps.sps->mb_width ||
1041  h->mb_height != h->ps.sps->mb_height ||
1042  h->cur_bit_depth_luma != h->ps.sps->bit_depth_luma ||
1043  h->cur_chroma_format_idc != h->ps.sps->chroma_format_idc
1044  )
1045  needs_reinit = 1;
1046 
1047  if (h->bit_depth_luma != h->ps.sps->bit_depth_luma ||
1048  h->chroma_format_idc != h->ps.sps->chroma_format_idc)
1049  needs_reinit = 1;
1050  }
1051  sps = h->ps.sps;
1052 
     /* Independent of an SPS switch, detect a mismatch between the active
      * SPS and the already-initialized context (coded size, bit depth,
      * chroma format, pixel format, SAR). */
1053  must_reinit = (h->context_initialized &&
1054  ( 16*sps->mb_width != h->avctx->coded_width
1055  || 16*sps->mb_height != h->avctx->coded_height
1056  || h->cur_bit_depth_luma != sps->bit_depth_luma
1057  || h->cur_chroma_format_idc != sps->chroma_format_idc
1058  || h->mb_width != sps->mb_width
1059  || h->mb_height != sps->mb_height
1060  ));
     /* non_j_pixfmt() comparison makes a pure full/limited-range change
      * not count as a pixel format change */
1061  if (h->avctx->pix_fmt == AV_PIX_FMT_NONE
1062  || (non_j_pixfmt(h->avctx->pix_fmt) != non_j_pixfmt(get_pixel_format(h, 0))))
1063  must_reinit = 1;
1064 
1065  if (first_slice && av_cmp_q(sps->vui.sar, h->avctx->sample_aspect_ratio))
1066  must_reinit = 1;
1067 
1068  if (!h->setup_finished) {
1069  h->avctx->profile = ff_h264_get_profile(sps);
1070  h->avctx->level = sps->level_idc;
1071  h->avctx->refs = sps->ref_frame_count;
1072 
1073  h->mb_width = sps->mb_width;
1074  h->mb_height = sps->mb_height;
1075  h->mb_num = h->mb_width * h->mb_height;
     /* one extra column in the stride (edge macroblock) */
1076  h->mb_stride = h->mb_width + 1;
1077 
1078  h->b_stride = h->mb_width * 4;
1079 
1080  h->chroma_y_shift = sps->chroma_format_idc <= 1; // 400 uses yuv420p
1081 
1082  h->width = 16 * h->mb_width;
1083  h->height = 16 * h->mb_height;
1084 
1085  init_dimensions(h);
1086 
1087  if (sps->vui.video_signal_type_present_flag) {
1088  h->avctx->color_range = sps->vui.video_full_range_flag > 0 ? AVCOL_RANGE_JPEG
1089  : AVCOL_RANGE_MPEG;
1090  if (sps->vui.colour_description_present_flag) {
1091  if (h->avctx->colorspace != sps->vui.matrix_coeffs)
1092  needs_reinit = 1;
1093  h->avctx->color_primaries = sps->vui.colour_primaries;
1094  h->avctx->color_trc = sps->vui.transfer_characteristics;
1095  h->avctx->colorspace = sps->vui.matrix_coeffs;
1096  }
1097  }
1098 
     /* the alternative-transfer SEI overrides the VUI transfer
      * characteristic when it names a known, specified transfer */
1099  if (h->sei.common.alternative_transfer.present &&
1100  av_color_transfer_name(h->sei.common.alternative_transfer.preferred_transfer_characteristics) &&
1101  h->sei.common.alternative_transfer.preferred_transfer_characteristics != AVCOL_TRC_UNSPECIFIED) {
1102  h->avctx->color_trc = h->sei.common.alternative_transfer.preferred_transfer_characteristics;
1103  }
1104  }
1105  h->avctx->chroma_sample_location = sps->vui.chroma_location;
1106 
1107  if (!h->context_initialized || must_reinit || needs_reinit) {
1108  int flush_changes = h->context_initialized;
1109  h->context_initialized = 0;
     /* mid-picture geometry changes are invalid: only the first slice
      * context may trigger a reinit */
1110  if (sl != h->slice_ctx) {
1111  av_log(h->avctx, AV_LOG_ERROR,
1112  "changing width %d -> %d / height %d -> %d on "
1113  "slice %d\n",
1114  h->width, h->avctx->coded_width,
1115  h->height, h->avctx->coded_height,
1116  h->current_slice + 1);
1117  return AVERROR_INVALIDDATA;
1118  }
1119 
1120  av_assert1(first_slice);
1121 
1122  if (flush_changes)
     /* (file line 1123 omitted by extractor — flush call guarded above) */
1124 
1125  if ((ret = get_pixel_format(h, 1)) < 0)
1126  return ret;
1127  h->avctx->pix_fmt = ret;
1128 
1129  av_log(h->avctx, AV_LOG_VERBOSE, "Reinit context to %dx%d, "
1130  "pix_fmt: %s\n", h->width, h->height, av_get_pix_fmt_name(h->avctx->pix_fmt));
1131 
1132  if ((ret = h264_slice_header_init(h)) < 0) {
1133  av_log(h->avctx, AV_LOG_ERROR,
1134  "h264_slice_header_init() failed\n");
1135  return ret;
1136  }
1137  }
1138 
1139  return 0;
1140 }
1141 
/*
 * Derive the output-frame properties of the current picture — interlacing
 * flag, repeat_pict, top_field_first, common SEI-driven side data and SMPTE
 * timecode side data — from the picture timing SEI and the decoding state.
 *
 * NOTE(review): extractor rendering.  The function signature (file line
 * 1142, h264_export_frame_props in upstream — TODO confirm), the
 * H264_SEI_PIC_STRUCT_* case labels of the switch below, and the side-data
 * declaration/allocation lines 1239-1240 (which introduce "tcside") are not
 * visible.  Only comments were added; visible code is byte-identical.
 */
1143 {
1144  const SPS *sps = h->ps.sps;
1145  H264Picture *cur = h->cur_pic_ptr;
1146  AVFrame *out = cur->f;
1147  int ret;
1148 
1149  out->interlaced_frame = 0;
1150  out->repeat_pict = 0;
1151 
1152  /* Signal interlacing information externally. */
1153  /* Prioritize picture timing SEI information over used
1154  * decoding process if it exists. */
1155  if (h->sei.picture_timing.present) {
1156  int ret = ff_h264_sei_process_picture_timing(&h->sei.picture_timing, sps,
1157  h->avctx);
1158  if (ret < 0) {
1159  av_log(h->avctx, AV_LOG_ERROR, "Error processing a picture timing SEI\n");
1160  if (h->avctx->err_recognition & AV_EF_EXPLODE)
1161  return ret;
     /* best effort: drop the broken SEI and fall back to the
      * decoding-process-derived flags below */
1162  h->sei.picture_timing.present = 0;
1163  }
1164  }
1165 
1166  if (sps->pic_struct_present_flag && h->sei.picture_timing.present) {
1167  H264SEIPictureTiming *pt = &h->sei.picture_timing;
     /* (case labels omitted by extractor; the branches below map
      * pic_struct values to interlaced_frame / repeat_pict) */
1168  switch (pt->pic_struct) {
1170  break;
1173  out->interlaced_frame = 1;
1174  break;
1178  out->interlaced_frame = 1;
1179  else
1180  // try to flag soft telecine progressive
1181  out->interlaced_frame = h->prev_interlaced_frame;
1182  break;
1185  /* Signal the possibility of telecined film externally
1186  * (pic_struct 5,6). From these hints, let the applications
1187  * decide if they apply deinterlacing. */
1188  out->repeat_pict = 1;
1189  break;
1191  out->repeat_pict = 2;
1192  break;
1194  out->repeat_pict = 4;
1195  break;
1196  }
1197 
     /* ct_type bit 1 set => interlaced source, for frame/field pic_structs */
1198  if ((pt->ct_type & 3) &&
1199  pt->pic_struct <= H264_SEI_PIC_STRUCT_BOTTOM_TOP)
1200  out->interlaced_frame = (pt->ct_type & (1 << 1)) != 0;
1201  } else {
1202  /* Derive interlacing flag from used decoding process. */
1203  out->interlaced_frame = FIELD_OR_MBAFF_PICTURE(h);
1204  }
1205  h->prev_interlaced_frame = out->interlaced_frame;
1206 
1207  if (cur->field_poc[0] != cur->field_poc[1]) {
1208  /* Derive top_field_first from field pocs. */
1209  out->top_field_first = cur->field_poc[0] < cur->field_poc[1];
1210  } else {
1211  if (sps->pic_struct_present_flag && h->sei.picture_timing.present) {
1212  /* Use picture timing SEI information. Even if it is a
1213  * information of a past frame, better than nothing. */
1214  if (h->sei.picture_timing.pic_struct == H264_SEI_PIC_STRUCT_TOP_BOTTOM ||
1215  h->sei.picture_timing.pic_struct == H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
1216  out->top_field_first = 1;
1217  else
1218  out->top_field_first = 0;
1219  } else if (out->interlaced_frame) {
1220  /* Default to top field first when pic_struct_present_flag
1221  * is not set but interlaced frame detected */
1222  out->top_field_first = 1;
1223  } else {
1224  /* Most likely progressive */
1225  out->top_field_first = 0;
1226  }
1227  }
1228 
     /* attach common H.2645 SEI-derived side data (HDR metadata etc.) */
1229  ret = ff_h2645_sei_to_frame(out, &h->sei.common, AV_CODEC_ID_H264, h->avctx,
1230  &sps->vui, sps->bit_depth_luma, sps->bit_depth_chroma,
1231  cur->poc + (h->poc_offset << 5));
1232  if (ret < 0)
1233  return ret;
1234 
1235  if (h->sei.picture_timing.timecode_cnt > 0) {
1236  uint32_t *tc_sd;
1237  char tcbuf[AV_TIMECODE_STR_SIZE];
1238 
     /* (lines 1239-1240 omitted — side-data allocation producing "tcside",
      * sized for a count word plus up to 3 packed timecodes) */
1241  sizeof(uint32_t)*4);
1242  if (!tcside)
1243  return AVERROR(ENOMEM);
1244 
1245  tc_sd = (uint32_t*)tcside->data;
1246  tc_sd[0] = h->sei.picture_timing.timecode_cnt;
1247 
     /* pack each SEI timecode as SMPTE 12M and mirror the last one as a
      * "timecode" metadata string */
1248  for (int i = 0; i < tc_sd[0]; i++) {
1249  int drop = h->sei.picture_timing.timecode[i].dropframe;
1250  int hh = h->sei.picture_timing.timecode[i].hours;
1251  int mm = h->sei.picture_timing.timecode[i].minutes;
1252  int ss = h->sei.picture_timing.timecode[i].seconds;
1253  int ff = h->sei.picture_timing.timecode[i].frame;
1254 
1255  tc_sd[i + 1] = av_timecode_get_smpte(h->avctx->framerate, drop, hh, mm, ss, ff);
1256  av_timecode_make_smpte_tc_string2(tcbuf, h->avctx->framerate, tc_sd[i + 1], 0, 0);
1257  av_dict_set(&out->metadata, "timecode", tcbuf, 0);
1258  }
1259  h->sei.picture_timing.timecode_cnt = 0;
1260  }
1261 
1262  return 0;
1263 }
1264 
/*
 * Insert the just-decoded picture into the delayed-output queue, estimate
 * the required reorder depth from recent POCs, and select the picture to
 * output next (lowest POC among the delayed ones), handling MMCO resets,
 * keyframes and the recovered/corrupt flags.
 *
 * NOTE(review): extractor rendering.  The function signature (file line
 * 1265, h264_select_output_frame in upstream — TODO confirm) and file line
 * 1309 (presumably an assertion on "pics") are not visible.  Only comments
 * were added; visible code is byte-identical.
 */
1266 {
1267  const SPS *sps = h->ps.sps;
1268  H264Picture *out = h->cur_pic_ptr;
1269  H264Picture *cur = h->cur_pic_ptr;
1270  int i, pics, out_of_order, out_idx;
1271 
1272  cur->mmco_reset = h->mmco_reset;
1273  h->mmco_reset = 0;
1274 
1275  if (sps->bitstream_restriction_flag ||
1276  h->avctx->strict_std_compliance >= FF_COMPLIANCE_STRICT) {
1277  h->avctx->has_b_frames = FFMAX(h->avctx->has_b_frames, sps->num_reorder_frames);
1278  }
1279 
     /* insert cur->poc into the sorted last_pocs window; the insertion
      * position measures how far out of order this picture arrived */
1280  for (i = 0; 1; i++) {
1281  if(i == H264_MAX_DPB_FRAMES || cur->poc < h->last_pocs[i]){
1282  if(i)
1283  h->last_pocs[i-1] = cur->poc;
1284  break;
1285  } else if(i) {
1286  h->last_pocs[i-1]= h->last_pocs[i];
1287  }
1288  }
1289  out_of_order = H264_MAX_DPB_FRAMES - i;
1290  if( cur->f->pict_type == AV_PICTURE_TYPE_B
1291  || (h->last_pocs[H264_MAX_DPB_FRAMES-2] > INT_MIN && h->last_pocs[H264_MAX_DPB_FRAMES-1] - (int64_t)h->last_pocs[H264_MAX_DPB_FRAMES-2] > 2))
1292  out_of_order = FFMAX(out_of_order, 1);
1293  if (out_of_order == H264_MAX_DPB_FRAMES) {
     /* POC below everything we have seen: treat as an implicit reset */
1294  av_log(h->avctx, AV_LOG_VERBOSE, "Invalid POC %d<%d\n", cur->poc, h->last_pocs[0]);
1295  for (i = 1; i < H264_MAX_DPB_FRAMES; i++)
1296  h->last_pocs[i] = INT_MIN;
1297  h->last_pocs[0] = cur->poc;
1298  cur->mmco_reset = 1;
1299  } else if(h->avctx->has_b_frames < out_of_order && !sps->bitstream_restriction_flag){
1300  int loglevel = h->avctx->frame_number > 1 ? AV_LOG_WARNING : AV_LOG_VERBOSE;
1301  av_log(h->avctx, loglevel, "Increasing reorder buffer to %d\n", out_of_order);
1302  h->avctx->has_b_frames = out_of_order;
1303  }
1304 
1305  pics = 0;
1306  while (h->delayed_pic[pics])
1307  pics++;
1308 
     /* (file line 1309 omitted — presumably asserts pics is within bounds) */
1310 
1311  h->delayed_pic[pics++] = cur;
1312  if (cur->reference == 0)
1313  cur->reference = DELAYED_PIC_REF;
1314 
     /* pick the delayed picture with the lowest POC, stopping at the first
      * keyframe / MMCO reset boundary */
1315  out = h->delayed_pic[0];
1316  out_idx = 0;
1317  for (i = 1; h->delayed_pic[i] &&
1318  !h->delayed_pic[i]->f->key_frame &&
1319  !h->delayed_pic[i]->mmco_reset;
1320  i++)
1321  if (h->delayed_pic[i]->poc < out->poc) {
1322  out = h->delayed_pic[i];
1323  out_idx = i;
1324  }
1325  if (h->avctx->has_b_frames == 0 &&
1326  (h->delayed_pic[0]->f->key_frame || h->delayed_pic[0]->mmco_reset))
1327  h->next_outputed_poc = INT_MIN;
1328  out_of_order = out->poc < h->next_outputed_poc;
1329 
1330  if (out_of_order || pics > h->avctx->has_b_frames) {
1331  out->reference &= ~DELAYED_PIC_REF;
     /* compact the queue over the removed entry */
1332  for (i = out_idx; h->delayed_pic[i]; i++)
1333  h->delayed_pic[i] = h->delayed_pic[i + 1];
1334  }
1335  if (!out_of_order && pics > h->avctx->has_b_frames) {
1336  h->next_output_pic = out;
1337  if (out_idx == 0 && h->delayed_pic[0] && (h->delayed_pic[0]->f->key_frame || h->delayed_pic[0]->mmco_reset)) {
1338  h->next_outputed_poc = INT_MIN;
1339  } else
1340  h->next_outputed_poc = out->poc;
1341 
1342  if (out->recovered) {
1343  // We have reached an recovery point and all frames after it in
1344  // display order are "recovered".
1345  h->frame_recovered |= FRAME_RECOVERED_SEI;
1346  }
1347  out->recovered |= !!(h->frame_recovered & FRAME_RECOVERED_SEI);
1348 
1349  if (!out->recovered) {
     /* suppress unrecovered output unless the user explicitly asked
      * for corrupt frames */
1350  if (!(h->avctx->flags & AV_CODEC_FLAG_OUTPUT_CORRUPT) &&
1351  !(h->avctx->flags2 & AV_CODEC_FLAG2_SHOW_ALL)) {
1352  h->next_output_pic = NULL;
1353  } else {
1354  out->f->flags |= AV_FRAME_FLAG_CORRUPT;
1355  }
1356  }
1357  } else {
1358  av_log(h->avctx, AV_LOG_DEBUG, "no picture %s\n", out_of_order ? "ooo" : "");
1359  }
1360 
1361  return 0;
1362 }
1363 
1364 /* This function is called right after decoding the slice header for a first
1365  * slice in a field (or a frame). It decides whether we are decoding a new frame
1366  * or a second field in a pair and does the necessary setup.
1367  */
/*
 * NOTE(review): extractor rendering.  The function signature (file line
 * 1368, h264_field_start(H264Context *h, const H264SliceContext *sl, ...)
 * in upstream — TODO confirm) and several statements are not visible:
 * file line 1500 (a ref-pic-marking call whose result is checked at 1501),
 * line 1588 (before the tf.owner assignment), and lines 1648/1652 (the
 * calls whose results are checked in the final block).  Only comments were
 * added; visible code is byte-identical.
 */
1369  const H2645NAL *nal, int first_slice)
1370 {
1371  int i;
1372  const SPS *sps;
1373 
1374  int last_pic_structure, last_pic_droppable, ret;
1375 
1376  ret = h264_init_ps(h, sl, first_slice);
1377  if (ret < 0)
1378  return ret;
1379 
1380  sps = h->ps.sps;
1381 
1382  if (sps && sps->bitstream_restriction_flag &&
1383  h->avctx->has_b_frames < sps->num_reorder_frames) {
1384  h->avctx->has_b_frames = sps->num_reorder_frames;
1385  }
1386 
1387  last_pic_droppable = h->droppable;
1388  last_pic_structure = h->picture_structure;
1389  h->droppable = (nal->ref_idc == 0);
1390  h->picture_structure = sl->picture_structure;
1391 
1392  h->poc.frame_num = sl->frame_num;
1393  h->poc.poc_lsb = sl->poc_lsb;
1394  h->poc.delta_poc_bottom = sl->delta_poc_bottom;
1395  h->poc.delta_poc[0] = sl->delta_poc[0];
1396  h->poc.delta_poc[1] = sl->delta_poc[1];
1397 
1398  if (nal->type == H264_NAL_IDR_SLICE)
1399  h->poc_offset = sl->idr_pic_id;
1400  else if (h->picture_intra_only)
1401  h->poc_offset = 0;
1402 
1403  /* Shorten frame num gaps so we don't have to allocate reference
1404  * frames just to throw them away */
1405  if (h->poc.frame_num != h->poc.prev_frame_num) {
1406  int unwrap_prev_frame_num = h->poc.prev_frame_num;
1407  int max_frame_num = 1 << sps->log2_max_frame_num;
1408 
1409  if (unwrap_prev_frame_num > h->poc.frame_num)
1410  unwrap_prev_frame_num -= max_frame_num;
1411 
1412  if ((h->poc.frame_num - unwrap_prev_frame_num) > sps->ref_frame_count) {
1413  unwrap_prev_frame_num = (h->poc.frame_num - sps->ref_frame_count) - 1;
1414  if (unwrap_prev_frame_num < 0)
1415  unwrap_prev_frame_num += max_frame_num;
1416 
1417  h->poc.prev_frame_num = unwrap_prev_frame_num;
1418  }
1419  }
1420 
1421  /* See if we have a decoded first field looking for a pair...
1422  * Here, we're using that to see if we should mark previously
1423  * decode frames as "finished".
1424  * We have to do that before the "dummy" in-between frame allocation,
1425  * since that can modify h->cur_pic_ptr. */
1426  if (h->first_field) {
1427  int last_field = last_pic_structure == PICT_BOTTOM_FIELD;
1428  av_assert0(h->cur_pic_ptr);
1429  av_assert0(h->cur_pic_ptr->f->buf[0]);
1430  assert(h->cur_pic_ptr->reference != DELAYED_PIC_REF);
1431 
1432  /* Mark old field/frame as completed */
1433  if (h->cur_pic_ptr->tf.owner[last_field] == h->avctx) {
1434  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, last_field);
1435  }
1436 
1437  /* figure out if we have a complementary field pair */
1438  if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
1439  /* Previous field is unmatched. Don't display it, but let it
1440  * remain for reference if marked as such. */
1441  if (last_pic_structure != PICT_FRAME) {
1442  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
1443  last_pic_structure == PICT_TOP_FIELD);
1444  }
1445  } else {
1446  if (h->cur_pic_ptr->frame_num != h->poc.frame_num) {
1447  /* This and previous field were reference, but had
1448  * different frame_nums. Consider this field first in
1449  * pair. Throw away previous field except for reference
1450  * purposes. */
1451  if (last_pic_structure != PICT_FRAME) {
1452  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
1453  last_pic_structure == PICT_TOP_FIELD);
1454  }
1455  } else {
1456  /* Second field in complementary pair */
1457  if (!((last_pic_structure == PICT_TOP_FIELD &&
1458  h->picture_structure == PICT_BOTTOM_FIELD) ||
1459  (last_pic_structure == PICT_BOTTOM_FIELD &&
1460  h->picture_structure == PICT_TOP_FIELD))) {
1461  av_log(h->avctx, AV_LOG_ERROR,
1462  "Invalid field mode combination %d/%d\n",
1463  last_pic_structure, h->picture_structure);
1464  h->picture_structure = last_pic_structure;
1465  h->droppable = last_pic_droppable;
1466  return AVERROR_INVALIDDATA;
1467  } else if (last_pic_droppable != h->droppable) {
1468  avpriv_request_sample(h->avctx,
1469  "Found reference and non-reference fields in the same frame, which");
1470  h->picture_structure = last_pic_structure;
1471  h->droppable = last_pic_droppable;
1472  return AVERROR_PATCHWELCOME;
1473  }
1474  }
1475  }
1476  }
1477 
     /* Fill detected frame_num gaps with dummy frames so reference
      * bookkeeping stays consistent; each dummy is immediately reported
      * finished for both fields. */
1478  while (h->poc.frame_num != h->poc.prev_frame_num && !h->first_field &&
1479  h->poc.frame_num != (h->poc.prev_frame_num + 1) % (1 << sps->log2_max_frame_num)) {
1480  const H264Picture *prev = h->short_ref_count ? h->short_ref[0] : NULL;
1481  av_log(h->avctx, AV_LOG_DEBUG, "Frame num gap %d %d\n",
1482  h->poc.frame_num, h->poc.prev_frame_num);
1483  if (!sps->gaps_in_frame_num_allowed_flag)
1484  for(i=0; i<FF_ARRAY_ELEMS(h->last_pocs); i++)
1485  h->last_pocs[i] = INT_MIN;
1486  ret = h264_frame_start(h);
1487  if (ret < 0) {
1488  h->first_field = 0;
1489  return ret;
1490  }
1491 
1492  h->poc.prev_frame_num++;
1493  h->poc.prev_frame_num %= 1 << sps->log2_max_frame_num;
1494  h->cur_pic_ptr->frame_num = h->poc.prev_frame_num;
1495  h->cur_pic_ptr->invalid_gap = !sps->gaps_in_frame_num_allowed_flag;
1496  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 0);
1497  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 1);
1498 
1499  h->explicit_ref_marking = 0;
     /* (file line 1500 omitted — ref-pic-marking call whose result is
      * checked on the next line) */
1501  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
1502  return ret;
1503  /* Error concealment: If a ref is missing, copy the previous ref
1504  * in its place.
1505  * FIXME: Avoiding a memcpy would be nice, but ref handling makes
1506  * many assumptions about there being no actual duplicates.
1507  * FIXME: This does not copy padding for out-of-frame motion
1508  * vectors. Given we are concealing a lost frame, this probably
1509  * is not noticeable by comparison, but it should be fixed. */
1510  if (h->short_ref_count) {
     /* mid-grey fill color per plane (luma, two chroma, no alpha) */
1511  int c[4] = {
1512  1<<(h->ps.sps->bit_depth_luma-1),
1513  1<<(h->ps.sps->bit_depth_chroma-1),
1514  1<<(h->ps.sps->bit_depth_chroma-1),
1515  -1
1516  };
1517 
1518  if (prev &&
1519  h->short_ref[0]->f->width == prev->f->width &&
1520  h->short_ref[0]->f->height == prev->f->height &&
1521  h->short_ref[0]->f->format == prev->f->format) {
1522  ff_thread_await_progress(&prev->tf, INT_MAX, 0);
1523  if (prev->field_picture)
1524  ff_thread_await_progress(&prev->tf, INT_MAX, 1);
1525  ff_thread_release_ext_buffer(h->avctx, &h->short_ref[0]->tf);
1526  h->short_ref[0]->tf.f = h->short_ref[0]->f;
1527  ret = ff_thread_ref_frame(&h->short_ref[0]->tf, &prev->tf);
1528  if (ret < 0)
1529  return ret;
1530  h->short_ref[0]->poc = prev->poc + 2U;
1531  ff_thread_report_progress(&h->short_ref[0]->tf, INT_MAX, 0);
1532  if (h->short_ref[0]->field_picture)
1533  ff_thread_report_progress(&h->short_ref[0]->tf, INT_MAX, 1);
1534  } else if (!h->frame_recovered && !h->avctx->hwaccel)
1535  ff_color_frame(h->short_ref[0]->f, c);
1536  h->short_ref[0]->frame_num = h->poc.prev_frame_num;
1537  }
1538  }
1539 
1540  /* See if we have a decoded first field looking for a pair...
1541  * We're using that to see whether to continue decoding in that
1542  * frame, or to allocate a new one. */
1543  if (h->first_field) {
1544  av_assert0(h->cur_pic_ptr);
1545  av_assert0(h->cur_pic_ptr->f->buf[0]);
1546  assert(h->cur_pic_ptr->reference != DELAYED_PIC_REF);
1547 
1548  /* figure out if we have a complementary field pair */
1549  if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
1550  /* Previous field is unmatched. Don't display it, but let it
1551  * remain for reference if marked as such. */
1552  h->missing_fields ++;
1553  h->cur_pic_ptr = NULL;
1554  h->first_field = FIELD_PICTURE(h);
1555  } else {
1556  h->missing_fields = 0;
1557  if (h->cur_pic_ptr->frame_num != h->poc.frame_num) {
1558  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
1559  h->picture_structure==PICT_BOTTOM_FIELD);
1560  /* This and the previous field had different frame_nums.
1561  * Consider this field first in pair. Throw away previous
1562  * one except for reference purposes. */
1563  h->first_field = 1;
1564  h->cur_pic_ptr = NULL;
1565  } else if (h->cur_pic_ptr->reference & DELAYED_PIC_REF) {
1566  /* This frame was already output, we cannot draw into it
1567  * anymore.
1568  */
1569  h->first_field = 1;
1570  h->cur_pic_ptr = NULL;
1571  } else {
1572  /* Second field in complementary pair */
1573  h->first_field = 0;
1574  }
1575  }
1576  } else {
1577  /* Frame or first field in a potentially complementary pair */
1578  h->first_field = FIELD_PICTURE(h);
1579  }
1580 
1581  if (!FIELD_PICTURE(h) || h->first_field) {
1582  if (h264_frame_start(h) < 0) {
1583  h->first_field = 0;
1584  return AVERROR_INVALIDDATA;
1585  }
1586  } else {
1587  int field = h->picture_structure == PICT_BOTTOM_FIELD;
     /* (file line 1588 omitted — statement preceding the owner update) */
1589  h->cur_pic_ptr->tf.owner[field] = h->avctx;
1590  }
1591  /* Some macroblocks can be accessed before they're available in case
1592  * of lost slices, MBAFF or threading. */
1593  if (FIELD_PICTURE(h)) {
1594  for(i = (h->picture_structure == PICT_BOTTOM_FIELD); i<h->mb_height; i++)
1595  memset(h->slice_table + i*h->mb_stride, -1, (h->mb_stride - (i+1==h->mb_height)) * sizeof(*h->slice_table));
1596  } else {
1597  memset(h->slice_table, -1,
1598  (h->mb_height * h->mb_stride - 1) * sizeof(*h->slice_table));
1599  }
1600 
1601  ret = ff_h264_init_poc(h->cur_pic_ptr->field_poc, &h->cur_pic_ptr->poc,
1602  h->ps.sps, &h->poc, h->picture_structure, nal->ref_idc);
1603  if (ret < 0)
1604  return ret;
1605 
1606  memcpy(h->mmco, sl->mmco, sl->nb_mmco * sizeof(*h->mmco));
1607  h->nb_mmco = sl->nb_mmco;
1608  h->explicit_ref_marking = sl->explicit_ref_marking;
1609 
1610  h->picture_idr = nal->type == H264_NAL_IDR_SLICE;
1611 
1612  if (h->sei.recovery_point.recovery_frame_cnt >= 0) {
1613  const int sei_recovery_frame_cnt = h->sei.recovery_point.recovery_frame_cnt;
1614 
1615  if (h->poc.frame_num != sei_recovery_frame_cnt || sl->slice_type_nos != AV_PICTURE_TYPE_I)
1616  h->valid_recovery_point = 1;
1617 
1618  if ( h->recovery_frame < 0
1619  || av_mod_uintp2(h->recovery_frame - h->poc.frame_num, h->ps.sps->log2_max_frame_num) > sei_recovery_frame_cnt) {
1620  h->recovery_frame = av_mod_uintp2(h->poc.frame_num + sei_recovery_frame_cnt, h->ps.sps->log2_max_frame_num);
1621 
1622  if (!h->valid_recovery_point)
1623  h->recovery_frame = h->poc.frame_num;
1624  }
1625  }
1626 
1627  h->cur_pic_ptr->f->key_frame |= (nal->type == H264_NAL_IDR_SLICE);
1628 
1629  if (nal->type == H264_NAL_IDR_SLICE ||
1630  (h->recovery_frame == h->poc.frame_num && nal->ref_idc)) {
1631  h->recovery_frame = -1;
1632  h->cur_pic_ptr->recovered = 1;
1633  }
1634  // If we have an IDR, all frames after it in decoded order are
1635  // "recovered".
1636  if (nal->type == H264_NAL_IDR_SLICE)
1637  h->frame_recovered |= FRAME_RECOVERED_IDR;
1638 #if 1
1639  h->cur_pic_ptr->recovered |= h->frame_recovered;
1640 #else
1641  h->cur_pic_ptr->recovered |= !!(h->frame_recovered & FRAME_RECOVERED_IDR);
1642 #endif
1643 
1644  /* Set the frame properties/side data. Only done for the second field in
1645  * field coded frames, since some SEI information is present for each field
1646  * and is merged by the SEI parsing code. */
1647  if (!FIELD_PICTURE(h) || !h->first_field || h->missing_fields > 1) {
     /* (file line 1648 omitted — call whose result is checked below,
      * presumably h264_export_frame_props) */
1649  if (ret < 0)
1650  return ret;
1651 
     /* (file line 1652 omitted — call whose result is checked below,
      * presumably h264_select_output_frame) */
1653  if (ret < 0)
1654  return ret;
1655  }
1656 
1657  return 0;
1658 }
1659 
/*
 * Parse a slice header from the bitstream into the slice context (sl),
 * validating ranges as it goes.  Writes only slice-local state; global
 * picture setup happens later in h264_field_start()/h264_slice_init().
 * Returns 0 on success, a negative error code on invalid data.
 *
 * NOTE(review): extractor rendering.  The function signature (file line
 * 1660, h264_slice_header_parse in upstream — TODO confirm) and several
 * lines are not visible: the second half of the IDR/intra check condition
 * (1694), the ref-count parse calls (1782, 1789), the pred-weight-table
 * parse call (1803-1804), and the SI slice-type condition (1840).  Only
 * comments were added; visible code is byte-identical.
 */
1661  const H2645NAL *nal)
1662 {
1663  const SPS *sps;
1664  const PPS *pps;
1665  int ret;
1666  unsigned int slice_type, tmp, i;
1667  int field_pic_flag, bottom_field_flag;
1668  int first_slice = sl == h->slice_ctx && !h->current_slice;
1669  int picture_structure;
1670 
1671  if (first_slice)
1672  av_assert0(!h->setup_finished);
1673 
1674  sl->first_mb_addr = get_ue_golomb_long(&sl->gb);
1675 
1676  slice_type = get_ue_golomb_31(&sl->gb);
1677  if (slice_type > 9) {
1678  av_log(h->avctx, AV_LOG_ERROR,
1679  "slice type %d too large at %d\n",
1680  slice_type, sl->first_mb_addr);
1681  return AVERROR_INVALIDDATA;
1682  }
     /* values 5..9 signal the same type as 0..4 but fixed for the whole
      * picture */
1683  if (slice_type > 4) {
1684  slice_type -= 5;
1685  sl->slice_type_fixed = 1;
1686  } else
1687  sl->slice_type_fixed = 0;
1688 
1689  slice_type = ff_h264_golomb_to_pict_type[slice_type];
1690  sl->slice_type = slice_type;
1691  sl->slice_type_nos = slice_type & 3;
1692 
     /* (file line 1694 omitted — second half of the condition rejecting
      * non-intra slices inside an IDR NAL unit) */
1693  if (nal->type == H264_NAL_IDR_SLICE &&
1695  av_log(h->avctx, AV_LOG_ERROR, "A non-intra slice in an IDR NAL unit.\n");
1696  return AVERROR_INVALIDDATA;
1697  }
1698 
1699  sl->pps_id = get_ue_golomb(&sl->gb);
1700  if (sl->pps_id >= MAX_PPS_COUNT) {
1701  av_log(h->avctx, AV_LOG_ERROR, "pps_id %u out of range\n", sl->pps_id);
1702  return AVERROR_INVALIDDATA;
1703  }
1704  if (!h->ps.pps_list[sl->pps_id]) {
1705  av_log(h->avctx, AV_LOG_ERROR,
1706  "non-existing PPS %u referenced\n",
1707  sl->pps_id);
1708  return AVERROR_INVALIDDATA;
1709  }
1710  pps = (const PPS*)h->ps.pps_list[sl->pps_id]->data;
1711  sps = pps->sps;
1712 
1713  sl->frame_num = get_bits(&sl->gb, sps->log2_max_frame_num);
     /* all slices of one picture must carry the same frame_num */
1714  if (!first_slice) {
1715  if (h->poc.frame_num != sl->frame_num) {
1716  av_log(h->avctx, AV_LOG_ERROR, "Frame num change from %d to %d\n",
1717  h->poc.frame_num, sl->frame_num);
1718  return AVERROR_INVALIDDATA;
1719  }
1720  }
1721 
1722  sl->mb_mbaff = 0;
1723 
1724  if (sps->frame_mbs_only_flag) {
1725  picture_structure = PICT_FRAME;
1726  } else {
1727  if (!sps->direct_8x8_inference_flag && slice_type == AV_PICTURE_TYPE_B) {
1728  av_log(h->avctx, AV_LOG_ERROR, "This stream was generated by a broken encoder, invalid 8x8 inference\n");
1729  return -1;
1730  }
1731  field_pic_flag = get_bits1(&sl->gb);
1732  if (field_pic_flag) {
1733  bottom_field_flag = get_bits1(&sl->gb);
1734  picture_structure = PICT_TOP_FIELD + bottom_field_flag;
1735  } else {
1736  picture_structure = PICT_FRAME;
1737  }
1738  }
1739  sl->picture_structure = picture_structure;
1740  sl->mb_field_decoding_flag = picture_structure != PICT_FRAME;
1741 
     /* field pictures use twice the picture-number space */
1742  if (picture_structure == PICT_FRAME) {
1743  sl->curr_pic_num = sl->frame_num;
1744  sl->max_pic_num = 1 << sps->log2_max_frame_num;
1745  } else {
1746  sl->curr_pic_num = 2 * sl->frame_num + 1;
1747  sl->max_pic_num = 1 << (sps->log2_max_frame_num + 1);
1748  }
1749 
1750  if (nal->type == H264_NAL_IDR_SLICE) {
1751  unsigned idr_pic_id = get_ue_golomb_long(&sl->gb);
1752  if (idr_pic_id < 65536) {
1753  sl->idr_pic_id = idr_pic_id;
1754  } else
1755  av_log(h->avctx, AV_LOG_WARNING, "idr_pic_id is invalid\n");
1756  }
1757 
1758  sl->poc_lsb = 0;
1759  sl->delta_poc_bottom = 0;
1760  if (sps->poc_type == 0) {
1761  sl->poc_lsb = get_bits(&sl->gb, sps->log2_max_poc_lsb);
1762 
1763  if (pps->pic_order_present == 1 && picture_structure == PICT_FRAME)
1764  sl->delta_poc_bottom = get_se_golomb(&sl->gb);
1765  }
1766 
1767  sl->delta_poc[0] = sl->delta_poc[1] = 0;
1768  if (sps->poc_type == 1 && !sps->delta_pic_order_always_zero_flag) {
1769  sl->delta_poc[0] = get_se_golomb(&sl->gb);
1770 
1771  if (pps->pic_order_present == 1 && picture_structure == PICT_FRAME)
1772  sl->delta_poc[1] = get_se_golomb(&sl->gb);
1773  }
1774 
1775  sl->redundant_pic_count = 0;
1776  if (pps->redundant_pic_cnt_present)
1777  sl->redundant_pic_count = get_ue_golomb(&sl->gb);
1778 
1779  if (sl->slice_type_nos == AV_PICTURE_TYPE_B)
1780  sl->direct_spatial_mv_pred = get_bits1(&sl->gb);
1781 
     /* (file line 1782 omitted — ref-count parse call whose result is
      * checked below) */
1783  &sl->gb, pps, sl->slice_type_nos,
1784  picture_structure, h->avctx);
1785  if (ret < 0)
1786  return ret;
1787 
1788  if (sl->slice_type_nos != AV_PICTURE_TYPE_I) {
     /* (file line 1789 omitted — ref-list reordering parse call) */
1790  if (ret < 0) {
1791  sl->ref_count[1] = sl->ref_count[0] = 0;
1792  return ret;
1793  }
1794  }
1795 
1796  sl->pwt.use_weight = 0;
1797  for (i = 0; i < 2; i++) {
1798  sl->pwt.luma_weight_flag[i] = 0;
1799  sl->pwt.chroma_weight_flag[i] = 0;
1800  }
1801  if ((pps->weighted_pred && sl->slice_type_nos == AV_PICTURE_TYPE_P) ||
1802  (pps->weighted_bipred_idc == 1 &&
     /* (file lines 1803-1804 omitted — remainder of condition and the
      * pred-weight-table parse call) */
1805  sl->slice_type_nos, &sl->pwt,
1806  picture_structure, h->avctx);
1807  if (ret < 0)
1808  return ret;
1809  }
1810 
1811  sl->explicit_ref_marking = 0;
1812  if (nal->ref_idc) {
1813  ret = ff_h264_decode_ref_pic_marking(sl, &sl->gb, nal, h->avctx);
1814  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
1815  return AVERROR_INVALIDDATA;
1816  }
1817 
1818  if (sl->slice_type_nos != AV_PICTURE_TYPE_I && pps->cabac) {
1819  tmp = get_ue_golomb_31(&sl->gb);
1820  if (tmp > 2) {
1821  av_log(h->avctx, AV_LOG_ERROR, "cabac_init_idc %u overflow\n", tmp);
1822  return AVERROR_INVALIDDATA;
1823  }
1824  sl->cabac_init_idc = tmp;
1825  }
1826 
1827  sl->last_qscale_diff = 0;
     /* unsigned add deliberately avoids signed-overflow UB before the
      * range check */
1828  tmp = pps->init_qp + (unsigned)get_se_golomb(&sl->gb);
1829  if (tmp > 51 + 6 * (sps->bit_depth_luma - 8)) {
1830  av_log(h->avctx, AV_LOG_ERROR, "QP %u out of range\n", tmp);
1831  return AVERROR_INVALIDDATA;
1832  }
1833  sl->qscale = tmp;
1834  sl->chroma_qp[0] = get_chroma_qp(pps, 0, sl->qscale);
1835  sl->chroma_qp[1] = get_chroma_qp(pps, 1, sl->qscale);
1836  // FIXME qscale / qp ... stuff
1837  if (sl->slice_type == AV_PICTURE_TYPE_SP)
1838  get_bits1(&sl->gb); /* sp_for_switch_flag */
1839  if (sl->slice_type == AV_PICTURE_TYPE_SP ||
     /* (file line 1840 omitted — SI slice-type half of the condition) */
1841  get_se_golomb(&sl->gb); /* slice_qs_delta */
1842 
1843  sl->deblocking_filter = 1;
1844  sl->slice_alpha_c0_offset = 0;
1845  sl->slice_beta_offset = 0;
1846  if (pps->deblocking_filter_parameters_present) {
1847  tmp = get_ue_golomb_31(&sl->gb);
1848  if (tmp > 2) {
1849  av_log(h->avctx, AV_LOG_ERROR,
1850  "deblocking_filter_idc %u out of range\n", tmp);
1851  return AVERROR_INVALIDDATA;
1852  }
1853  sl->deblocking_filter = tmp;
1854  if (sl->deblocking_filter < 2)
1855  sl->deblocking_filter ^= 1; // 1<->0
1856 
1857  if (sl->deblocking_filter) {
1858  int slice_alpha_c0_offset_div2 = get_se_golomb(&sl->gb);
1859  int slice_beta_offset_div2 = get_se_golomb(&sl->gb);
1860  if (slice_alpha_c0_offset_div2 > 6 ||
1861  slice_alpha_c0_offset_div2 < -6 ||
1862  slice_beta_offset_div2 > 6 ||
1863  slice_beta_offset_div2 < -6) {
1864  av_log(h->avctx, AV_LOG_ERROR,
1865  "deblocking filter parameters %d %d out of range\n",
1866  slice_alpha_c0_offset_div2, slice_beta_offset_div2);
1867  return AVERROR_INVALIDDATA;
1868  }
1869  sl->slice_alpha_c0_offset = slice_alpha_c0_offset_div2 * 2;
1870  sl->slice_beta_offset = slice_beta_offset_div2 * 2;
1871  }
1872  }
1873 
1874  return 0;
1875 }
1876 
1877 /* do all the per-slice initialization needed before we can start decoding the
1878  * actual MBs */
/* NOTE(review): rendered listing — the signature line and several statement
 * continuations were dropped by the doc generator (lines 1879, 1897, 1907,
 * 1915-1918, 1924, 1926); code bytes kept as-is.  Presumably this is
 * h264_slice_init(H264Context *h, H264SliceContext *sl, const H2645NAL *nal)
 * — confirm against the original source. */
1880  const H2645NAL *nal)
1881 {
1882  int i, j, ret = 0;
1883 
 /* A picture may not mix IDR and non-IDR slices. */
1884  if (h->picture_idr && nal->type != H264_NAL_IDR_SLICE) {
1885  av_log(h->avctx, AV_LOG_ERROR, "Invalid mix of IDR and non-IDR slices\n");
1886  return AVERROR_INVALIDDATA;
1887  }
1888 
1889  av_assert1(h->mb_num == h->mb_width * h->mb_height);
 /* first_mb_in_slice is in field/MB-pair units for field or MBAFF pictures,
  * hence the shift before the range check. */
1890  if (sl->first_mb_addr << FIELD_OR_MBAFF_PICTURE(h) >= h->mb_num ||
1891  sl->first_mb_addr >= h->mb_num) {
1892  av_log(h->avctx, AV_LOG_ERROR, "first_mb_in_slice overflow\n");
1893  return AVERROR_INVALIDDATA;
1894  }
1895  sl->resync_mb_x = sl->mb_x = sl->first_mb_addr % h->mb_width;
 /* NOTE(review): the shift amount on the next statement (line 1897 in the
  * original) was dropped by the renderer. */
1896  sl->resync_mb_y = sl->mb_y = (sl->first_mb_addr / h->mb_width) <<
1898  if (h->picture_structure == PICT_BOTTOM_FIELD)
1899  sl->resync_mb_y = sl->mb_y = sl->mb_y + 1;
1900  av_assert1(sl->mb_y < h->mb_height);
1901 
1902  ret = ff_h264_build_ref_list(h, sl);
1903  if (ret < 0)
1904  return ret;
1905 
 /* weighted_bipred_idc == 2: derive implicit B-prediction weight tables.
  * NOTE(review): the rest of the condition (line 1907) is missing here. */
1906  if (h->ps.pps->weighted_bipred_idc == 2 &&
1908  implicit_weight_table(h, sl, -1);
1909  if (FRAME_MBAFF(h)) {
1910  implicit_weight_table(h, sl, 0);
1911  implicit_weight_table(h, sl, 1);
1912  }
1913  }
1914 
 /* NOTE(review): the statement guarded by !h->setup_finished (line 1918)
  * was dropped by the renderer. */
1917  if (!h->setup_finished)
1919 
 /* Honour the requested skip_loop_filter discard level; NOTE(review): the
  * slice-type sub-conditions on lines 1924 and 1926 are missing here. */
1920  if (h->avctx->skip_loop_filter >= AVDISCARD_ALL ||
1921  (h->avctx->skip_loop_filter >= AVDISCARD_NONKEY &&
1922  h->nal_unit_type != H264_NAL_IDR_SLICE) ||
1923  (h->avctx->skip_loop_filter >= AVDISCARD_NONINTRA &&
1925  (h->avctx->skip_loop_filter >= AVDISCARD_BIDIR &&
1927  (h->avctx->skip_loop_filter >= AVDISCARD_NONREF &&
1928  nal->ref_idc == 0))
1929  sl->deblocking_filter = 0;
1930 
 /* With multiple slice contexts, either skip cross-slice deblocking (fast
  * mode, filter type 2) or postpone filtering until all slices decoded. */
1931  if (sl->deblocking_filter == 1 && h->nb_slice_ctx > 1) {
1932  if (h->avctx->flags2 & AV_CODEC_FLAG2_FAST) {
1933  /* Cheat slightly for speed:
1934  * Do not bother to deblock across slices. */
1935  sl->deblocking_filter = 2;
1936  } else {
1937  h->postpone_filter = 1;
1938  }
1939  }
 /* Conservative QP threshold below which deblocking is a guaranteed no-op
  * (see its use in fill_filter_caches()). NOTE(review): one term of this
  * expression (line 1941) is missing from the rendering. */
1940  sl->qp_thresh = 15 -
1942  FFMAX3(0,
1943  h->ps.pps->chroma_qp_index_offset[0],
1944  h->ps.pps->chroma_qp_index_offset[1]) +
1945  6 * (h->ps.sps->bit_depth_luma - 8);
1946 
1947  sl->slice_num = ++h->current_slice;
1948 
1949  if (sl->slice_num)
1950  h->slice_row[(sl->slice_num-1)&(MAX_SLICES-1)]= sl->resync_mb_y;
 /* Heuristic detection of slice_num wraparound in the MAX_SLICES-sized
  * ring: warn if a wrapped entry lands close to this slice's resync row. */
1951  if ( h->slice_row[sl->slice_num&(MAX_SLICES-1)] + 3 >= sl->resync_mb_y
1952  && h->slice_row[sl->slice_num&(MAX_SLICES-1)] <= sl->resync_mb_y
1953  && sl->slice_num >= MAX_SLICES) {
1954  //in case of ASO this check needs to be updated depending on how we decide to assign slice numbers in this case
1955  av_log(h->avctx, AV_LOG_WARNING, "Possibly too many slices (%d >= %d), increase MAX_SLICES and recompile if there are artifacts\n", sl->slice_num, MAX_SLICES);
1956  }
1957 
 /* Build the ref2frm table for both reference lists: maps each reference
  * index to 4*frame_id + reference flags, used by the loop filter to tell
  * whether two MBs reference the same frame.  60 is a sentinel "no match"
  * frame id. */
1958  for (j = 0; j < 2; j++) {
1959  int id_list[16];
1960  int *ref2frm = h->ref2frm[sl->slice_num & (MAX_SLICES - 1)][j];
1961  for (i = 0; i < 16; i++) {
1962  id_list[i] = 60;
1963  if (j < sl->list_count && i < sl->ref_count[j] &&
1964  sl->ref_list[j][i].parent->f->buf[0]) {
1965  int k;
 /* Identify the reference by its underlying AVBuffer so that both
  * fields of the same frame map to the same id. */
1966  AVBuffer *buf = sl->ref_list[j][i].parent->f->buf[0]->buffer;
1967  for (k = 0; k < h->short_ref_count; k++)
1968  if (h->short_ref[k]->f->buf[0]->buffer == buf) {
1969  id_list[i] = k;
1970  break;
1971  }
1972  for (k = 0; k < h->long_ref_count; k++)
1973  if (h->long_ref[k] && h->long_ref[k]->f->buf[0]->buffer == buf) {
1974  id_list[i] = h->short_ref_count + k;
1975  break;
1976  }
1977  }
1978  }
1979 
 /* Entries [0],[1] (and [18],[19] for the MBAFF half) are -1 so that
  * out-of-slice neighbours compare unequal to any real reference. */
1980  ref2frm[0] =
1981  ref2frm[1] = -1;
1982  for (i = 0; i < 16; i++)
1983  ref2frm[i + 2] = 4 * id_list[i] + (sl->ref_list[j][i].reference & 3);
1984  ref2frm[18 + 0] =
1985  ref2frm[18 + 1] = -1;
 /* Second half of the table: field references for MBAFF (two entries per
  * frame reference, hence the >> 1). */
1986  for (i = 16; i < 48; i++)
1987  ref2frm[i + 4] = 4 * id_list[(i - 16) >> 1] +
1988  (sl->ref_list[j][i].reference & 3);
1989  }
1990 
1991  if (h->avctx->debug & FF_DEBUG_PICT_INFO) {
1992  av_log(h->avctx, AV_LOG_DEBUG,
1993  "slice:%d %c mb:%d %c%s%s frame:%d poc:%d/%d ref:%d/%d qp:%d loop:%d:%d:%d weight:%d%s %s\n",
1994  sl->slice_num,
1995  (h->picture_structure == PICT_FRAME ? 'F' : h->picture_structure == PICT_TOP_FIELD ? 'T' : 'B'),
1996  sl->mb_y * h->mb_width + sl->mb_x,
1998  sl->slice_type_fixed ? " fix" : "",
1999  nal->type == H264_NAL_IDR_SLICE ? " IDR" : "",
2000  h->poc.frame_num,
2001  h->cur_pic_ptr->field_poc[0],
2002  h->cur_pic_ptr->field_poc[1],
2003  sl->ref_count[0], sl->ref_count[1],
2004  sl->qscale,
2005  sl->deblocking_filter,
2007  sl->pwt.use_weight,
2008  sl->pwt.use_weight == 1 && sl->pwt.use_weight_chroma ? "c" : "",
2009  sl->slice_type == AV_PICTURE_TYPE_B ? (sl->direct_spatial_mv_pred ? "SPAT" : "TEMP") : "");
2010  }
2011 
2012  return 0;
2013 }
2014 
/* NOTE(review): the signature line (original line 2015) was dropped by the
 * doc generator.  Presumably this is
 * ff_h264_queue_decode_slice(H264Context *h, const H2645NAL *nal): parse a
 * slice header, handle field/frame boundaries, and queue the slice context
 * for decoding — confirm against the original source. */
2016 {
2017  H264SliceContext *sl = h->slice_ctx + h->nb_slice_ctx_queued;
2018  int first_slice = sl == h->slice_ctx && !h->current_slice;
2019  int ret;
2020 
2021  sl->gb = nal->gb;
2022 
2023  ret = h264_slice_header_parse(h, sl, nal);
2024  if (ret < 0)
2025  return ret;
2026 
2027  // discard redundant pictures
2028  if (sl->redundant_pic_count > 0) {
2029  sl->ref_count[0] = sl->ref_count[1] = 0;
2030  return 0;
2031  }
2032 
2033  if (sl->first_mb_addr == 0 || !h->current_slice) {
2034  if (h->setup_finished) {
2035  av_log(h->avctx, AV_LOG_ERROR, "Too many fields\n");
2036  return AVERROR_INVALIDDATA;
2037  }
2038  }
2039 
2040  if (sl->first_mb_addr == 0) { // FIXME better field boundary detection
2041  if (h->current_slice) {
2042  // this slice starts a new field
2043  // first decode any pending queued slices
 /* NOTE(review): the call whose result feeds ret (original line 2047)
  * was dropped by the renderer — presumably it executes the queued
  * slices; confirm against the original source. */
2044  if (h->nb_slice_ctx_queued) {
2045  H264SliceContext tmp_ctx;
2046 
2048  if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
2049  return ret;
2050 
 /* Swap the parsed header of the new field into slice_ctx[0] so it
  * becomes the active context. */
2051  memcpy(&tmp_ctx, h->slice_ctx, sizeof(tmp_ctx));
2052  memcpy(h->slice_ctx, sl, sizeof(tmp_ctx));
2053  memcpy(sl, &tmp_ctx, sizeof(tmp_ctx));
2054  sl = h->slice_ctx;
2055  }
2056 
2057  if (h->cur_pic_ptr && FIELD_PICTURE(h) && h->first_field) {
2058  ret = ff_h264_field_end(h, h->slice_ctx, 1);
2059  if (ret < 0)
2060  return ret;
2061  } else if (h->cur_pic_ptr && !FIELD_PICTURE(h) && !h->first_field && h->nal_unit_type == H264_NAL_IDR_SLICE) {
 /* New IDR arrives while the previous frame is still open: close it
  * out and report full progress so waiting threads are unblocked. */
2062  av_log(h, AV_LOG_WARNING, "Broken frame packetizing\n");
2063  ret = ff_h264_field_end(h, h->slice_ctx, 1);
2064  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 0);
2065  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 1);
2066  h->cur_pic_ptr = NULL;
2067  if (ret < 0)
2068  return ret;
2069  } else
2070  return AVERROR_INVALIDDATA;
2071  }
2072 
2073  if (!h->first_field) {
2074  if (h->cur_pic_ptr && !h->droppable) {
2075  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
2076  h->picture_structure == PICT_BOTTOM_FIELD);
2077  }
2078  h->cur_pic_ptr = NULL;
2079  }
2080  }
2081 
2082  if (!h->current_slice)
2083  av_assert0(sl == h->slice_ctx);
2084 
 /* Honour skip_frame before a new picture is started. */
2085  if (h->current_slice == 0 && !h->first_field) {
2086  if (
2087  (h->avctx->skip_frame >= AVDISCARD_NONREF && !h->nal_ref_idc) ||
2088  (h->avctx->skip_frame >= AVDISCARD_BIDIR && sl->slice_type_nos == AV_PICTURE_TYPE_B) ||
2089  (h->avctx->skip_frame >= AVDISCARD_NONINTRA && sl->slice_type_nos != AV_PICTURE_TYPE_I) ||
2090  (h->avctx->skip_frame >= AVDISCARD_NONKEY && h->nal_unit_type != H264_NAL_IDR_SLICE && h->sei.recovery_point.recovery_frame_cnt < 0) ||
2091  h->avctx->skip_frame >= AVDISCARD_ALL) {
2092  return 0;
2093  }
2094  }
2095 
 /* Parameter sets must not change between slices of the same picture. */
2096  if (!first_slice) {
2097  const PPS *pps = (const PPS*)h->ps.pps_list[sl->pps_id]->data;
2098 
2099  if (h->ps.pps->sps_id != pps->sps_id ||
2100  h->ps.pps->transform_8x8_mode != pps->transform_8x8_mode /*||
2101  (h->setup_finished && h->ps.pps != pps)*/) {
2102  av_log(h->avctx, AV_LOG_ERROR, "PPS changed between slices\n");
2103  return AVERROR_INVALIDDATA;
2104  }
2105  if (h->ps.sps != pps->sps) {
2106  av_log(h->avctx, AV_LOG_ERROR,
2107  "SPS changed in the middle of the frame\n");
2108  return AVERROR_INVALIDDATA;
2109  }
2110  }
2111 
2112  if (h->current_slice == 0) {
2113  ret = h264_field_start(h, sl, nal, first_slice);
2114  if (ret < 0)
2115  return ret;
2116  } else {
2117  if (h->picture_structure != sl->picture_structure ||
2118  h->droppable != (nal->ref_idc == 0)) {
2119  av_log(h->avctx, AV_LOG_ERROR,
2120  "Changing field mode (%d -> %d) between slices is not allowed\n",
2121  h->picture_structure, sl->picture_structure);
2122  return AVERROR_INVALIDDATA;
2123  } else if (!h->cur_pic_ptr) {
2124  av_log(h->avctx, AV_LOG_ERROR,
2125  "unset cur_pic_ptr on slice %d\n",
2126  h->current_slice + 1);
2127  return AVERROR_INVALIDDATA;
2128  }
2129  }
2130 
2131  ret = h264_slice_init(h, sl, nal);
2132  if (ret < 0)
2133  return ret;
2134 
2135  h->nb_slice_ctx_queued++;
2136 
2137  return 0;
2138 }
2139 
/* NOTE(review): the signature line (original line 2140) was dropped by the
 * doc generator.  Maps an AV_PICTURE_TYPE_* slice type to the H.264
 * bitstream slice_type code (P=0, B=1, I=2, SP=3, SI=4); returns
 * AVERROR_INVALIDDATA for anything else. */
2141 {
2142  switch (sl->slice_type) {
2143  case AV_PICTURE_TYPE_P:
2144  return 0;
2145  case AV_PICTURE_TYPE_B:
2146  return 1;
2147  case AV_PICTURE_TYPE_I:
2148  return 2;
2149  case AV_PICTURE_TYPE_SP:
2150  return 3;
2151  case AV_PICTURE_TYPE_SI:
2152  return 4;
2153  default:
2154  return AVERROR_INVALIDDATA;
2155  }
2156 }
2157 
/* NOTE(review): the first line of the signature (original line 2158) was
 * dropped by the doc generator — presumably
 * "static void fill_filter_caches_inter(const H264Context *h,"; confirm
 * against the original source.  Fills mv_cache/ref_cache for one reference
 * list so the loop filter can compute boundary strengths. */
2159  H264SliceContext *sl,
2160  int mb_type, int top_xy,
2161  int left_xy[LEFT_MBS],
2162  int top_type,
2163  int left_type[LEFT_MBS],
2164  int mb_xy, int list)
2165 {
2166  int b_stride = h->b_stride;
 /* Destination caches, offset to the current MB's 4x4 block origin. */
2167  int16_t(*mv_dst)[2] = &sl->mv_cache[list][scan8[0]];
2168  int8_t *ref_cache = &sl->ref_cache[list][scan8[0]];
2169  if (IS_INTER(mb_type) || IS_DIRECT(mb_type)) {
 /* Top neighbour row: copy its bottom row of MVs and ref indices,
  * remapped through the neighbour slice's ref2frm table. */
2170  if (USES_LIST(top_type, list)) {
2171  const int b_xy = h->mb2b_xy[top_xy] + 3 * b_stride;
2172  const int b8_xy = 4 * top_xy + 2;
2173  const int *ref2frm = &h->ref2frm[h->slice_table[top_xy] & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2174  AV_COPY128(mv_dst - 1 * 8, h->cur_pic.motion_val[list][b_xy + 0]);
2175  ref_cache[0 - 1 * 8] =
2176  ref_cache[1 - 1 * 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 0]];
2177  ref_cache[2 - 1 * 8] =
2178  ref_cache[3 - 1 * 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 1]];
2179  } else {
2180  AV_ZERO128(mv_dst - 1 * 8);
2181  AV_WN32A(&ref_cache[0 - 1 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2182  }
2183 
 /* Left neighbour column, only when both MBs share the same field
  * parity (the loop filter handles the mixed-interlace case itself). */
2184  if (!IS_INTERLACED(mb_type ^ left_type[LTOP])) {
2185  if (USES_LIST(left_type[LTOP], list)) {
2186  const int b_xy = h->mb2b_xy[left_xy[LTOP]] + 3;
2187  const int b8_xy = 4 * left_xy[LTOP] + 1;
2188  const int *ref2frm = &h->ref2frm[h->slice_table[left_xy[LTOP]] & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2189  AV_COPY32(mv_dst - 1 + 0, h->cur_pic.motion_val[list][b_xy + b_stride * 0]);
2190  AV_COPY32(mv_dst - 1 + 8, h->cur_pic.motion_val[list][b_xy + b_stride * 1]);
2191  AV_COPY32(mv_dst - 1 + 16, h->cur_pic.motion_val[list][b_xy + b_stride * 2]);
2192  AV_COPY32(mv_dst - 1 + 24, h->cur_pic.motion_val[list][b_xy + b_stride * 3]);
2193  ref_cache[-1 + 0] =
2194  ref_cache[-1 + 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 2 * 0]];
2195  ref_cache[-1 + 16] =
2196  ref_cache[-1 + 24] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 2 * 1]];
2197  } else {
2198  AV_ZERO32(mv_dst - 1 + 0);
2199  AV_ZERO32(mv_dst - 1 + 8);
2200  AV_ZERO32(mv_dst - 1 + 16);
2201  AV_ZERO32(mv_dst - 1 + 24);
2202  ref_cache[-1 + 0] =
2203  ref_cache[-1 + 8] =
2204  ref_cache[-1 + 16] =
2205  ref_cache[-1 + 24] = LIST_NOT_USED;
2206  }
2207  }
2208  }
2209 
 /* Current MB does not use this list: fill with zero MVs and
  * LIST_NOT_USED markers and we are done. */
2210  if (!USES_LIST(mb_type, list)) {
2211  fill_rectangle(mv_dst, 4, 4, 8, pack16to32(0, 0), 4);
2212  AV_WN32A(&ref_cache[0 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2213  AV_WN32A(&ref_cache[1 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2214  AV_WN32A(&ref_cache[2 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2215  AV_WN32A(&ref_cache[3 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
2216  return;
2217  }
2218 
 /* Broadcast the four per-8x8 ref indices across the 4x4 cache grid
  * (the 0x0101 multiply duplicates each byte pair). */
2219  {
2220  int8_t *ref = &h->cur_pic.ref_index[list][4 * mb_xy];
2221  const int *ref2frm = &h->ref2frm[sl->slice_num & (MAX_SLICES - 1)][list][(MB_MBAFF(sl) ? 20 : 2)];
2222  uint32_t ref01 = (pack16to32(ref2frm[ref[0]], ref2frm[ref[1]]) & 0x00FF00FF) * 0x0101;
2223  uint32_t ref23 = (pack16to32(ref2frm[ref[2]], ref2frm[ref[3]]) & 0x00FF00FF) * 0x0101;
2224  AV_WN32A(&ref_cache[0 * 8], ref01);
2225  AV_WN32A(&ref_cache[1 * 8], ref01);
2226  AV_WN32A(&ref_cache[2 * 8], ref23);
2227  AV_WN32A(&ref_cache[3 * 8], ref23);
2228  }
2229 
 /* Copy the current MB's own 4x4 motion vectors row by row. */
2230  {
2231  int16_t(*mv_src)[2] = &h->cur_pic.motion_val[list][4 * sl->mb_x + 4 * sl->mb_y * b_stride];
2232  AV_COPY128(mv_dst + 8 * 0, mv_src + 0 * b_stride);
2233  AV_COPY128(mv_dst + 8 * 1, mv_src + 1 * b_stride);
2234  AV_COPY128(mv_dst + 8 * 2, mv_src + 2 * b_stride);
2235  AV_COPY128(mv_dst + 8 * 3, mv_src + 3 * b_stride);
2236  }
2237 }
2238 
2239 /**
2240  * @return non zero if the loop filter can be skipped
2241  */
/* Prepares all per-MB caches (neighbour types, MVs, refs, non-zero-count)
 * needed by the deblocking filter for the MB at sl->mb_xy. */
2242 static int fill_filter_caches(const H264Context *h, H264SliceContext *sl, int mb_type)
2243 {
2244  const int mb_xy = sl->mb_xy;
2245  int top_xy, left_xy[LEFT_MBS];
2246  int top_type, left_type[LEFT_MBS];
2247  uint8_t *nnz;
2248  uint8_t *nnz_cache;
2249 
2250  top_xy = mb_xy - (h->mb_stride << MB_FIELD(sl));
2251 
 /* In MBAFF frames the left neighbour may live in a different MB pair
  * row depending on the field flags of the two macroblocks. */
2252  left_xy[LBOT] = left_xy[LTOP] = mb_xy - 1;
2253  if (FRAME_MBAFF(h)) {
2254  const int left_mb_field_flag = IS_INTERLACED(h->cur_pic.mb_type[mb_xy - 1]);
2255  const int curr_mb_field_flag = IS_INTERLACED(mb_type);
2256  if (sl->mb_y & 1) {
2257  if (left_mb_field_flag != curr_mb_field_flag)
2258  left_xy[LTOP] -= h->mb_stride;
2259  } else {
2260  if (curr_mb_field_flag)
2261  top_xy += h->mb_stride &
2262  (((h->cur_pic.mb_type[top_xy] >> 7) & 1) - 1);
2263  if (left_mb_field_flag != curr_mb_field_flag)
2264  left_xy[LBOT] += h->mb_stride;
2265  }
2266  }
2267 
2268  sl->top_mb_xy = top_xy;
2269  sl->left_mb_xy[LTOP] = left_xy[LTOP];
2270  sl->left_mb_xy[LBOT] = left_xy[LBOT];
2271  {
2272  /* For sufficiently low qp, filtering wouldn't do anything.
2273  * This is a conservative estimate: could also check beta_offset
2274  * and more accurate chroma_qp. */
2275  int qp_thresh = sl->qp_thresh; // FIXME strictly we should store qp_thresh for each mb of a slice
2276  int qp = h->cur_pic.qscale_table[mb_xy];
2277  if (qp <= qp_thresh &&
2278  (left_xy[LTOP] < 0 ||
2279  ((qp + h->cur_pic.qscale_table[left_xy[LTOP]] + 1) >> 1) <= qp_thresh) &&
2280  (top_xy < 0 ||
2281  ((qp + h->cur_pic.qscale_table[top_xy] + 1) >> 1) <= qp_thresh)) {
2282  if (!FRAME_MBAFF(h))
2283  return 1;
 /* MBAFF: also check the second left neighbour and the MB above
  * the top one before the filter can be skipped. */
2284  if ((left_xy[LTOP] < 0 ||
2285  ((qp + h->cur_pic.qscale_table[left_xy[LBOT]] + 1) >> 1) <= qp_thresh) &&
2286  (top_xy < h->mb_stride ||
2287  ((qp + h->cur_pic.qscale_table[top_xy - h->mb_stride] + 1) >> 1) <= qp_thresh))
2288  return 1;
2289  }
2290  }
2291 
 /* Zero out neighbour types that belong to another slice (filter mode 2
  * does not deblock across slices) or to no slice at all (0xFFFF). */
2292  top_type = h->cur_pic.mb_type[top_xy];
2293  left_type[LTOP] = h->cur_pic.mb_type[left_xy[LTOP]];
2294  left_type[LBOT] = h->cur_pic.mb_type[left_xy[LBOT]];
2295  if (sl->deblocking_filter == 2) {
2296  if (h->slice_table[top_xy] != sl->slice_num)
2297  top_type = 0;
2298  if (h->slice_table[left_xy[LBOT]] != sl->slice_num)
2299  left_type[LTOP] = left_type[LBOT] = 0;
2300  } else {
2301  if (h->slice_table[top_xy] == 0xFFFF)
2302  top_type = 0;
2303  if (h->slice_table[left_xy[LBOT]] == 0xFFFF)
2304  left_type[LTOP] = left_type[LBOT] = 0;
2305  }
2306  sl->top_type = top_type;
2307  sl->left_type[LTOP] = left_type[LTOP];
2308  sl->left_type[LBOT] = left_type[LBOT];
2309 
 /* Intra MBs always use the strongest boundary strength; no MV/ref
  * caches are needed. */
2310  if (IS_INTRA(mb_type))
2311  return 0;
2312 
2313  fill_filter_caches_inter(h, sl, mb_type, top_xy, left_xy,
2314  top_type, left_type, mb_xy, 0);
2315  if (sl->list_count == 2)
2316  fill_filter_caches_inter(h, sl, mb_type, top_xy, left_xy,
2317  top_type, left_type, mb_xy, 1);
2318 
 /* Load the non-zero coefficient counts for this MB and its top/left
  * neighbours into the 8x5 nnz cache. */
2319  nnz = h->non_zero_count[mb_xy];
2320  nnz_cache = sl->non_zero_count_cache;
2321  AV_COPY32(&nnz_cache[4 + 8 * 1], &nnz[0]);
2322  AV_COPY32(&nnz_cache[4 + 8 * 2], &nnz[4]);
2323  AV_COPY32(&nnz_cache[4 + 8 * 3], &nnz[8]);
2324  AV_COPY32(&nnz_cache[4 + 8 * 4], &nnz[12]);
2325  sl->cbp = h->cbp_table[mb_xy];
2326 
2327  if (top_type) {
2328  nnz = h->non_zero_count[top_xy];
2329  AV_COPY32(&nnz_cache[4 + 8 * 0], &nnz[3 * 4]);
2330  }
2331 
2332  if (left_type[LTOP]) {
2333  nnz = h->non_zero_count[left_xy[LTOP]];
2334  nnz_cache[3 + 8 * 1] = nnz[3 + 0 * 4];
2335  nnz_cache[3 + 8 * 2] = nnz[3 + 1 * 4];
2336  nnz_cache[3 + 8 * 3] = nnz[3 + 2 * 4];
2337  nnz_cache[3 + 8 * 4] = nnz[3 + 3 * 4];
2338  }
2339 
2340  /* CAVLC 8x8dct requires NNZ values for residual decoding that differ
2341  * from what the loop filter needs */
2342  if (!CABAC(h) && h->ps.pps->transform_8x8_mode) {
 /* Rebuild nnz from the cbp bits for 8x8-transform MBs so the filter
  * sees per-8x8 "coded" flags instead of CAVLC's residual counters. */
2343  if (IS_8x8DCT(top_type)) {
2344  nnz_cache[4 + 8 * 0] =
2345  nnz_cache[5 + 8 * 0] = (h->cbp_table[top_xy] & 0x4000) >> 12;
2346  nnz_cache[6 + 8 * 0] =
2347  nnz_cache[7 + 8 * 0] = (h->cbp_table[top_xy] & 0x8000) >> 12;
2348  }
2349  if (IS_8x8DCT(left_type[LTOP])) {
2350  nnz_cache[3 + 8 * 1] =
2351  nnz_cache[3 + 8 * 2] = (h->cbp_table[left_xy[LTOP]] & 0x2000) >> 12; // FIXME check MBAFF
2352  }
2353  if (IS_8x8DCT(left_type[LBOT])) {
2354  nnz_cache[3 + 8 * 3] =
2355  nnz_cache[3 + 8 * 4] = (h->cbp_table[left_xy[LBOT]] & 0x8000) >> 12; // FIXME check MBAFF
2356  }
2357 
2358  if (IS_8x8DCT(mb_type)) {
2359  nnz_cache[scan8[0]] =
2360  nnz_cache[scan8[1]] =
2361  nnz_cache[scan8[2]] =
2362  nnz_cache[scan8[3]] = (sl->cbp & 0x1000) >> 12;
2363 
2364  nnz_cache[scan8[0 + 4]] =
2365  nnz_cache[scan8[1 + 4]] =
2366  nnz_cache[scan8[2 + 4]] =
2367  nnz_cache[scan8[3 + 4]] = (sl->cbp & 0x2000) >> 12;
2368 
2369  nnz_cache[scan8[0 + 8]] =
2370  nnz_cache[scan8[1 + 8]] =
2371  nnz_cache[scan8[2 + 8]] =
2372  nnz_cache[scan8[3 + 8]] = (sl->cbp & 0x4000) >> 12;
2373 
2374  nnz_cache[scan8[0 + 12]] =
2375  nnz_cache[scan8[1 + 12]] =
2376  nnz_cache[scan8[2 + 12]] =
2377  nnz_cache[scan8[3 + 12]] = (sl->cbp & 0x8000) >> 12;
2378  }
2379  }
2380 
2381  return 0;
2382 }
2383 
/* Runs the deblocking filter over MB columns [start_x, end_x) of the current
 * MB row (both rows of a pair for MBAFF frames), then restores the slice
 * position/QP state it clobbered. */
2384 static void loop_filter(const H264Context *h, H264SliceContext *sl, int start_x, int end_x)
2385 {
2386  uint8_t *dest_y, *dest_cb, *dest_cr;
2387  int linesize, uvlinesize, mb_x, mb_y;
2388  const int end_mb_y = sl->mb_y + FRAME_MBAFF(h);
2389  const int old_slice_type = sl->slice_type;
2390  const int pixel_shift = h->pixel_shift;
2391  const int block_h = 16 >> h->chroma_y_shift;
2392 
 /* Filtering is deferred to ff_h264_execute_decode_slices() when slices
  * may overlap deblocking-wise. */
2393  if (h->postpone_filter)
2394  return;
2395 
2396  if (sl->deblocking_filter) {
2397  for (mb_x = start_x; mb_x < end_x; mb_x++)
2398  for (mb_y = end_mb_y - FRAME_MBAFF(h); mb_y <= end_mb_y; mb_y++) {
2399  int mb_xy, mb_type;
2400  mb_xy = sl->mb_xy = mb_x + mb_y * h->mb_stride;
2401  mb_type = h->cur_pic.mb_type[mb_xy];
2402 
2403  if (FRAME_MBAFF(h))
2404  sl->mb_mbaff =
2405  sl->mb_field_decoding_flag = !!IS_INTERLACED(mb_type);
2406 
2407  sl->mb_x = mb_x;
2408  sl->mb_y = mb_y;
 /* Compute the plane pointers for this MB. */
2409  dest_y = h->cur_pic.f->data[0] +
2410  ((mb_x << pixel_shift) + mb_y * sl->linesize) * 16;
2411  dest_cb = h->cur_pic.f->data[1] +
2412  (mb_x << pixel_shift) * (8 << CHROMA444(h)) +
2413  mb_y * sl->uvlinesize * block_h;
2414  dest_cr = h->cur_pic.f->data[2] +
2415  (mb_x << pixel_shift) * (8 << CHROMA444(h)) +
2416  mb_y * sl->uvlinesize * block_h;
2417  // FIXME simplify above
2418 
 /* Field MBs use doubled strides; odd rows start one line up. */
2419  if (MB_FIELD(sl)) {
2420  linesize = sl->mb_linesize = sl->linesize * 2;
2421  uvlinesize = sl->mb_uvlinesize = sl->uvlinesize * 2;
2422  if (mb_y & 1) { // FIXME move out of this function?
2423  dest_y -= sl->linesize * 15;
2424  dest_cb -= sl->uvlinesize * (block_h - 1);
2425  dest_cr -= sl->uvlinesize * (block_h - 1);
2426  }
2427  } else {
2428  linesize = sl->mb_linesize = sl->linesize;
2429  uvlinesize = sl->mb_uvlinesize = sl->uvlinesize;
2430  }
2431  backup_mb_border(h, sl, dest_y, dest_cb, dest_cr, linesize,
2432  uvlinesize, 0);
 /* Non-zero return means the filter is a guaranteed no-op here. */
2433  if (fill_filter_caches(h, sl, mb_type))
2434  continue;
2435  sl->chroma_qp[0] = get_chroma_qp(h->ps.pps, 0, h->cur_pic.qscale_table[mb_xy]);
2436  sl->chroma_qp[1] = get_chroma_qp(h->ps.pps, 1, h->cur_pic.qscale_table[mb_xy]);
2437 
2438  if (FRAME_MBAFF(h)) {
2439  ff_h264_filter_mb(h, sl, mb_x, mb_y, dest_y, dest_cb, dest_cr,
2440  linesize, uvlinesize);
2441  } else {
2442  ff_h264_filter_mb_fast(h, sl, mb_x, mb_y, dest_y, dest_cb,
2443  dest_cr, linesize, uvlinesize);
2444  }
2445  }
2446  }
 /* Restore state modified by the loop above. */
2447  sl->slice_type = old_slice_type;
2448  sl->mb_x = end_x;
2449  sl->mb_y = end_mb_y - FRAME_MBAFF(h);
2450  sl->chroma_qp[0] = get_chroma_qp(h->ps.pps, 0, sl->qscale);
2451  sl->chroma_qp[1] = get_chroma_qp(h->ps.pps, 1, sl->qscale);
2452 }
2453 
/* NOTE(review): the signature line (original line 2454) was dropped by the
 * doc generator.  Predicts the MBAFF field-decoding flag for the current MB
 * pair from the left neighbour if it is in the same slice, else from the
 * top neighbour, defaulting to frame coding. */
2455 {
2456  const int mb_xy = sl->mb_x + sl->mb_y * h->mb_stride;
2457  int mb_type = (h->slice_table[mb_xy - 1] == sl->slice_num) ?
2458  h->cur_pic.mb_type[mb_xy - 1] :
2459  (h->slice_table[mb_xy - h->mb_stride] == sl->slice_num) ?
2460  h->cur_pic.mb_type[mb_xy - h->mb_stride] : 0;
2461  sl->mb_mbaff = sl->mb_field_decoding_flag = IS_INTERLACED(mb_type) ? 1 : 0;
2462 }
2463 
2464 /**
2465  * Draw edges and report progress for the last MB row.
2466  */
/* NOTE(review): the signature line (original line 2467) was dropped by the
 * doc generator — presumably
 * "static void decode_finish_row(const H264Context *h, H264SliceContext *sl)". */
2468 {
2469  int top = 16 * (sl->mb_y >> FIELD_PICTURE(h));
2470  int pic_height = 16 * h->mb_height >> FIELD_PICTURE(h);
2471  int height = 16 << FRAME_MBAFF(h);
2472  int deblock_border = (16 + 4) << FRAME_MBAFF(h);
2473 
 /* When deblocking, the band that can be reported as finished lags by
  * the filter's reach; extend at the picture bottom. */
2474  if (sl->deblocking_filter) {
2475  if ((top + height) >= pic_height)
2476  height += deblock_border;
2477  top -= deblock_border;
2478  }
2479 
2480  if (top >= pic_height || (top + height) < 0)
2481  return;
2482 
 /* Clip the band to the picture. */
2483  height = FFMIN(height, pic_height - top);
2484  if (top < 0) {
2485  height = top + height;
2486  top = 0;
2487  }
2488 
2489  ff_h264_draw_horiz_band(h, sl, top, height);
2490 
 /* Progress is not reported for droppable frames or after errors. */
2491  if (h->droppable || h->er.error_occurred)
2492  return;
2493 
2494  ff_thread_report_progress(&h->cur_pic_ptr->tf, top + height - 1,
2495  h->picture_structure == PICT_BOTTOM_FIELD);
2496 }
2497 
/* NOTE(review): the first signature line (original line 2498) was dropped by
 * the doc generator — per the index at the bottom of this listing it is
 * "static void er_add_slice(H264SliceContext *sl,".  Thin wrapper around
 * ff_er_add_slice() that is a no-op when error resilience is disabled. */
2499  int startx, int starty,
2500  int endx, int endy, int status)
2501 {
2502  if (!sl->h264->enable_er)
2503  return;
2504 
2505  if (CONFIG_ERROR_RESILIENCE) {
2506  ff_er_add_slice(sl->er, startx, starty, endx, endy, status);
2507  }
2508 }
2509 
/* Decodes one slice's macroblocks (CABAC or CAVLC path), running the loop
 * filter incrementally per MB row unless filtering is postponed.  Runs as an
 * avctx->execute() job, one per queued slice context.
 * NOTE(review): rendered listing — a few call-site lines were dropped by the
 * doc generator (originals 2550, 2556, 2613, 2670); code bytes kept as-is. */
2510 static int decode_slice(struct AVCodecContext *avctx, void *arg)
2511 {
2512  H264SliceContext *sl = arg;
2513  const H264Context *h = sl->h264;
2514  int lf_x_start = sl->mb_x;
2515  int orig_deblock = sl->deblocking_filter;
2516  int ret;
2517 
2518  sl->linesize = h->cur_pic_ptr->f->linesize[0];
2519  sl->uvlinesize = h->cur_pic_ptr->f->linesize[1];
2520 
2521  ret = alloc_scratch_buffers(sl, sl->linesize);
2522  if (ret < 0)
2523  return ret;
2524 
2525  sl->mb_skip_run = -1;
2526 
2527  av_assert0(h->block_offset[15] == (4 * ((scan8[15] - scan8[0]) & 7) << h->pixel_shift) + 4 * sl->linesize * ((scan8[15] - scan8[0]) >> 3));
2528 
 /* Filtering deferred to the caller once all slices are decoded. */
2529  if (h->postpone_filter)
2530  sl->deblocking_filter = 0;
2531 
2532  sl->is_complex = FRAME_MBAFF(h) || h->picture_structure != PICT_FRAME ||
2533  (CONFIG_GRAY && (h->flags & AV_CODEC_FLAG_GRAY));
2534 
 /* If the MB before this slice's resync point was not fully decoded, the
  * preceding slice was damaged or missing — mark the error state. */
2535  if (!(h->avctx->active_thread_type & FF_THREAD_SLICE) && h->picture_structure == PICT_FRAME && sl->er->error_status_table) {
2536  const int start_i = av_clip(sl->resync_mb_x + sl->resync_mb_y * h->mb_width, 0, h->mb_num - 1);
2537  if (start_i) {
2538  int prev_status = sl->er->error_status_table[sl->er->mb_index2xy[start_i - 1]];
2539  prev_status &= ~ VP_START;
2540  if (prev_status != (ER_MV_END | ER_DC_END | ER_AC_END))
2541  sl->er->error_occurred = 1;
2542  }
2543  }
2544 
2545  if (h->ps.pps->cabac) {
2546  /* realign */
2547  align_get_bits(&sl->gb);
2548 
2549  /* init cabac */
 /* NOTE(review): the decoder-init call name (original line 2550) was
  * dropped by the renderer; these are its buffer/size arguments. */
2551  sl->gb.buffer + get_bits_count(&sl->gb) / 8,
2552  (get_bits_left(&sl->gb) + 7) / 8);
2553  if (ret < 0)
2554  return ret;
2555 
2557 
2558  for (;;) {
2559  int ret, eos;
 /* Guard against a malformed stream running into the next slice. */
2560  if (sl->mb_x + sl->mb_y * h->mb_width >= sl->next_slice_idx) {
2561  av_log(h->avctx, AV_LOG_ERROR, "Slice overlaps with next at %d\n",
2562  sl->next_slice_idx);
2563  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2564  sl->mb_y, ER_MB_ERROR);
2565  return AVERROR_INVALIDDATA;
2566  }
2567 
2568  ret = ff_h264_decode_mb_cabac(h, sl);
2569 
2570  if (ret >= 0)
2571  ff_h264_hl_decode_mb(h, sl);
2572 
2573  // FIXME optimal? or let mb_decode decode 16x32 ?
 /* MBAFF: decode the bottom MB of the pair immediately. */
2574  if (ret >= 0 && FRAME_MBAFF(h)) {
2575  sl->mb_y++;
2576 
2577  ret = ff_h264_decode_mb_cabac(h, sl);
2578 
2579  if (ret >= 0)
2580  ff_h264_hl_decode_mb(h, sl);
2581  sl->mb_y--;
2582  }
2583  eos = get_cabac_terminate(&sl->cabac);
2584 
 /* Workaround for encoders that truncate the final CABAC bytes. */
2585  if ((h->workaround_bugs & FF_BUG_TRUNCATED) &&
2586  sl->cabac.bytestream > sl->cabac.bytestream_end + 2) {
2587  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x - 1,
2588  sl->mb_y, ER_MB_END);
2589  if (sl->mb_x >= lf_x_start)
2590  loop_filter(h, sl, lf_x_start, sl->mb_x + 1);
2591  goto finish;
2592  }
2593  if (sl->cabac.bytestream > sl->cabac.bytestream_end + 2 )
2594  av_log(h->avctx, AV_LOG_DEBUG, "bytestream overread %"PTRDIFF_SPECIFIER"\n", sl->cabac.bytestream_end - sl->cabac.bytestream);
2595  if (ret < 0 || sl->cabac.bytestream > sl->cabac.bytestream_end + 4) {
2596  av_log(h->avctx, AV_LOG_ERROR,
2597  "error while decoding MB %d %d, bytestream %"PTRDIFF_SPECIFIER"\n",
2598  sl->mb_x, sl->mb_y,
2599  sl->cabac.bytestream_end - sl->cabac.bytestream);
2600  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2601  sl->mb_y, ER_MB_ERROR);
2602  return AVERROR_INVALIDDATA;
2603  }
2604 
 /* End of MB row: filter it, report progress, advance to next row. */
2605  if (++sl->mb_x >= h->mb_width) {
2606  loop_filter(h, sl, lf_x_start, sl->mb_x);
2607  sl->mb_x = lf_x_start = 0;
2608  decode_finish_row(h, sl);
2609  ++sl->mb_y;
2610  if (FIELD_OR_MBAFF_PICTURE(h)) {
2611  ++sl->mb_y;
 /* NOTE(review): the predict-field-decoding-flag call (original
  * line 2613) was dropped by the renderer. */
2612  if (FRAME_MBAFF(h) && sl->mb_y < h->mb_height)
2614  }
2615  }
2616 
2617  if (eos || sl->mb_y >= h->mb_height) {
2618  ff_tlog(h->avctx, "slice end %d %d\n",
2619  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2620  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x - 1,
2621  sl->mb_y, ER_MB_END);
2622  if (sl->mb_x > lf_x_start)
2623  loop_filter(h, sl, lf_x_start, sl->mb_x);
2624  goto finish;
2625  }
2626  }
2627  } else {
 /* CAVLC path. */
2628  for (;;) {
2629  int ret;
2630 
2631  if (sl->mb_x + sl->mb_y * h->mb_width >= sl->next_slice_idx) {
2632  av_log(h->avctx, AV_LOG_ERROR, "Slice overlaps with next at %d\n",
2633  sl->next_slice_idx);
2634  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2635  sl->mb_y, ER_MB_ERROR);
2636  return AVERROR_INVALIDDATA;
2637  }
2638 
2639  ret = ff_h264_decode_mb_cavlc(h, sl);
2640 
2641  if (ret >= 0)
2642  ff_h264_hl_decode_mb(h, sl);
2643 
2644  // FIXME optimal? or let mb_decode decode 16x32 ?
2645  if (ret >= 0 && FRAME_MBAFF(h)) {
2646  sl->mb_y++;
2647  ret = ff_h264_decode_mb_cavlc(h, sl);
2648 
2649  if (ret >= 0)
2650  ff_h264_hl_decode_mb(h, sl);
2651  sl->mb_y--;
2652  }
2653 
2654  if (ret < 0) {
2655  av_log(h->avctx, AV_LOG_ERROR,
2656  "error while decoding MB %d %d\n", sl->mb_x, sl->mb_y);
2657  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2658  sl->mb_y, ER_MB_ERROR);
2659  return ret;
2660  }
2661 
2662  if (++sl->mb_x >= h->mb_width) {
2663  loop_filter(h, sl, lf_x_start, sl->mb_x);
2664  sl->mb_x = lf_x_start = 0;
2665  decode_finish_row(h, sl);
2666  ++sl->mb_y;
2667  if (FIELD_OR_MBAFF_PICTURE(h)) {
2668  ++sl->mb_y;
 /* NOTE(review): the predict-field-decoding-flag call (original
  * line 2670) was dropped by the renderer. */
2669  if (FRAME_MBAFF(h) && sl->mb_y < h->mb_height)
2671  }
2672  if (sl->mb_y >= h->mb_height) {
2673  ff_tlog(h->avctx, "slice end %d %d\n",
2674  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2675 
 /* Trailing bits are tolerated unless AV_EF_AGGRESSIVE is set. */
2676  if ( get_bits_left(&sl->gb) == 0
2677  || get_bits_left(&sl->gb) > 0 && !(h->avctx->err_recognition & AV_EF_AGGRESSIVE)) {
2678  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2679  sl->mb_x - 1, sl->mb_y, ER_MB_END);
2680 
2681  goto finish;
2682  } else {
2683  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2684  sl->mb_x, sl->mb_y, ER_MB_END);
2685 
2686  return AVERROR_INVALIDDATA;
2687  }
2688  }
2689  }
2690 
 /* Out of bits mid-picture: clean end only if exactly exhausted and no
  * pending skip run; otherwise the slice is damaged. */
2691  if (get_bits_left(&sl->gb) <= 0 && sl->mb_skip_run <= 0) {
2692  ff_tlog(h->avctx, "slice end %d %d\n",
2693  get_bits_count(&sl->gb), sl->gb.size_in_bits);
2694 
2695  if (get_bits_left(&sl->gb) == 0) {
2696  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
2697  sl->mb_x - 1, sl->mb_y, ER_MB_END);
2698  if (sl->mb_x > lf_x_start)
2699  loop_filter(h, sl, lf_x_start, sl->mb_x);
2700 
2701  goto finish;
2702  } else {
2703  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
2704  sl->mb_y, ER_MB_ERROR);
2705 
2706  return AVERROR_INVALIDDATA;
2707  }
2708  }
2709  }
2710  }
2711 
2712 finish:
2713  sl->deblocking_filter = orig_deblock;
2714  return 0;
2715 }
2716 
2717 /**
2718  * Call decode_slice() for each context.
2719  *
2720  * @param h h264 master context
2721  */
/* NOTE(review): the signature line (original line 2722) was dropped by the
 * doc generator — presumably
 * "int ff_h264_execute_decode_slices(H264Context *h)". */
2723 {
2724  AVCodecContext *const avctx = h->avctx;
2725  H264SliceContext *sl;
2726  int context_count = h->nb_slice_ctx_queued;
2727  int ret = 0;
2728  int i, j;
2729 
2730  h->slice_ctx[0].next_slice_idx = INT_MAX;
2731 
 /* Hardware acceleration decodes elsewhere; nothing to do. */
2732  if (h->avctx->hwaccel || context_count < 1)
2733  return 0;
2734 
2735  av_assert0(context_count && h->slice_ctx[context_count - 1].mb_y < h->mb_height);
2736 
2737  if (context_count == 1) {
2738 
 /* Single slice context: decode directly on this thread. */
2739  h->slice_ctx[0].next_slice_idx = h->mb_width * h->mb_height;
2740  h->postpone_filter = 0;
2741 
2742  ret = decode_slice(avctx, &h->slice_ctx[0]);
2743  h->mb_y = h->slice_ctx[0].mb_y;
2744  if (ret < 0)
2745  goto finish;
2746  } else {
2747  av_assert0(context_count > 0);
 /* Compute each slice's end boundary (the start of the nearest
  * following slice) so overlaps can be detected while decoding. */
2748  for (i = 0; i < context_count; i++) {
2749  int next_slice_idx = h->mb_width * h->mb_height;
2750  int slice_idx;
2751 
2752  sl = &h->slice_ctx[i];
2753 
2754  /* make sure none of those slices overlap */
2755  slice_idx = sl->mb_y * h->mb_width + sl->mb_x;
2756  for (j = 0; j < context_count; j++) {
2757  H264SliceContext *sl2 = &h->slice_ctx[j];
2758  int slice_idx2 = sl2->mb_y * h->mb_width + sl2->mb_x;
2759 
2760  if (i == j || slice_idx2 < slice_idx)
2761  continue;
2762  next_slice_idx = FFMIN(next_slice_idx, slice_idx2);
2763  }
2764  sl->next_slice_idx = next_slice_idx;
2765  }
2766 
 /* Decode all queued slices in parallel. */
2767  avctx->execute(avctx, decode_slice, h->slice_ctx,
2768  NULL, context_count, sizeof(h->slice_ctx[0]));
2769 
2770  /* pull back stuff from slices to master context */
2771  sl = &h->slice_ctx[context_count - 1];
2772  h->mb_y = sl->mb_y;
2773 
 /* Deblocking was postponed (cross-slice dependencies): run it now
  * over each slice's decoded rows. */
2774  if (h->postpone_filter) {
2775  h->postpone_filter = 0;
2776 
2777  for (i = 0; i < context_count; i++) {
2778  int y_end, x_end;
2779 
2780  sl = &h->slice_ctx[i];
2781  y_end = FFMIN(sl->mb_y + 1, h->mb_height);
2782  x_end = (sl->mb_y >= h->mb_height) ? h->mb_width : sl->mb_x;
2783 
2784  for (j = sl->resync_mb_y; j < y_end; j += 1 + FIELD_OR_MBAFF_PICTURE(h)) {
2785  sl->mb_y = j;
2786  loop_filter(h, sl, j > sl->resync_mb_y ? 0 : sl->resync_mb_x,
2787  j == y_end - 1 ? x_end : h->mb_width);
2788  }
2789  }
2790  }
2791  }
2792 
2793 finish:
2794  h->nb_slice_ctx_queued = 0;
2795  return ret;
2796 }
PICT_FRAME
#define PICT_FRAME
Definition: mpegutils.h:38
er_add_slice
static void er_add_slice(H264SliceContext *sl, int startx, int starty, int endx, int endy, int status)
Definition: h264_slice.c:2498
ff_h264_filter_mb_fast
void ff_h264_filter_mb_fast(const H264Context *h, H264SliceContext *sl, int mb_x, int mb_y, uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr, unsigned int linesize, unsigned int uvlinesize)
Definition: h264_loopfilter.c:416
h264_slice_header_init
static int h264_slice_header_init(H264Context *h)
Definition: h264_slice.c:930
implicit_weight_table
static void implicit_weight_table(const H264Context *h, H264SliceContext *sl, int field)
Initialize implicit_weight table.
Definition: h264_slice.c:674
H264SliceContext::mb_xy
int mb_xy
Definition: h264dec.h:224
ff_h264_unref_picture
void ff_h264_unref_picture(H264Context *h, H264Picture *pic)
Definition: h264_picture.c:36
av_buffer_pool_init
AVBufferPool * av_buffer_pool_init(size_t size, AVBufferRef *(*alloc)(size_t size))
Allocate and initialize a buffer pool.
Definition: buffer.c:280
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AV_TIMECODE_STR_SIZE
#define AV_TIMECODE_STR_SIZE
Definition: timecode.h:33
AV_PIX_FMT_CUDA
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
Definition: pixfmt.h:253
td
#define td
Definition: regdef.h:70
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
H264SliceContext::ref_cache
int8_t ref_cache[2][5 *8]
Definition: h264dec.h:292
status
they must not be accessed directly The fifo field contains the frames that are queued in the input for processing by the filter The status_in and status_out fields contains the queued status(EOF or error) of the link
ff_h264_free_tables
void ff_h264_free_tables(H264Context *h)
Definition: h264dec.c:134
AV_EF_EXPLODE
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: defs.h:51
av_clip
#define av_clip
Definition: common.h:95
h264_init_ps
static int h264_init_ps(H264Context *h, const H264SliceContext *sl, int first_slice)
Definition: h264_slice.c:1023
H264SliceContext::max_pic_num
int max_pic_num
Definition: h264dec.h:324
H264SliceContext::nb_mmco
int nb_mmco
Definition: h264dec.h:315
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:839
CHROMA422
#define CHROMA422(h)
Definition: h264dec.h:92
FF_BUG_TRUNCATED
#define FF_BUG_TRUNCATED
Definition: avcodec.h:1294
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
cabac.h
H264Picture::poc
int poc
frame POC
Definition: h264dec.h:129
h264_export_frame_props
static int h264_export_frame_props(H264Context *h)
Definition: h264_slice.c:1142
ff_h264_sei_ctx_replace
static int ff_h264_sei_ctx_replace(H264SEIContext *dst, const H264SEIContext *src)
Definition: h264_sei.h:132
ff_thread_release_ext_buffer
void ff_thread_release_ext_buffer(AVCodecContext *avctx, ThreadFrame *f)
Unref a ThreadFrame.
Definition: pthread_frame.c:1178
H264Picture::f
AVFrame * f
Definition: h264dec.h:108
out
FILE * out
Definition: movenc.c:54
cb
static double cb(void *priv, double x, double y)
Definition: vf_geq.c:239
av_frame_new_side_data
AVFrameSideData * av_frame_new_side_data(AVFrame *frame, enum AVFrameSideDataType type, size_t size)
Add a new side data to a frame.
Definition: frame.c:679
av_clip_int8
#define av_clip_int8
Definition: common.h:104
zigzag_scan8x8_cavlc
static const uint8_t zigzag_scan8x8_cavlc[64+1]
Definition: h264_slice.c:99
ff_h264_replace_picture
int ff_h264_replace_picture(H264Context *h, H264Picture *dst, const H264Picture *src)
Definition: h264_picture.c:145
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
ff_thread_can_start_frame
int ff_thread_can_start_frame(AVCodecContext *avctx)
Definition: pthread_frame.c:971
ff_h2645_sei_to_frame
int ff_h2645_sei_to_frame(AVFrame *frame, H2645SEI *sei, enum AVCodecID codec_id, AVCodecContext *avctx, const H2645VUI *vui, unsigned bit_depth_luma, unsigned bit_depth_chroma, int seed)
Definition: h2645_sei.c:430
H264Picture::ref_index
int8_t * ref_index[2]
Definition: h264dec.h:126
HWACCEL_MAX
#define HWACCEL_MAX
AVFrame::coded_picture_number
int coded_picture_number
picture number in bitstream order
Definition: frame.h:452
MB_MBAFF
#define MB_MBAFF(h)
Definition: h264dec.h:65
H264SliceContext::mvd_table
uint8_t(*[2] mvd_table)[2]
Definition: h264dec.h:305
ff_h264_set_erpic
void ff_h264_set_erpic(ERPicture *dst, H264Picture *src)
Definition: h264_picture.c:196
get_bits_count
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:219
H264_SEI_PIC_STRUCT_TOP_BOTTOM
@ H264_SEI_PIC_STRUCT_TOP_BOTTOM
3: top field, bottom field, in that order
Definition: h264_sei.h:35
H264Picture::pps
const PPS * pps
Definition: h264dec.h:153
AV_FRAME_DATA_S12M_TIMECODE
@ AV_FRAME_DATA_S12M_TIMECODE
Timecode which conforms to SMPTE ST 12-1.
Definition: frame.h:152
av_mod_uintp2
#define av_mod_uintp2
Definition: common.h:122
GetBitContext::size_in_bits
int size_in_bits
Definition: get_bits.h:68
H2645NAL::ref_idc
int ref_idc
H.264 only, nal_ref_idc.
Definition: h2645_parse.h:57
ff_h264_slice_context_init
void ff_h264_slice_context_init(H264Context *h, H264SliceContext *sl)
Init slice context.
Definition: h264dec.c:258
ERContext::mb_index2xy
int * mb_index2xy
Definition: error_resilience.h:59
predict_field_decoding_flag
static void predict_field_decoding_flag(const H264Context *h, H264SliceContext *sl)
Definition: h264_slice.c:2454
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:325
tmp
static uint8_t tmp[11]
Definition: aes_ctr.c:28
pixdesc.h
AVFrame::width
int width
Definition: frame.h:397
w
uint8_t w
Definition: llviddspenc.c:38
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:661
get_ue_golomb
static int get_ue_golomb(GetBitContext *gb)
Read an unsigned Exp-Golomb code in the range 0 to 8190.
Definition: golomb.h:53
internal.h
ff_h264_update_thread_context
int ff_h264_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
Definition: h264_slice.c:308
alloc_scratch_buffers
static int alloc_scratch_buffers(H264SliceContext *sl, int linesize)
Definition: h264_slice.c:131
AVCOL_TRC_UNSPECIFIED
@ AVCOL_TRC_UNSPECIFIED
Definition: pixfmt.h:561
FRAME_RECOVERED_IDR
#define FRAME_RECOVERED_IDR
We have seen an IDR, so all the following frames in coded order are correctly decodable.
Definition: h264dec.h:516
decode_finish_row
static void decode_finish_row(const H264Context *h, H264SliceContext *sl)
Draw edges and report progress for the last MB row.
Definition: h264_slice.c:2467
H264SliceContext::ref_count
unsigned int ref_count[2]
num_ref_idx_l0/1_active_minus1 + 1
Definition: h264dec.h:260
FF_COMPLIANCE_STRICT
#define FF_COMPLIANCE_STRICT
Strictly conform to all the things in the spec no matter what consequences.
Definition: defs.h:59
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:459
ff_er_frame_start
void ff_er_frame_start(ERContext *s)
Definition: error_resilience.c:787
H264Picture::qscale_table
int8_t * qscale_table
Definition: h264dec.h:114
H264SliceContext::left_mb_xy
int left_mb_xy[LEFT_MBS]
Definition: h264dec.h:204
AV_PIX_FMT_D3D11VA_VLD
@ AV_PIX_FMT_D3D11VA_VLD
HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView p...
Definition: pixfmt.h:247
H264PredWeightTable::use_weight_chroma
int use_weight_chroma
Definition: h264_parse.h:71
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
PICT_BOTTOM_FIELD
#define PICT_BOTTOM_FIELD
Definition: mpegutils.h:37
AVCOL_SPC_RGB
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
Definition: pixfmt.h:588
AV_WN32A
#define AV_WN32A(p, v)
Definition: intreadwrite.h:538
ff_h264_update_thread_context_for_user
int ff_h264_update_thread_context_for_user(AVCodecContext *dst, const AVCodecContext *src)
Definition: h264_slice.c:458
ff_er_add_slice
void ff_er_add_slice(ERContext *s, int startx, int starty, int endx, int endy, int status)
Add a slice.
Definition: error_resilience.c:822
H264Picture::ref_index_buf
AVBufferRef * ref_index_buf[2]
Definition: h264dec.h:125
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
ff_h264_pred_weight_table
int ff_h264_pred_weight_table(GetBitContext *gb, const SPS *sps, const int *ref_count, int slice_type_nos, H264PredWeightTable *pwt, int picture_structure, void *logctx)
Definition: h264_parse.c:29
FRAME_RECOVERED_SEI
#define FRAME_RECOVERED_SEI
Sufficient number of frames have been decoded since a SEI recovery point, so all the following frames...
Definition: h264dec.h:521
H264SliceContext::is_complex
int is_complex
Definition: h264dec.h:231
ER_DC_END
#define ER_DC_END
Definition: error_resilience.h:35
ff_h264_decode_ref_pic_list_reordering
int ff_h264_decode_ref_pic_list_reordering(H264SliceContext *sl, void *logctx)
Definition: h264_refs.c:422
mpegutils.h
AVFrame::buf
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:525
H264Picture::invalid_gap
int invalid_gap
Definition: h264dec.h:148
av_timecode_get_smpte
uint32_t av_timecode_get_smpte(AVRational rate, int drop, int hh, int mm, int ss, int ff)
Convert sei info to SMPTE 12M binary representation.
Definition: timecode.c:69
H264Picture::pps_buf
AVBufferRef * pps_buf
Definition: h264dec.h:152
thread.h
ff_thread_await_progress
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before ff_thread_await_progress() has been called on them. reget_buffer() and buffer age optimizations no longer work. *The contents of buffers must not be written to after ff_thread_report_progress() has been called on them. This includes draw_edges(). Porting codecs to frame threading
ThreadFrame::f
AVFrame * f
Definition: threadframe.h:28
FF_DEBUG_PICT_INFO
#define FF_DEBUG_PICT_INFO
Definition: avcodec.h:1328
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:346
H264SliceContext::mb_x
int mb_x
Definition: h264dec.h:223
H264Picture::frame_num
int frame_num
frame_num (raw frame_num from slice header)
Definition: h264dec.h:130
H264SliceContext::next_slice_idx
int next_slice_idx
Definition: h264dec.h:229
H264SliceContext
Definition: h264dec.h:170
fill_filter_caches_inter
static av_always_inline void fill_filter_caches_inter(const H264Context *h, H264SliceContext *sl, int mb_type, int top_xy, int left_xy[LEFT_MBS], int top_type, int left_type[LEFT_MBS], int mb_xy, int list)
Definition: h264_slice.c:2158
golomb.h
exp golomb vlc stuff
MB_FIELD
#define MB_FIELD(sl)
Definition: h264dec.h:66
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:379
ff_h264_filter_mb
void ff_h264_filter_mb(const H264Context *h, H264SliceContext *sl, int mb_x, int mb_y, uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr, unsigned int linesize, unsigned int uvlinesize)
Definition: h264_loopfilter.c:716
H264SliceContext::mv_cache
int16_t mv_cache[2][5 *8][2]
Motion vector cache.
Definition: h264dec.h:291
AV_CODEC_FLAG_OUTPUT_CORRUPT
#define AV_CODEC_FLAG_OUTPUT_CORRUPT
Output even those frames that might be corrupted.
Definition: avcodec.h:224
AVHWAccel
Definition: avcodec.h:2070
AV_PIX_FMT_GBRP14
#define AV_PIX_FMT_GBRP14
Definition: pixfmt.h:477
finish
static void finish(void)
Definition: movenc.c:342
get_chroma_qp
static av_always_inline int get_chroma_qp(const PPS *pps, int t, int qscale)
Get the chroma qp.
Definition: h264dec.h:647
H264Picture::mmco_reset
int mmco_reset
MMCO_RESET set this 1.
Definition: h264dec.h:131
fail
#define fail()
Definition: checkasm.h:134
copy_picture_range
static void copy_picture_range(H264Picture **to, H264Picture **from, int count, H264Context *new_base, H264Context *old_base)
Definition: h264_slice.c:292
AV_PIX_FMT_GBRP10
#define AV_PIX_FMT_GBRP10
Definition: pixfmt.h:475
timecode.h
h264_select_output_frame
static int h264_select_output_frame(H264Context *h)
Definition: h264_slice.c:1265
ff_thread_get_buffer
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before as well as code calling up to before the decode process starts Call have so the codec calls ff_thread_report set FF_CODEC_CAP_ALLOCATE_PROGRESS in AVCodec caps_internal and use ff_thread_get_buffer() to allocate frames. The frames must then be freed with ff_thread_release_buffer(). Otherwise decode directly into the user-supplied frames. Call ff_thread_report_progress() after some part of the current picture has decoded. A good place to put this is where draw_horiz_band() is called - add this if it isn 't called anywhere
AV_PIX_FMT_YUV422P9
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:457
USES_LIST
#define USES_LIST(a, list)
Definition: mpegutils.h:92
CABACContext::bytestream
const uint8_t * bytestream
Definition: cabac.h:45
AVFrame::key_frame
int key_frame
1 -> keyframe, 0-> not
Definition: frame.h:417
av_pix_fmt_get_chroma_sub_sample
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:2886
ff_videodsp_init
av_cold void ff_videodsp_init(VideoDSPContext *ctx, int bpc)
Definition: videodsp.c:39
H264Picture::mb_stride
int mb_stride
Definition: h264dec.h:156
IN_RANGE
#define IN_RANGE(a, b, size)
Definition: h264_slice.c:285
scan8
static const uint8_t scan8[16 *3+3]
Definition: h264_parse.h:40
ff_h264_flush_change
void ff_h264_flush_change(H264Context *h)
Definition: h264dec.c:445
ff_h264qpel_init
av_cold void ff_h264qpel_init(H264QpelContext *c, int bit_depth)
Definition: h264qpel.c:49
ff_h264_sei_process_picture_timing
int ff_h264_sei_process_picture_timing(H264SEIPictureTiming *h, const SPS *sps, void *logctx)
Parse the contents of a picture timing message given an active SPS.
Definition: h264_sei.c:65
h264_frame_start
static int h264_frame_start(H264Context *h)
Definition: h264_slice.c:470
H264SliceContext::deblocking_filter
int deblocking_filter
disable_deblocking_filter_idc with 1 <-> 0
Definition: h264dec.h:186
H264PredWeightTable::luma_log2_weight_denom
int luma_log2_weight_denom
Definition: h264_parse.h:72
ss
#define ss(width, name, subs,...)
Definition: cbs_vp9.c:260
H264Picture::f_grain
AVFrame * f_grain
Definition: h264dec.h:111
H264SliceContext::picture_structure
int picture_structure
Definition: h264dec.h:233
ff_h264_golomb_to_pict_type
const uint8_t ff_h264_golomb_to_pict_type[5]
Definition: h264data.c:37
release_unused_pictures
static void release_unused_pictures(H264Context *h, int remove_current)
Definition: h264_slice.c:118
H264PredWeightTable::use_weight
int use_weight
Definition: h264_parse.h:70
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
H264SliceContext::direct_spatial_mv_pred
int direct_spatial_mv_pred
Definition: h264dec.h:244
H264SliceContext::slice_num
int slice_num
Definition: h264dec.h:175
pack16to32
static av_always_inline uint32_t pack16to32(unsigned a, unsigned b)
Definition: h264_parse.h:127
non_j_pixfmt
static enum AVPixelFormat non_j_pixfmt(enum AVPixelFormat a)
Definition: h264_slice.c:1012
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:462
ff_h264_init_cabac_states
void ff_h264_init_cabac_states(const H264Context *h, H264SliceContext *sl)
Definition: h264_cabac.c:1262
ff_h264_hl_decode_mb
void ff_h264_hl_decode_mb(const H264Context *h, H264SliceContext *sl)
Definition: h264_mb.c:799
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
film_grain_params.h
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
ff_color_frame
void ff_color_frame(AVFrame *frame, const int color[4])
Definition: utils.c:403
ff_thread_report_progress
void ff_thread_report_progress(ThreadFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
Definition: pthread_frame.c:624
AV_PIX_FMT_YUVJ422P
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:79
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:384
ff_h264_queue_decode_slice
int ff_h264_queue_decode_slice(H264Context *h, const H2645NAL *nal)
Submit a slice for decoding.
Definition: h264_slice.c:2015
width
#define width
H264Context::DPB
H264Picture DPB[H264_MAX_PICTURE_COUNT]
Definition: h264dec.h:339
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:127
H264PredWeightTable::chroma_log2_weight_denom
int chroma_log2_weight_denom
Definition: h264_parse.h:73
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:50
AV_ZERO32
#define AV_ZERO32(d)
Definition: intreadwrite.h:629
AV_GET_BUFFER_FLAG_REF
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:376
FIELD_PICTURE
#define FIELD_PICTURE(h)
Definition: h264dec.h:68
ff_h264_execute_ref_pic_marking
int ff_h264_execute_ref_pic_marking(H264Context *h)
Execute the reference picture marking (memory management control operations).
Definition: h264_refs.c:609
H264_MAX_DPB_FRAMES
@ H264_MAX_DPB_FRAMES
Definition: h264.h:76
ff_h264_decode_ref_pic_marking
int ff_h264_decode_ref_pic_marking(H264SliceContext *sl, GetBitContext *gb, const H2645NAL *nal, void *logctx)
Definition: h264_refs.c:833
from
const char * from
Definition: jacosubdec.c:66
to
const char * to
Definition: webvttdec.c:35
h264_slice_header_parse
static int h264_slice_header_parse(const H264Context *h, H264SliceContext *sl, const H2645NAL *nal)
Definition: h264_slice.c:1660
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
H264PredWeightTable::chroma_weight_flag
int chroma_weight_flag[2]
7.4.3.2 chroma_weight_lX_flag
Definition: h264_parse.h:75
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:296
h264data.h
AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:456
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
H264Ref::parent
H264Picture * parent
Definition: h264dec.h:167
PICT_TOP_FIELD
#define PICT_TOP_FIELD
Definition: mpegutils.h:36
decode.h
field_scan8x8_cavlc
static const uint8_t field_scan8x8_cavlc[64+1]
Definition: h264_slice.c:79
H264SliceContext::slice_alpha_c0_offset
int slice_alpha_c0_offset
Definition: h264dec.h:187
IS_INTRA
#define IS_INTRA(x, y)
field
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
Definition: writing_filters.txt:78
AVFrame::crop_right
size_t crop_right
Definition: frame.h:688
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
H264SliceContext::slice_type
int slice_type
Definition: h264dec.h:176
H264SliceContext::resync_mb_x
int resync_mb_x
Definition: h264dec.h:225
H264Picture::sei_recovery_frame_cnt
int sei_recovery_frame_cnt
Definition: h264dec.h:149
AVDISCARD_BIDIR
@ AVDISCARD_BIDIR
discard all bidirectional frames
Definition: defs.h:73
get_se_golomb
static int get_se_golomb(GetBitContext *gb)
read signed exp golomb code.
Definition: golomb.h:239
H2645NAL::type
int type
NAL unit type.
Definition: h2645_parse.h:52
AV_CODEC_ID_H264
@ AV_CODEC_ID_H264
Definition: codec_id.h:79
AV_PIX_FMT_YUVJ444P
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:80
H264Context::enable_er
int enable_er
Definition: h264dec.h:544
ff_h264_draw_horiz_band
void ff_h264_draw_horiz_band(const H264Context *h, H264SliceContext *sl, int y, int height)
Definition: h264dec.c:99
H264SliceContext::curr_pic_num
int curr_pic_num
Definition: h264dec.h:323
ff_thread_ref_frame
int ff_thread_ref_frame(ThreadFrame *dst, const ThreadFrame *src)
Definition: utils.c:885
arg
const char * arg
Definition: jacosubdec.c:67
FFABS
#define FFABS(a)
Absolute value, Note, INT_MIN / INT64_MIN result in undefined behavior as they are not representable ...
Definition: common.h:64
if
if(ret)
Definition: filter_design.txt:179
AVDISCARD_ALL
@ AVDISCARD_ALL
discard all
Definition: defs.h:76
threadframe.h
GetBitContext::buffer
const uint8_t * buffer
Definition: get_bits.h:62
alloc_picture
static int alloc_picture(H264Context *h, H264Picture *pic)
Definition: h264_slice.c:190
H264Picture::motion_val_buf
AVBufferRef * motion_val_buf[2]
Definition: h264dec.h:116
PTRDIFF_SPECIFIER
#define PTRDIFF_SPECIFIER
Definition: internal.h:149
NULL
#define NULL
Definition: coverity.c:32
AV_COPY128
#define AV_COPY128(d, s)
Definition: intreadwrite.h:609
AVERROR_PATCHWELCOME
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:64
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_COPY64
#define AV_COPY64(d, s)
Definition: intreadwrite.h:605
H264SliceContext::edge_emu_buffer
uint8_t * edge_emu_buffer
Definition: h264dec.h:276
SPS
Sequence parameter set.
Definition: h264_ps.h:45
TRANSPOSE
#define TRANSPOSE(x)
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:78
ER_MB_ERROR
#define ER_MB_ERROR
Definition: error_resilience.h:38
ff_h264_decode_mb_cabac
int ff_h264_decode_mb_cabac(const H264Context *h, H264SliceContext *sl)
Decode a macroblock.
Definition: h264_cabac.c:1920
AV_PICTURE_TYPE_SI
@ AV_PICTURE_TYPE_SI
Switching Intra.
Definition: avutil.h:278
H264SliceContext::chroma_qp
int chroma_qp[2]
Definition: h264dec.h:181
AV_CODEC_FLAG2_FAST
#define AV_CODEC_FLAG2_FAST
Allow non spec compliant speedup tricks.
Definition: avcodec.h:303
get_bits1
static unsigned int get_bits1(GetBitContext *s)
Definition: get_bits.h:498
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:274
av_buffer_pool_uninit
void av_buffer_pool_uninit(AVBufferPool **ppool)
Mark the pool as being available for freeing.
Definition: buffer.c:322
PPS
Picture parameter set.
Definition: h264_ps.h:105
av_fast_mallocz
void av_fast_mallocz(void *ptr, unsigned int *size, size_t min_size)
Allocate and clear a buffer, reusing the given one if large enough.
Definition: mem.c:570
ff_set_sar
int ff_set_sar(AVCodecContext *avctx, AVRational sar)
Check that the provided sample aspect ratio is valid and set it on the codec context.
Definition: utils.c:106
mathops.h
list
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining list
Definition: filter_design.txt:25
IS_INTERLACED
#define IS_INTERLACED(a)
Definition: mpegutils.h:76
H264Picture::mb_height
int mb_height
Definition: h264dec.h:155
MAX_PPS_COUNT
#define MAX_PPS_COUNT
Definition: h264_ps.h:39
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:460
H264SliceContext::qscale
int qscale
Definition: h264dec.h:180
get_pixel_format
static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
Definition: h264_slice.c:772
fill_filter_caches
static int fill_filter_caches(const H264Context *h, H264SliceContext *sl, int mb_type)
Definition: h264_slice.c:2242
ERContext::error_occurred
int error_occurred
Definition: error_resilience.h:66
AV_ZERO128
#define AV_ZERO128(d)
Definition: intreadwrite.h:637
init_scan_tables
static void init_scan_tables(H264Context *h)
initialize scan tables
Definition: h264_slice.c:738
AV_PIX_FMT_GBRP9
#define AV_PIX_FMT_GBRP9
Definition: pixfmt.h:474
H264SliceContext::top_borders_allocated
int top_borders_allocated[2]
Definition: h264dec.h:280
AV_PICTURE_TYPE_SP
@ AV_PICTURE_TYPE_SP
Switching Predicted.
Definition: avutil.h:279
FIELD_OR_MBAFF_PICTURE
#define FIELD_OR_MBAFF_PICTURE(h)
Definition: h264dec.h:85
H264SliceContext::mb_skip_run
int mb_skip_run
Definition: h264dec.h:230
h264_ps.h
init_dimensions
static void init_dimensions(H264Context *h)
Definition: h264_slice.c:890
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
H264SliceContext::top_type
int top_type
Definition: h264dec.h:207
AVFrame::crop_bottom
size_t crop_bottom
Definition: frame.h:686
H264SliceContext::resync_mb_y
int resync_mb_y
Definition: h264dec.h:226
H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM
@ H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM
6: bottom field, top field, bottom field repeated, in that order
Definition: h264_sei.h:38
DELAYED_PIC_REF
#define DELAYED_PIC_REF
Value of Picture.reference when Picture is not a reference picture, but is held for delayed output.
Definition: diracdec.c:67
H264SEIPictureTiming
Definition: h264_sei.h:54
H264SliceContext::cabac
CABACContext cabac
Cabac.
Definition: h264dec.h:310
H264SliceContext::redundant_pic_count
int redundant_pic_count
Definition: h264dec.h:237
AVFrame::crop_left
size_t crop_left
Definition: frame.h:687
AVDISCARD_NONKEY
@ AVDISCARD_NONKEY
discard all frames except keyframes
Definition: defs.h:75
AVFrame::pict_type
enum AVPictureType pict_type
Picture type of the frame.
Definition: frame.h:422
ff_zigzag_scan
const uint8_t ff_zigzag_scan[16+1]
Definition: mathtables.c:109
H264Picture::reference
int reference
Definition: h264dec.h:146
AV_CODEC_FLAG_GRAY
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:259
CABAC
#define CABAC(h)
Definition: h264_cabac.c:28
LEFT_MBS
#define LEFT_MBS
Definition: h264dec.h:69
pps
static int FUNC() pps(CodedBitstreamContext *ctx, RWContext *rw, H264RawPPS *current)
Definition: cbs_h264_syntax_template.c:404
rectangle.h
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
H264SliceContext::mb_uvlinesize
ptrdiff_t mb_uvlinesize
Definition: h264dec.h:221
VP_START
#define VP_START
< current MB is the first after a resync marker
Definition: error_resilience.h:30
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:464
H264SliceContext::pwt
H264PredWeightTable pwt
Definition: h264dec.h:190
H264Picture::tf
ThreadFrame tf
Definition: h264dec.h:109
H264Picture::mb_type
uint32_t * mb_type
Definition: h264dec.h:120
ff_h264_decode_mb_cavlc
int ff_h264_decode_mb_cavlc(const H264Context *h, H264SliceContext *sl)
Decode a macroblock.
Definition: h264_cavlc.c:695
H264_SEI_PIC_STRUCT_BOTTOM_TOP
@ H264_SEI_PIC_STRUCT_BOTTOM_TOP
4: bottom field, top field, in that order
Definition: h264_sei.h:36
H264Picture::recovered
int recovered
picture at IDR or recovery point + recovery count
Definition: h264dec.h:147
H2645NAL::gb
GetBitContext gb
Definition: h2645_parse.h:47
H264SliceContext::top_mb_xy
int top_mb_xy
Definition: h264dec.h:202
H264SliceContext::qp_thresh
int qp_thresh
QP threshold to skip loopfilter.
Definition: h264dec.h:182
H2645NAL
Definition: h2645_parse.h:34
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:466
H264SliceContext::top_borders
uint8_t(*[2] top_borders)[(16 *3) *2]
Definition: h264dec.h:277
AVFrameSideData::data
uint8_t * data
Definition: frame.h:233
h264chroma.h
FF_THREAD_SLICE
#define FF_THREAD_SLICE
Decode more than one part of a single frame at once.
Definition: avcodec.h:1478
H264SliceContext::cbp
int cbp
Definition: h264dec.h:248
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:412
H264SliceContext::left_type
int left_type[LEFT_MBS]
Definition: h264dec.h:209
ff_h264_direct_ref_list_init
void ff_h264_direct_ref_list_init(const H264Context *const h, H264SliceContext *sl)
Definition: h264_direct.c:120
H264SliceContext::mb_y
int mb_y
Definition: h264dec.h:223
H264PredWeightTable::implicit_weight
int implicit_weight[48][48][2]
Definition: h264_parse.h:79
height
#define height
decode_slice
static int decode_slice(struct AVCodecContext *avctx, void *arg)
Definition: h264_slice.c:2510
H264SliceContext::explicit_ref_marking
int explicit_ref_marking
Definition: h264dec.h:316
a
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:41
pt
int pt
Definition: rtp.c:35
H264SliceContext::uvlinesize
ptrdiff_t uvlinesize
Definition: h264dec.h:219
AVBufferRef::buffer
AVBuffer * buffer
Definition: buffer.h:83
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:333
H264SliceContext::slice_type_nos
int slice_type_nos
S free slice type (SI/SP are remapped to I/P)
Definition: h264dec.h:177
H264SliceContext::delta_poc_bottom
int delta_poc_bottom
Definition: h264dec.h:321
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:119
FRAME_MBAFF
#define FRAME_MBAFF(h)
Definition: h264dec.h:67
IS_DIRECT
#define IS_DIRECT(a)
Definition: mpegutils.h:77
H264_SEI_PIC_STRUCT_FRAME
@ H264_SEI_PIC_STRUCT_FRAME
0: frame
Definition: h264_sei.h:32
get_cabac_terminate
static int av_unused get_cabac_terminate(CABACContext *c)
Definition: cabac_functions.h:187
H264_SEI_PIC_STRUCT_FRAME_TRIPLING
@ H264_SEI_PIC_STRUCT_FRAME_TRIPLING
8: frame tripling
Definition: h264_sei.h:40
field_scan
static const uint8_t field_scan[16+1]
Definition: h264_slice.c:53
loop_filter
static void loop_filter(const H264Context *h, H264SliceContext *sl, int start_x, int end_x)
Definition: h264_slice.c:2384
ff_init_cabac_decoder
int ff_init_cabac_decoder(CABACContext *c, const uint8_t *buf, int buf_size)
Definition: cabac.c:162
H264SliceContext::mb_mbaff
int mb_mbaff
mb_aff_frame && mb_field_decoding_flag
Definition: h264dec.h:235
field_scan8x8
static const uint8_t field_scan8x8[64+1]
Definition: h264_slice.c:60
AV_PIX_FMT_VDPAU
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:187
av_get_picture_type_char
char av_get_picture_type_char(enum AVPictureType pict_type)
Return a single letter to describe the given picture type pict_type.
Definition: utils.c:40
AV_PIX_FMT_VIDEOTOOLBOX
@ AV_PIX_FMT_VIDEOTOOLBOX
hardware decoding through Videotoolbox
Definition: pixfmt.h:302
LIST_NOT_USED
#define LIST_NOT_USED
Definition: h264dec.h:389
H264Picture::field_picture
int field_picture
whether or not picture was encoded in separate fields
Definition: h264dec.h:139
h264dec.h
H264SliceContext::poc_lsb
int poc_lsb
Definition: h264dec.h:320
H264SliceContext::first_mb_addr
unsigned int first_mb_addr
Definition: h264dec.h:227
ff_h264_direct_dist_scale_factor
void ff_h264_direct_dist_scale_factor(const H264Context *const h, H264SliceContext *sl)
Definition: h264_direct.c:61
H264Picture::needs_fg
int needs_fg
whether picture needs film grain synthesis (see f_grain)
Definition: h264dec.h:150
AVBuffer
A reference counted buffer type.
Definition: buffer_internal.h:38
H264Context
H264Context.
Definition: h264dec.h:330
AVDISCARD_NONINTRA
@ AVDISCARD_NONINTRA
discard all non intra frames
Definition: defs.h:74
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
av_timecode_make_smpte_tc_string2
char * av_timecode_make_smpte_tc_string2(char *buf, AVRational rate, uint32_t tcsmpte, int prevent_df, int skip_field)
Get the timecode string from the SMPTE timecode format.
Definition: timecode.c:138
AV_CODEC_FLAG2_SHOW_ALL
#define AV_CODEC_FLAG2_SHOW_ALL
Show all frames before the first keyframe.
Definition: avcodec.h:331
AV_FRAME_FLAG_CORRUPT
#define AV_FRAME_FLAG_CORRUPT
The frame data may be corrupted, e.g.
Definition: frame.h:559
H264_SEI_PIC_STRUCT_FRAME_DOUBLING
@ H264_SEI_PIC_STRUCT_FRAME_DOUBLING
7: frame doubling
Definition: h264_sei.h:39
ff_h264chroma_init
av_cold void ff_h264chroma_init(H264ChromaContext *c, int bit_depth)
Definition: h264chroma.c:41
H264SliceContext::frame_num
int frame_num
Definition: h264dec.h:318
AV_PIX_FMT_GBRP12
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:476
display.h
ff_h264_execute_decode_slices
int ff_h264_execute_decode_slices(H264Context *h)
Call decode_slice() for each context.
Definition: h264_slice.c:2722
H264SliceContext::mb_linesize
ptrdiff_t mb_linesize
may be equal to s->linesize or s->linesize * 2, for mbaff
Definition: h264dec.h:220
av_assert1
#define av_assert1(cond)
assert() equivalent, that does not lie in speed critical code.
Definition: avassert.h:53
av_always_inline
#define av_always_inline
Definition: attributes.h:49
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
cabac_functions.h
H264Picture::hwaccel_priv_buf
AVBufferRef * hwaccel_priv_buf
Definition: h264dec.h:122
tb
#define tb
Definition: regdef.h:68
AV_COPY32
#define AV_COPY32(d, s)
Definition: intreadwrite.h:601
av_buffer_replace
int av_buffer_replace(AVBufferRef **pdst, const AVBufferRef *src)
Ensure dst refers to the same data as src.
Definition: buffer.c:233
ff_h264_parse_ref_count
int ff_h264_parse_ref_count(int *plist_count, int ref_count[2], GetBitContext *gb, const PPS *pps, int slice_type_nos, int picture_structure, void *logctx)
Definition: h264_parse.c:221
ff_h264_alloc_tables
int ff_h264_alloc_tables(H264Context *h)
Allocate tables.
Definition: h264dec.c:179
ff_thread_get_ext_buffer
int ff_thread_get_ext_buffer(AVCodecContext *avctx, ThreadFrame *f, int flags)
Wrapper around ff_get_buffer() for frame-multithreaded codecs.
Definition: pthread_frame.c:1080
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:644
AV_PIX_FMT_YUV444P9
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:458
H264SliceContext::list_count
unsigned int list_count
Definition: h264dec.h:261
avcodec.h
H264SliceContext::h264
const struct H264Context * h264
Definition: h264dec.h:171
av_cmp_q
static int av_cmp_q(AVRational a, AVRational b)
Compare two rationals.
Definition: rational.h:89
ff_h264dsp_init
av_cold void ff_h264dsp_init(H264DSPContext *c, const int bit_depth, const int chroma_format_idc)
Definition: h264dsp.c:66
ff_zigzag_direct
const uint8_t ff_zigzag_direct[64]
Definition: mathtables.c:98
av_buffer_allocz
AVBufferRef * av_buffer_allocz(size_t size)
Same as av_buffer_alloc(), except the returned buffer will be initialized to zero.
Definition: buffer.c:93
ff_h264_ref_picture
int ff_h264_ref_picture(H264Context *h, H264Picture *dst, H264Picture *src)
Definition: h264_picture.c:92
ret
ret
Definition: filter_design.txt:187
AV_EF_AGGRESSIVE
#define AV_EF_AGGRESSIVE
consider things that a sane encoder/muxer should not do as an error
Definition: defs.h:56
ff_h264_init_poc
int ff_h264_init_poc(int pic_field_poc[2], int *pic_poc, const SPS *sps, H264POCContext *pc, int picture_structure, int nal_ref_idc)
Definition: h264_parse.c:279
ff_h264_get_profile
int ff_h264_get_profile(const SPS *sps)
Compute profile from profile_idc and constraint_set?_flags.
Definition: h264_parse.c:531
h264_field_start
static int h264_field_start(H264Context *h, const H264SliceContext *sl, const H2645NAL *nal, int first_slice)
Definition: h264_slice.c:1368
H264SliceContext::last_qscale_diff
int last_qscale_diff
Definition: h264dec.h:183
sps
static int FUNC() sps(CodedBitstreamContext *ctx, RWContext *rw, H264RawSPS *current)
Definition: cbs_h264_syntax_template.c:260
align_get_bits
static const uint8_t * align_get_bits(GetBitContext *s)
Definition: get_bits.h:683
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:463
U
#define U(x)
Definition: vpx_arith.h:37
AV_PIX_FMT_YUV422P14
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:468
H264SliceContext::pps_id
unsigned int pps_id
Definition: h264dec.h:271
H264SliceContext::linesize
ptrdiff_t linesize
Definition: h264dec.h:219
H264SliceContext::slice_beta_offset
int slice_beta_offset
Definition: h264dec.h:188
AVCodecContext
main external API structure.
Definition: avcodec.h:398
AVFrame::height
int height
Definition: frame.h:397
get_ue_golomb_31
static int get_ue_golomb_31(GetBitContext *gb)
read unsigned exp golomb code, constraint to a max of 31.
Definition: golomb.h:120
MAX_SLICES
#define MAX_SLICES
Definition: dxva2_hevc.c:31
backup_mb_border
static av_always_inline void backup_mb_border(const H264Context *h, H264SliceContext *sl, uint8_t *src_y, uint8_t *src_cb, uint8_t *src_cr, int linesize, int uvlinesize, int simple)
Definition: h264_slice.c:572
ff_h264_build_ref_list
int ff_h264_build_ref_list(H264Context *h, H264SliceContext *sl)
Definition: h264_refs.c:298
AVCodecContext::execute
int(* execute)(struct AVCodecContext *c, int(*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size)
The codec may call this to execute several independent things.
Definition: avcodec.h:1517
H264SliceContext::bipred_scratchpad
uint8_t * bipred_scratchpad
Definition: h264dec.h:275
ff_h264_pred_init
av_cold void ff_h264_pred_init(H264PredContext *h, int codec_id, const int bit_depth, int chroma_format_idc)
Set the intra prediction function pointers.
Definition: h264pred.c:437
H264Picture::field_poc
int field_poc[2]
top/bottom POC
Definition: h264dec.h:128
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:276
H264SliceContext::mmco
MMCO mmco[H264_MAX_MMCO_COUNT]
Definition: h264dec.h:314
error_resilience.h
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AVHWAccel::frame_priv_data_size
int frame_priv_data_size
Size of per-frame hardware accelerator private data.
Definition: avcodec.h:2179
H264Picture::mb_width
int mb_width
Definition: h264dec.h:155
fill_rectangle
static void fill_rectangle(int x, int y, int w, int h)
Definition: ffplay.c:814
H264Picture
Definition: h264dec.h:107
ERContext::error_status_table
uint8_t * error_status_table
Definition: error_resilience.h:67
find_unused_picture
static int find_unused_picture(H264Context *h)
Definition: h264_slice.c:273
ff_thread_get_format
FF_DISABLE_DEPRECATION_WARNINGS enum AVPixelFormat ff_thread_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Wrapper around get_format() for frame-multithreaded codecs.
Definition: pthread_frame.c:1042
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:112
h264_slice_init
static int h264_slice_init(H264Context *h, H264SliceContext *sl, const H2645NAL *nal)
Definition: h264_slice.c:1879
ff_h264_field_end
int ff_h264_field_end(H264Context *h, H264SliceContext *sl, int in_setup)
Definition: h264_picture.c:219
CABACContext::bytestream_end
const uint8_t * bytestream_end
Definition: cabac.h:46
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
init_table_pools
static int init_table_pools(H264Context *h)
Definition: h264_slice.c:163
H264Picture::mb_type_buf
AVBufferRef * mb_type_buf
Definition: h264dec.h:119
H264SliceContext::ref_list
H264Ref ref_list[2][48]
0..15: frame refs, 16..47: mbaff field refs.
Definition: h264dec.h:262
LBOT
#define LBOT
Definition: h264dec.h:71
H264SliceContext::non_zero_count_cache
uint8_t non_zero_count_cache[15 *8]
non zero coeff count cache.
Definition: h264dec.h:286
AV_PIX_FMT_GBRP
@ AV_PIX_FMT_GBRP
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:158
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:275
IS_INTER
#define IS_INTER(a)
Definition: mpegutils.h:72
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
get_ue_golomb_long
static unsigned get_ue_golomb_long(GetBitContext *gb)
Read an unsigned Exp-Golomb code in the range 0 to UINT32_MAX-1.
Definition: golomb.h:104
H264Context::nal_length_size
int nal_length_size
Number of bytes used for nal length (1, 2 or 4)
Definition: h264dec.h:449
avpriv_request_sample
#define avpriv_request_sample(...)
Definition: tableprint_vlc.h:36
ER_MB_END
#define ER_MB_END
Definition: error_resilience.h:39
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:231
H264SliceContext::er
ERContext * er
Definition: h264dec.h:173
H264_SEI_PIC_STRUCT_BOTTOM_FIELD
@ H264_SEI_PIC_STRUCT_BOTTOM_FIELD
2: bottom field
Definition: h264_sei.h:34
H264Picture::hwaccel_picture_private
void * hwaccel_picture_private
hardware accelerator private data
Definition: h264dec.h:123
ER_MV_END
#define ER_MV_END
Definition: error_resilience.h:36
H264SliceContext::idr_pic_id
int idr_pic_id
Definition: h264dec.h:319
ff_tlog
#define ff_tlog(ctx,...)
Definition: internal.h:162
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:425
cr
static double cr(void *priv, double x, double y)
Definition: vf_geq.c:240
AVFrame::crop_top
size_t crop_top
Definition: frame.h:685
H264SliceContext::gb
GetBitContext gb
Definition: h264dec.h:172
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:86
av_fast_malloc
void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size)
Allocate a buffer, reusing the given one if large enough.
Definition: mem.c:565
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
H264SliceContext::intra4x4_pred_mode
int8_t * intra4x4_pred_mode
Definition: h264dec.h:199
FFMAX3
#define FFMAX3(a, b, c)
Definition: macros.h:48
LTOP
#define LTOP
Definition: h264dec.h:70
h264.h
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:370
H264SliceContext::edge_emu_buffer_allocated
int edge_emu_buffer_allocated
Definition: h264dec.h:279
REBASE_PICTURE
#define REBASE_PICTURE(pic, new_ctx, old_ctx)
Definition: h264_slice.c:287
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
CHROMA444
#define CHROMA444(h)
Definition: h264dec.h:93
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
ff_h264_get_slice_type
int ff_h264_get_slice_type(const H264SliceContext *sl)
Reconstruct bitstream slice_type.
Definition: h264_slice.c:2140
h
h
Definition: vp9dsp_template.c:2038
H264SliceContext::cabac_init_idc
int cabac_init_idc
Definition: h264dec.h:312
AV_PIX_FMT_YUV444P14
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:469
H264PredWeightTable::luma_weight_flag
int luma_weight_flag[2]
7.4.3.2 luma_weight_lX_flag
Definition: h264_parse.h:74
H264_MAX_PICTURE_COUNT
#define H264_MAX_PICTURE_COUNT
Definition: h264dec.h:50
ER_AC_END
#define ER_AC_END
Definition: error_resilience.h:34
H264SliceContext::bipred_scratchpad_allocated
int bipred_scratchpad_allocated
Definition: h264dec.h:278
H264_NAL_IDR_SLICE
@ H264_NAL_IDR_SLICE
Definition: h264.h:39
AVDISCARD_NONREF
@ AVDISCARD_NONREF
discard all non reference
Definition: defs.h:72
H264SliceContext::slice_type_fixed
int slice_type_fixed
Definition: h264dec.h:178
H264Ref::poc
int poc
Definition: h264dec.h:164
IS_8x8DCT
#define IS_8x8DCT(a)
Definition: h264dec.h:96
H264Picture::qscale_table_buf
AVBufferRef * qscale_table_buf
Definition: h264dec.h:113
H264_SEI_PIC_STRUCT_TOP_FIELD
@ H264_SEI_PIC_STRUCT_TOP_FIELD
1: top field
Definition: h264_sei.h:33
H264SliceContext::delta_poc
int delta_poc[2]
Definition: h264dec.h:322
av_color_transfer_name
const char * av_color_transfer_name(enum AVColorTransferCharacteristic transfer)
Definition: pixdesc.c:3213
H264Picture::long_ref
int long_ref
1->long term reference 0->short term reference
Definition: h264dec.h:135
H264Ref::reference
int reference
Definition: h264dec.h:163
H264Picture::motion_val
int16_t(*[2] motion_val)[2]
Definition: h264dec.h:117
AV_CODEC_EXPORT_DATA_FILM_GRAIN
#define AV_CODEC_EXPORT_DATA_FILM_GRAIN
Decoding only.
Definition: avcodec.h:371
AV_PIX_FMT_YUV420P14
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:467
H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP
@ H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP
5: top field, bottom field, top field repeated, in that order
Definition: h264_sei.h:37
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2778
H264SliceContext::mb_field_decoding_flag
int mb_field_decoding_flag
Definition: h264dec.h:234
H264Context::is_avc
int is_avc
Used to parse AVC variant of H.264.
Definition: h264dec.h:448