hevcdec.c
1 /*
2  * HEVC video Decoder
3  *
4  * Copyright (C) 2012 - 2013 Guillaume Martres
5  * Copyright (C) 2012 - 2013 Mickael Raulet
6  * Copyright (C) 2012 - 2013 Gildas Cocherel
7  * Copyright (C) 2012 - 2013 Wassim Hamidouche
8  *
9  * This file is part of FFmpeg.
10  *
11  * FFmpeg is free software; you can redistribute it and/or
12  * modify it under the terms of the GNU Lesser General Public
13  * License as published by the Free Software Foundation; either
14  * version 2.1 of the License, or (at your option) any later version.
15  *
16  * FFmpeg is distributed in the hope that it will be useful,
17  * but WITHOUT ANY WARRANTY; without even the implied warranty of
18  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19  * Lesser General Public License for more details.
20  *
21  * You should have received a copy of the GNU Lesser General Public
22  * License along with FFmpeg; if not, write to the Free Software
23  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
24  */
25 
26 #include "config_components.h"
27 
28 #include "libavutil/attributes.h"
29 #include "libavutil/common.h"
30 #include "libavutil/display.h"
31 #include "libavutil/film_grain_params.h"
32 #include "libavutil/internal.h"
33 #include "libavutil/mastering_display_metadata.h"
34 #include "libavutil/md5.h"
35 #include "libavutil/opt.h"
36 #include "libavutil/pixdesc.h"
37 #include "libavutil/stereo3d.h"
38 #include "libavutil/timecode.h"
39 
40 #include "bswapdsp.h"
41 #include "bytestream.h"
42 #include "cabac_functions.h"
43 #include "codec_internal.h"
44 #include "golomb.h"
45 #include "hevc.h"
46 #include "hevc_data.h"
47 #include "hevc_parse.h"
48 #include "hevcdec.h"
49 #include "hwconfig.h"
50 #include "internal.h"
51 #include "profiles.h"
52 #include "thread.h"
53 #include "threadframe.h"
54 
55 static const uint8_t hevc_pel_weight[65] = { [2] = 0, [4] = 1, [6] = 2, [8] = 3, [12] = 4, [16] = 5, [24] = 6, [32] = 7, [48] = 8, [64] = 9 };
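// Maps a prediction block width (4..64) to the index of the matching
// width-specialised qpel/epel DSP function; widths that never occur stay 0.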
56 
57 /**
58  * NOTE: Each function hls_foo corresponds to the function foo in the
59  * specification (HLS stands for High Level Syntax).
60  */
61 
62 /**
63  * Section 5.7
64  */
65 
66 /* free everything allocated by pic_arrays_init() */
67 static void pic_arrays_free(HEVCContext *s)
68 {
69  av_freep(&s->sao);
70  av_freep(&s->deblock);
71 
72  av_freep(&s->skip_flag);
73  av_freep(&s->tab_ct_depth);
74 
75  av_freep(&s->tab_ipm);
76  av_freep(&s->cbf_luma);
77  av_freep(&s->is_pcm);
78 
79  av_freep(&s->qp_y_tab);
80  av_freep(&s->tab_slice_address);
81  av_freep(&s->filter_slice_edges);
82 
83  av_freep(&s->horizontal_bs);
84  av_freep(&s->vertical_bs);
85 
86  av_freep(&s->sh.entry_point_offset);
87  av_freep(&s->sh.size);
88  av_freep(&s->sh.offset);
89 
90  av_buffer_pool_uninit(&s->tab_mvf_pool);
91  av_buffer_pool_uninit(&s->rpl_tab_pool);
92 }
93 
94 /* allocate arrays that depend on frame dimensions */
95 static int pic_arrays_init(HEVCContext *s, const HEVCSPS *sps)
96 {
97  int log2_min_cb_size = sps->log2_min_cb_size;
98  int width = sps->width;
99  int height = sps->height;
100  int pic_size_in_ctb = ((width >> log2_min_cb_size) + 1) *
101  ((height >> log2_min_cb_size) + 1);
102  int ctb_count = sps->ctb_width * sps->ctb_height;
103  int min_pu_size = sps->min_pu_width * sps->min_pu_height;
104 
105  s->bs_width = (width >> 2) + 1;
106  s->bs_height = (height >> 2) + 1;
107 
108  s->sao = av_calloc(ctb_count, sizeof(*s->sao));
109  s->deblock = av_calloc(ctb_count, sizeof(*s->deblock));
110  if (!s->sao || !s->deblock)
111  goto fail;
112 
113  s->skip_flag = av_malloc_array(sps->min_cb_height, sps->min_cb_width);
114  s->tab_ct_depth = av_malloc_array(sps->min_cb_height, sps->min_cb_width);
115  if (!s->skip_flag || !s->tab_ct_depth)
116  goto fail;
117 
118  s->cbf_luma = av_malloc_array(sps->min_tb_width, sps->min_tb_height);
119  s->tab_ipm = av_mallocz(min_pu_size);
120  s->is_pcm = av_malloc_array(sps->min_pu_width + 1, sps->min_pu_height + 1);
121  if (!s->tab_ipm || !s->cbf_luma || !s->is_pcm)
122  goto fail;
123 
124  s->filter_slice_edges = av_mallocz(ctb_count);
125  s->tab_slice_address = av_malloc_array(pic_size_in_ctb,
126  sizeof(*s->tab_slice_address));
127  s->qp_y_tab = av_malloc_array(pic_size_in_ctb,
128  sizeof(*s->qp_y_tab));
129  if (!s->qp_y_tab || !s->filter_slice_edges || !s->tab_slice_address)
130  goto fail;
131 
132  s->horizontal_bs = av_calloc(s->bs_width, s->bs_height);
133  s->vertical_bs = av_calloc(s->bs_width, s->bs_height);
134  if (!s->horizontal_bs || !s->vertical_bs)
135  goto fail;
136 
137  s->tab_mvf_pool = av_buffer_pool_init(min_pu_size * sizeof(MvField),
138  av_buffer_allocz);
139  s->rpl_tab_pool = av_buffer_pool_init(ctb_count * sizeof(RefPicListTab),
140  av_buffer_allocz);
141  if (!s->tab_mvf_pool || !s->rpl_tab_pool)
142  goto fail;
143 
144  return 0;
145 
146 fail:
147  pic_arrays_free(s);
148  return AVERROR(ENOMEM);
149 }
150 
151 static int pred_weight_table(HEVCContext *s, GetBitContext *gb)
152 {
153  int i = 0;
154  int j = 0;
155  uint8_t luma_weight_l0_flag[16];
156  uint8_t chroma_weight_l0_flag[16];
157  uint8_t luma_weight_l1_flag[16];
158  uint8_t chroma_weight_l1_flag[16];
159  int luma_log2_weight_denom;
160 
161  luma_log2_weight_denom = get_ue_golomb_long(gb);
162  if (luma_log2_weight_denom < 0 || luma_log2_weight_denom > 7) {
163  av_log(s->avctx, AV_LOG_ERROR, "luma_log2_weight_denom %d is invalid\n", luma_log2_weight_denom);
164  return AVERROR_INVALIDDATA;
165  }
166  s->sh.luma_log2_weight_denom = av_clip_uintp2(luma_log2_weight_denom, 3);
167  if (s->ps.sps->chroma_format_idc != 0) {
168  int64_t chroma_log2_weight_denom = luma_log2_weight_denom + (int64_t)get_se_golomb(gb);
169  if (chroma_log2_weight_denom < 0 || chroma_log2_weight_denom > 7) {
170  av_log(s->avctx, AV_LOG_ERROR, "chroma_log2_weight_denom %"PRId64" is invalid\n", chroma_log2_weight_denom);
171  return AVERROR_INVALIDDATA;
172  }
173  s->sh.chroma_log2_weight_denom = chroma_log2_weight_denom;
174  }
175 
176  for (i = 0; i < s->sh.nb_refs[L0]; i++) {
177  luma_weight_l0_flag[i] = get_bits1(gb);
178  if (!luma_weight_l0_flag[i]) {
179  s->sh.luma_weight_l0[i] = 1 << s->sh.luma_log2_weight_denom;
180  s->sh.luma_offset_l0[i] = 0;
181  }
182  }
183  if (s->ps.sps->chroma_format_idc != 0) {
184  for (i = 0; i < s->sh.nb_refs[L0]; i++)
185  chroma_weight_l0_flag[i] = get_bits1(gb);
186  } else {
187  for (i = 0; i < s->sh.nb_refs[L0]; i++)
188  chroma_weight_l0_flag[i] = 0;
189  }
190  for (i = 0; i < s->sh.nb_refs[L0]; i++) {
191  if (luma_weight_l0_flag[i]) {
192  int delta_luma_weight_l0 = get_se_golomb(gb);
193  if ((int8_t)delta_luma_weight_l0 != delta_luma_weight_l0)
194  return AVERROR_INVALIDDATA;
195  s->sh.luma_weight_l0[i] = (1 << s->sh.luma_log2_weight_denom) + delta_luma_weight_l0;
196  s->sh.luma_offset_l0[i] = get_se_golomb(gb);
197  }
198  if (chroma_weight_l0_flag[i]) {
199  for (j = 0; j < 2; j++) {
200  int delta_chroma_weight_l0 = get_se_golomb(gb);
201  int delta_chroma_offset_l0 = get_se_golomb(gb);
202 
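 // The chroma weight is coded as a delta against (1 << chroma_log2_weight_denom),
 // and the chroma offset as a delta against 128 - ((128 * weight) >> denom);
 // both deltas are range-checked below before the final values are derived.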
203  if ( (int8_t)delta_chroma_weight_l0 != delta_chroma_weight_l0
204  || delta_chroma_offset_l0 < -(1<<17) || delta_chroma_offset_l0 > (1<<17)) {
205  return AVERROR_INVALIDDATA;
206  }
207 
208  s->sh.chroma_weight_l0[i][j] = (1 << s->sh.chroma_log2_weight_denom) + delta_chroma_weight_l0;
209  s->sh.chroma_offset_l0[i][j] = av_clip((delta_chroma_offset_l0 - ((128 * s->sh.chroma_weight_l0[i][j])
210  >> s->sh.chroma_log2_weight_denom) + 128), -128, 127);
211  }
212  } else {
213  s->sh.chroma_weight_l0[i][0] = 1 << s->sh.chroma_log2_weight_denom;
214  s->sh.chroma_offset_l0[i][0] = 0;
215  s->sh.chroma_weight_l0[i][1] = 1 << s->sh.chroma_log2_weight_denom;
216  s->sh.chroma_offset_l0[i][1] = 0;
217  }
218  }
219  if (s->sh.slice_type == HEVC_SLICE_B) {
220  for (i = 0; i < s->sh.nb_refs[L1]; i++) {
221  luma_weight_l1_flag[i] = get_bits1(gb);
222  if (!luma_weight_l1_flag[i]) {
223  s->sh.luma_weight_l1[i] = 1 << s->sh.luma_log2_weight_denom;
224  s->sh.luma_offset_l1[i] = 0;
225  }
226  }
227  if (s->ps.sps->chroma_format_idc != 0) {
228  for (i = 0; i < s->sh.nb_refs[L1]; i++)
229  chroma_weight_l1_flag[i] = get_bits1(gb);
230  } else {
231  for (i = 0; i < s->sh.nb_refs[L1]; i++)
232  chroma_weight_l1_flag[i] = 0;
233  }
234  for (i = 0; i < s->sh.nb_refs[L1]; i++) {
235  if (luma_weight_l1_flag[i]) {
236  int delta_luma_weight_l1 = get_se_golomb(gb);
237  if ((int8_t)delta_luma_weight_l1 != delta_luma_weight_l1)
238  return AVERROR_INVALIDDATA;
239  s->sh.luma_weight_l1[i] = (1 << s->sh.luma_log2_weight_denom) + delta_luma_weight_l1;
240  s->sh.luma_offset_l1[i] = get_se_golomb(gb);
241  }
242  if (chroma_weight_l1_flag[i]) {
243  for (j = 0; j < 2; j++) {
244  int delta_chroma_weight_l1 = get_se_golomb(gb);
245  int delta_chroma_offset_l1 = get_se_golomb(gb);
246 
247  if ( (int8_t)delta_chroma_weight_l1 != delta_chroma_weight_l1
248  || delta_chroma_offset_l1 < -(1<<17) || delta_chroma_offset_l1 > (1<<17)) {
249  return AVERROR_INVALIDDATA;
250  }
251 
252  s->sh.chroma_weight_l1[i][j] = (1 << s->sh.chroma_log2_weight_denom) + delta_chroma_weight_l1;
253  s->sh.chroma_offset_l1[i][j] = av_clip((delta_chroma_offset_l1 - ((128 * s->sh.chroma_weight_l1[i][j])
254  >> s->sh.chroma_log2_weight_denom) + 128), -128, 127);
255  }
256  } else {
257  s->sh.chroma_weight_l1[i][0] = 1 << s->sh.chroma_log2_weight_denom;
258  s->sh.chroma_offset_l1[i][0] = 0;
259  s->sh.chroma_weight_l1[i][1] = 1 << s->sh.chroma_log2_weight_denom;
260  s->sh.chroma_offset_l1[i][1] = 0;
261  }
262  }
263  }
264  return 0;
265 }
266 
267 static int decode_lt_rps(HEVCContext *s, LongTermRPS *rps, GetBitContext *gb)
268 {
269  const HEVCSPS *sps = s->ps.sps;
270  int max_poc_lsb = 1 << sps->log2_max_poc_lsb;
271  int prev_delta_msb = 0;
272  unsigned int nb_sps = 0, nb_sh;
273  int i;
274 
275  rps->nb_refs = 0;
276  if (!sps->long_term_ref_pics_present_flag)
277  return 0;
278 
279  if (sps->num_long_term_ref_pics_sps > 0)
280  nb_sps = get_ue_golomb_long(gb);
281  nb_sh = get_ue_golomb_long(gb);
282 
283  if (nb_sps > sps->num_long_term_ref_pics_sps)
284  return AVERROR_INVALIDDATA;
285  if (nb_sh + (uint64_t)nb_sps > FF_ARRAY_ELEMS(rps->poc))
286  return AVERROR_INVALIDDATA;
287 
288  rps->nb_refs = nb_sh + nb_sps;
289 
290  for (i = 0; i < rps->nb_refs; i++) {
291 
292  if (i < nb_sps) {
293  uint8_t lt_idx_sps = 0;
294 
295  if (sps->num_long_term_ref_pics_sps > 1)
296  lt_idx_sps = get_bits(gb, av_ceil_log2(sps->num_long_term_ref_pics_sps));
297 
298  rps->poc[i] = sps->lt_ref_pic_poc_lsb_sps[lt_idx_sps];
299  rps->used[i] = sps->used_by_curr_pic_lt_sps_flag[lt_idx_sps];
300  } else {
301  rps->poc[i] = get_bits(gb, sps->log2_max_poc_lsb);
302  rps->used[i] = get_bits1(gb);
303  }
304 
305  rps->poc_msb_present[i] = get_bits1(gb);
306  if (rps->poc_msb_present[i]) {
307  int64_t delta = get_ue_golomb_long(gb);
308  int64_t poc;
309 
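 // delta_poc_msb_cycle_lt accumulates across entries (restarting at the first
 // slice-signalled entry); the full POC of the long-term picture is rebuilt
 // from the current POC, the signalled LSBs and this MSB cycle.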
310  if (i && i != nb_sps)
311  delta += prev_delta_msb;
312 
313  poc = rps->poc[i] + s->poc - delta * max_poc_lsb - s->sh.pic_order_cnt_lsb;
314  if (poc != (int32_t)poc)
315  return AVERROR_INVALIDDATA;
316  rps->poc[i] = poc;
317  prev_delta_msb = delta;
318  }
319  }
320 
321  return 0;
322 }
323 
324 static void export_stream_params(HEVCContext *s, const HEVCSPS *sps)
325 {
326  AVCodecContext *avctx = s->avctx;
327  const HEVCParamSets *ps = &s->ps;
328  const HEVCVPS *vps = (const HEVCVPS*)ps->vps_list[sps->vps_id]->data;
329  const HEVCWindow *ow = &sps->output_window;
330  unsigned int num = 0, den = 0;
331 
332  avctx->pix_fmt = sps->pix_fmt;
333  avctx->coded_width = sps->width;
334  avctx->coded_height = sps->height;
335  avctx->width = sps->width - ow->left_offset - ow->right_offset;
336  avctx->height = sps->height - ow->top_offset - ow->bottom_offset;
337  avctx->has_b_frames = sps->temporal_layer[sps->max_sub_layers - 1].num_reorder_pics;
338  avctx->profile = sps->ptl.general_ptl.profile_idc;
339  avctx->level = sps->ptl.general_ptl.level_idc;
340 
341  ff_set_sar(avctx, sps->vui.sar);
342 
343  if (sps->vui.video_signal_type_present_flag)
344  avctx->color_range = sps->vui.video_full_range_flag ? AVCOL_RANGE_JPEG
345  : AVCOL_RANGE_MPEG;
346  else
347  avctx->color_range = AVCOL_RANGE_MPEG;
348 
349  if (sps->vui.colour_description_present_flag) {
350  avctx->color_primaries = sps->vui.colour_primaries;
351  avctx->color_trc = sps->vui.transfer_characteristic;
352  avctx->colorspace = sps->vui.matrix_coeffs;
353  } else {
354  avctx->color_primaries = AVCOL_PRI_UNSPECIFIED;
355  avctx->color_trc = AVCOL_TRC_UNSPECIFIED;
356  avctx->colorspace = AVCOL_SPC_UNSPECIFIED;
357  }
358 
359  avctx->chroma_sample_location = AVCHROMA_LOC_UNSPECIFIED;
360  if (sps->chroma_format_idc == 1) {
361  if (sps->vui.chroma_loc_info_present_flag) {
362  if (sps->vui.chroma_sample_loc_type_top_field <= 5)
363  avctx->chroma_sample_location = sps->vui.chroma_sample_loc_type_top_field + 1;
364  } else
365  avctx->chroma_sample_location = AVCHROMA_LOC_LEFT;
366  }
367 
368  if (vps->vps_timing_info_present_flag) {
369  num = vps->vps_num_units_in_tick;
370  den = vps->vps_time_scale;
371  } else if (sps->vui.vui_timing_info_present_flag) {
372  num = sps->vui.vui_num_units_in_tick;
373  den = sps->vui.vui_time_scale;
374  }
375 
376  if (num != 0 && den != 0)
377  av_reduce(&avctx->framerate.den, &avctx->framerate.num,
378  num, den, 1 << 30);
379 }
380 
381 static int export_stream_params_from_sei(HEVCContext *s)
382 {
383  AVCodecContext *avctx = s->avctx;
384 
385  if (s->sei.a53_caption.buf_ref)
386  s->avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
387 
388  if (s->sei.alternative_transfer.present &&
389  av_color_transfer_name(s->sei.alternative_transfer.preferred_transfer_characteristics) &&
390  s->sei.alternative_transfer.preferred_transfer_characteristics != AVCOL_TRC_UNSPECIFIED) {
391  avctx->color_trc = s->sei.alternative_transfer.preferred_transfer_characteristics;
392  }
393 
394  if (s->sei.film_grain_characteristics.present)
395  avctx->properties |= FF_CODEC_PROPERTY_FILM_GRAIN;
396 
397  return 0;
398 }
399 
400 static enum AVPixelFormat get_format(HEVCContext *s, const HEVCSPS *sps)
401 {
402 #define HWACCEL_MAX (CONFIG_HEVC_DXVA2_HWACCEL + \
403  CONFIG_HEVC_D3D11VA_HWACCEL * 2 + \
404  CONFIG_HEVC_NVDEC_HWACCEL + \
405  CONFIG_HEVC_VAAPI_HWACCEL + \
406  CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL + \
407  CONFIG_HEVC_VDPAU_HWACCEL)
408  enum AVPixelFormat pix_fmts[HWACCEL_MAX + 2], *fmt = pix_fmts;
409 
410  switch (sps->pix_fmt) {
411  case AV_PIX_FMT_YUV420P:
412  case AV_PIX_FMT_YUVJ420P:
413 #if CONFIG_HEVC_DXVA2_HWACCEL
414  *fmt++ = AV_PIX_FMT_DXVA2_VLD;
415 #endif
416 #if CONFIG_HEVC_D3D11VA_HWACCEL
417  *fmt++ = AV_PIX_FMT_D3D11VA_VLD;
418  *fmt++ = AV_PIX_FMT_D3D11;
419 #endif
420 #if CONFIG_HEVC_VAAPI_HWACCEL
421  *fmt++ = AV_PIX_FMT_VAAPI;
422 #endif
423 #if CONFIG_HEVC_VDPAU_HWACCEL
424  *fmt++ = AV_PIX_FMT_VDPAU;
425 #endif
426 #if CONFIG_HEVC_NVDEC_HWACCEL
427  *fmt++ = AV_PIX_FMT_CUDA;
428 #endif
429 #if CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL
430  *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
431 #endif
432  break;
433  case AV_PIX_FMT_YUV420P10:
434 #if CONFIG_HEVC_DXVA2_HWACCEL
435  *fmt++ = AV_PIX_FMT_DXVA2_VLD;
436 #endif
437 #if CONFIG_HEVC_D3D11VA_HWACCEL
438  *fmt++ = AV_PIX_FMT_D3D11VA_VLD;
439  *fmt++ = AV_PIX_FMT_D3D11;
440 #endif
441 #if CONFIG_HEVC_VAAPI_HWACCEL
442  *fmt++ = AV_PIX_FMT_VAAPI;
443 #endif
444 #if CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL
445  *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
446 #endif
447 #if CONFIG_HEVC_VDPAU_HWACCEL
448  *fmt++ = AV_PIX_FMT_VDPAU;
449 #endif
450 #if CONFIG_HEVC_NVDEC_HWACCEL
451  *fmt++ = AV_PIX_FMT_CUDA;
452 #endif
453  break;
454  case AV_PIX_FMT_YUV444P:
455 #if CONFIG_HEVC_VDPAU_HWACCEL
456  *fmt++ = AV_PIX_FMT_VDPAU;
457 #endif
458 #if CONFIG_HEVC_NVDEC_HWACCEL
459  *fmt++ = AV_PIX_FMT_CUDA;
460 #endif
461 #if CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL
462  *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
463 #endif
464  break;
465  case AV_PIX_FMT_YUV422P:
466  case AV_PIX_FMT_YUV422P10LE:
467 #if CONFIG_HEVC_VAAPI_HWACCEL
468  *fmt++ = AV_PIX_FMT_VAAPI;
469 #endif
470 #if CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL
471  *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
472 #endif
473  break;
474  case AV_PIX_FMT_YUV444P10:
475 #if CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL
476  *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
477 #endif
478  case AV_PIX_FMT_YUV420P12:
479  case AV_PIX_FMT_YUV444P12:
480 #if CONFIG_HEVC_VDPAU_HWACCEL
481  *fmt++ = AV_PIX_FMT_VDPAU;
482 #endif
483 #if CONFIG_HEVC_NVDEC_HWACCEL
484  *fmt++ = AV_PIX_FMT_CUDA;
485 #endif
486  break;
487  }
488 
489  *fmt++ = sps->pix_fmt;
490  *fmt = AV_PIX_FMT_NONE;
491 
492  return ff_thread_get_format(s->avctx, pix_fmts);
493 }
494 
495 static int set_sps(HEVCContext *s, const HEVCSPS *sps,
496  enum AVPixelFormat pix_fmt)
497 {
498  int ret, i;
499 
500  pic_arrays_free(s);
501  s->ps.sps = NULL;
502  s->ps.vps = NULL;
503 
504  if (!sps)
505  return 0;
506 
507  ret = pic_arrays_init(s, sps);
508  if (ret < 0)
509  goto fail;
510 
511  export_stream_params(s, sps);
512 
513  s->avctx->pix_fmt = pix_fmt;
514 
515  ff_hevc_pred_init(&s->hpc, sps->bit_depth);
516  ff_hevc_dsp_init (&s->hevcdsp, sps->bit_depth);
517  ff_videodsp_init (&s->vdsp, sps->bit_depth);
518 
519  for (i = 0; i < 3; i++) {
520  av_freep(&s->sao_pixel_buffer_h[i]);
521  av_freep(&s->sao_pixel_buffer_v[i]);
522  }
523 
524  if (sps->sao_enabled && !s->avctx->hwaccel) {
525  int c_count = (sps->chroma_format_idc != 0) ? 3 : 1;
526  int c_idx;
527 
528  for(c_idx = 0; c_idx < c_count; c_idx++) {
529  int w = sps->width >> sps->hshift[c_idx];
530  int h = sps->height >> sps->vshift[c_idx];
531  s->sao_pixel_buffer_h[c_idx] =
532  av_malloc((w * 2 * sps->ctb_height) <<
533  sps->pixel_shift);
534  s->sao_pixel_buffer_v[c_idx] =
535  av_malloc((h * 2 * sps->ctb_width) <<
536  sps->pixel_shift);
537  if (!s->sao_pixel_buffer_h[c_idx] ||
538  !s->sao_pixel_buffer_v[c_idx])
539  goto fail;
540  }
541  }
542 
543  s->ps.sps = sps;
544  s->ps.vps = (HEVCVPS*) s->ps.vps_list[s->ps.sps->vps_id]->data;
545 
546  return 0;
547 
548 fail:
549  pic_arrays_free(s);
550  for (i = 0; i < 3; i++) {
551  av_freep(&s->sao_pixel_buffer_h[i]);
552  av_freep(&s->sao_pixel_buffer_v[i]);
553  }
554  s->ps.sps = NULL;
555  return ret;
556 }
557 
558 static int hls_slice_header(HEVCContext *s)
559 {
560  GetBitContext *gb = &s->HEVClc->gb;
561  SliceHeader *sh = &s->sh;
562  int i, ret;
563 
564  // Coded parameters
565  sh->first_slice_in_pic_flag = get_bits1(gb);
566  if (s->ref && sh->first_slice_in_pic_flag) {
567  av_log(s->avctx, AV_LOG_ERROR, "Two slices reporting being the first in the same frame.\n");
568  return 1; // This slice will be skipped later, do not corrupt state
569  }
570 
571  if ((IS_IDR(s) || IS_BLA(s)) && sh->first_slice_in_pic_flag) {
572  s->seq_decode = (s->seq_decode + 1) & 0xff;
573  s->max_ra = INT_MAX;
574  if (IS_IDR(s))
575  ff_hevc_clear_refs(s);
576  }
577  sh->no_output_of_prior_pics_flag = 0;
578  if (IS_IRAP(s))
579  sh->no_output_of_prior_pics_flag = get_bits1(gb);
580 
581  sh->pps_id = get_ue_golomb_long(gb);
582  if (sh->pps_id >= HEVC_MAX_PPS_COUNT || !s->ps.pps_list[sh->pps_id]) {
583  av_log(s->avctx, AV_LOG_ERROR, "PPS id out of range: %d\n", sh->pps_id);
584  return AVERROR_INVALIDDATA;
585  }
586  if (!sh->first_slice_in_pic_flag &&
587  s->ps.pps != (HEVCPPS*)s->ps.pps_list[sh->pps_id]->data) {
588  av_log(s->avctx, AV_LOG_ERROR, "PPS changed between slices.\n");
589  return AVERROR_INVALIDDATA;
590  }
591  s->ps.pps = (HEVCPPS*)s->ps.pps_list[sh->pps_id]->data;
592  if (s->nal_unit_type == HEVC_NAL_CRA_NUT && s->last_eos == 1)
593  sh->no_output_of_prior_pics_flag = 1;
594 
595  if (s->ps.sps != (HEVCSPS*)s->ps.sps_list[s->ps.pps->sps_id]->data) {
596  const HEVCSPS *sps = (HEVCSPS*)s->ps.sps_list[s->ps.pps->sps_id]->data;
597  const HEVCSPS *last_sps = s->ps.sps;
598  enum AVPixelFormat pix_fmt;
599 
600  if (last_sps && IS_IRAP(s) && s->nal_unit_type != HEVC_NAL_CRA_NUT) {
601  if (sps->width != last_sps->width || sps->height != last_sps->height ||
602  sps->temporal_layer[sps->max_sub_layers - 1].max_dec_pic_buffering !=
603  last_sps->temporal_layer[last_sps->max_sub_layers - 1].max_dec_pic_buffering)
604  sh->no_output_of_prior_pics_flag = 0;
605  }
606  ff_hevc_clear_refs(s);
607 
608  ret = set_sps(s, sps, sps->pix_fmt);
609  if (ret < 0)
610  return ret;
611 
612  pix_fmt = get_format(s, sps);
613  if (pix_fmt < 0)
614  return pix_fmt;
615  s->avctx->pix_fmt = pix_fmt;
616 
617  s->seq_decode = (s->seq_decode + 1) & 0xff;
618  s->max_ra = INT_MAX;
619  }
620 
621  ret = export_stream_params_from_sei(s);
622  if (ret < 0)
623  return ret;
624 
625  sh->dependent_slice_segment_flag = 0;
626  if (!sh->first_slice_in_pic_flag) {
627  int slice_address_length;
628 
629  if (s->ps.pps->dependent_slice_segments_enabled_flag)
630  sh->dependent_slice_segment_flag = get_bits1(gb);
631 
632  slice_address_length = av_ceil_log2(s->ps.sps->ctb_width *
633  s->ps.sps->ctb_height);
634  sh->slice_segment_addr = get_bitsz(gb, slice_address_length);
635  if (sh->slice_segment_addr >= s->ps.sps->ctb_width * s->ps.sps->ctb_height) {
636  av_log(s->avctx, AV_LOG_ERROR,
637  "Invalid slice segment address: %u.\n",
638  sh->slice_segment_addr);
639  return AVERROR_INVALIDDATA;
640  }
641 
642  if (!sh->dependent_slice_segment_flag) {
643  sh->slice_addr = sh->slice_segment_addr;
644  s->slice_idx++;
645  }
646  } else {
647  sh->slice_segment_addr = sh->slice_addr = 0;
648  s->slice_idx = 0;
649  s->slice_initialized = 0;
650  }
651 
652  if (!sh->dependent_slice_segment_flag) {
653  s->slice_initialized = 0;
654 
655  for (i = 0; i < s->ps.pps->num_extra_slice_header_bits; i++)
656  skip_bits(gb, 1); // slice_reserved_undetermined_flag[]
657 
658  sh->slice_type = get_ue_golomb_long(gb);
659  if (!(sh->slice_type == HEVC_SLICE_I ||
660  sh->slice_type == HEVC_SLICE_P ||
661  sh->slice_type == HEVC_SLICE_B)) {
662  av_log(s->avctx, AV_LOG_ERROR, "Unknown slice type: %d.\n",
663  sh->slice_type);
664  return AVERROR_INVALIDDATA;
665  }
666  if (IS_IRAP(s) && sh->slice_type != HEVC_SLICE_I) {
667  av_log(s->avctx, AV_LOG_ERROR, "Inter slices in an IRAP frame.\n");
668  return AVERROR_INVALIDDATA;
669  }
670 
671  // when flag is not present, picture is inferred to be output
672  sh->pic_output_flag = 1;
673  if (s->ps.pps->output_flag_present_flag)
674  sh->pic_output_flag = get_bits1(gb);
675 
676  if (s->ps.sps->separate_colour_plane_flag)
677  sh->colour_plane_id = get_bits(gb, 2);
678 
679  if (!IS_IDR(s)) {
680  int poc, pos;
681 
682  sh->pic_order_cnt_lsb = get_bits(gb, s->ps.sps->log2_max_poc_lsb);
683  poc = ff_hevc_compute_poc(s->ps.sps, s->pocTid0, sh->pic_order_cnt_lsb, s->nal_unit_type);
684  if (!sh->first_slice_in_pic_flag && poc != s->poc) {
685  av_log(s->avctx, AV_LOG_WARNING,
686  "Ignoring POC change between slices: %d -> %d\n", s->poc, poc);
687  if (s->avctx->err_recognition & AV_EF_EXPLODE)
688  return AVERROR_INVALIDDATA;
689  poc = s->poc;
690  }
691  s->poc = poc;
692 
694  pos = get_bits_left(gb);
695  if (!get_bits1(gb)) { // short_term_ref_pic_set_sps_flag
696  ret = ff_hevc_decode_short_term_rps(gb, s->avctx, &sh->slice_rps, s->ps.sps, 1);
697  if (ret < 0)
698  return ret;
699 
700  sh->short_term_rps = &sh->slice_rps;
701  } else {
702  int numbits, rps_idx;
703 
704  if (!s->ps.sps->nb_st_rps) {
705  av_log(s->avctx, AV_LOG_ERROR, "No ref lists in the SPS.\n");
706  return AVERROR_INVALIDDATA;
707  }
708 
709  numbits = av_ceil_log2(s->ps.sps->nb_st_rps);
710  rps_idx = numbits > 0 ? get_bits(gb, numbits) : 0;
711  sh->short_term_rps = &s->ps.sps->st_rps[rps_idx];
712  }
713  sh->short_term_ref_pic_set_size = pos - get_bits_left(gb);
714 
715  pos = get_bits_left(gb);
716  ret = decode_lt_rps(s, &sh->long_term_rps, gb);
717  if (ret < 0) {
718  av_log(s->avctx, AV_LOG_WARNING, "Invalid long term RPS.\n");
719  if (s->avctx->err_recognition & AV_EF_EXPLODE)
720  return AVERROR_INVALIDDATA;
721  }
722  sh->long_term_ref_pic_set_size = pos - get_bits_left(gb);
723 
724  if (s->ps.sps->sps_temporal_mvp_enabled_flag)
725  sh->slice_temporal_mvp_enabled_flag = get_bits1(gb);
726  else
727  sh->slice_temporal_mvp_enabled_flag = 0;
728  } else {
729  s->sh.short_term_rps = NULL;
730  s->poc = 0;
731  }
732 
733  /* 8.3.1 */
734  if (sh->first_slice_in_pic_flag && s->temporal_id == 0 &&
735  s->nal_unit_type != HEVC_NAL_TRAIL_N &&
736  s->nal_unit_type != HEVC_NAL_TSA_N &&
737  s->nal_unit_type != HEVC_NAL_STSA_N &&
738  s->nal_unit_type != HEVC_NAL_RADL_N &&
739  s->nal_unit_type != HEVC_NAL_RADL_R &&
740  s->nal_unit_type != HEVC_NAL_RASL_N &&
741  s->nal_unit_type != HEVC_NAL_RASL_R)
742  s->pocTid0 = s->poc;
743 
744  if (s->ps.sps->sao_enabled) {
745  sh->slice_sample_adaptive_offset_flag[0] = get_bits1(gb);
746  if (s->ps.sps->chroma_format_idc) {
747  sh->slice_sample_adaptive_offset_flag[1] =
748  sh->slice_sample_adaptive_offset_flag[2] = get_bits1(gb);
749  }
750  } else {
751  sh->slice_sample_adaptive_offset_flag[0] = 0;
752  sh->slice_sample_adaptive_offset_flag[1] = 0;
753  sh->slice_sample_adaptive_offset_flag[2] = 0;
754  }
755 
756  sh->nb_refs[L0] = sh->nb_refs[L1] = 0;
757  if (sh->slice_type == HEVC_SLICE_P || sh->slice_type == HEVC_SLICE_B) {
758  int nb_refs;
759 
760  sh->nb_refs[L0] = s->ps.pps->num_ref_idx_l0_default_active;
761  if (sh->slice_type == HEVC_SLICE_B)
762  sh->nb_refs[L1] = s->ps.pps->num_ref_idx_l1_default_active;
763 
764  if (get_bits1(gb)) { // num_ref_idx_active_override_flag
765  sh->nb_refs[L0] = get_ue_golomb_long(gb) + 1;
766  if (sh->slice_type == HEVC_SLICE_B)
767  sh->nb_refs[L1] = get_ue_golomb_long(gb) + 1;
768  }
769  if (sh->nb_refs[L0] > HEVC_MAX_REFS || sh->nb_refs[L1] > HEVC_MAX_REFS) {
770  av_log(s->avctx, AV_LOG_ERROR, "Too many refs: %d/%d.\n",
771  sh->nb_refs[L0], sh->nb_refs[L1]);
772  return AVERROR_INVALIDDATA;
773  }
774 
775  sh->rpl_modification_flag[0] = 0;
776  sh->rpl_modification_flag[1] = 0;
777  nb_refs = ff_hevc_frame_nb_refs(s);
778  if (!nb_refs) {
779  av_log(s->avctx, AV_LOG_ERROR, "Zero refs for a frame with P or B slices.\n");
780  return AVERROR_INVALIDDATA;
781  }
782 
783  if (s->ps.pps->lists_modification_present_flag && nb_refs > 1) {
784  sh->rpl_modification_flag[0] = get_bits1(gb);
785  if (sh->rpl_modification_flag[0]) {
786  for (i = 0; i < sh->nb_refs[L0]; i++)
787  sh->list_entry_lx[0][i] = get_bits(gb, av_ceil_log2(nb_refs));
788  }
789 
790  if (sh->slice_type == HEVC_SLICE_B) {
791  sh->rpl_modification_flag[1] = get_bits1(gb);
792  if (sh->rpl_modification_flag[1] == 1)
793  for (i = 0; i < sh->nb_refs[L1]; i++)
794  sh->list_entry_lx[1][i] = get_bits(gb, av_ceil_log2(nb_refs));
795  }
796  }
797 
798  if (sh->slice_type == HEVC_SLICE_B)
799  sh->mvd_l1_zero_flag = get_bits1(gb);
800 
801  if (s->ps.pps->cabac_init_present_flag)
802  sh->cabac_init_flag = get_bits1(gb);
803  else
804  sh->cabac_init_flag = 0;
805 
806  sh->collocated_ref_idx = 0;
807  if (sh->slice_temporal_mvp_enabled_flag) {
808  sh->collocated_list = L0;
809  if (sh->slice_type == HEVC_SLICE_B)
810  sh->collocated_list = !get_bits1(gb);
811 
812  if (sh->nb_refs[sh->collocated_list] > 1) {
813  sh->collocated_ref_idx = get_ue_golomb_long(gb);
814  if (sh->collocated_ref_idx >= sh->nb_refs[sh->collocated_list]) {
815  av_log(s->avctx, AV_LOG_ERROR,
816  "Invalid collocated_ref_idx: %d.\n",
817  sh->collocated_ref_idx);
818  return AVERROR_INVALIDDATA;
819  }
820  }
821  }
822 
823  if ((s->ps.pps->weighted_pred_flag && sh->slice_type == HEVC_SLICE_P) ||
824  (s->ps.pps->weighted_bipred_flag && sh->slice_type == HEVC_SLICE_B)) {
825  int ret = pred_weight_table(s, gb);
826  if (ret < 0)
827  return ret;
828  }
829 
830  sh->max_num_merge_cand = 5 - get_ue_golomb_long(gb);
831  if (sh->max_num_merge_cand < 1 || sh->max_num_merge_cand > 5) {
832  av_log(s->avctx, AV_LOG_ERROR,
833  "Invalid number of merging MVP candidates: %d.\n",
834  sh->max_num_merge_cand);
835  return AVERROR_INVALIDDATA;
836  }
837  }
838 
839  sh->slice_qp_delta = get_se_golomb(gb);
840 
841  if (s->ps.pps->pic_slice_level_chroma_qp_offsets_present_flag) {
842  sh->slice_cb_qp_offset = get_se_golomb(gb);
843  sh->slice_cr_qp_offset = get_se_golomb(gb);
844  if (sh->slice_cb_qp_offset < -12 || sh->slice_cb_qp_offset > 12 ||
845  sh->slice_cr_qp_offset < -12 || sh->slice_cr_qp_offset > 12) {
846  av_log(s->avctx, AV_LOG_ERROR, "Invalid slice cx qp offset.\n");
847  return AVERROR_INVALIDDATA;
848  }
849  } else {
850  sh->slice_cb_qp_offset = 0;
851  sh->slice_cr_qp_offset = 0;
852  }
853 
854  if (s->ps.pps->chroma_qp_offset_list_enabled_flag)
855  sh->cu_chroma_qp_offset_enabled_flag = get_bits1(gb);
856  else
857  sh->cu_chroma_qp_offset_enabled_flag = 0;
858 
859  if (s->ps.pps->deblocking_filter_control_present_flag) {
860  int deblocking_filter_override_flag = 0;
861 
862  if (s->ps.pps->deblocking_filter_override_enabled_flag)
863  deblocking_filter_override_flag = get_bits1(gb);
864 
865  if (deblocking_filter_override_flag) {
866  sh->disable_deblocking_filter_flag = get_bits1(gb);
867  if (!sh->disable_deblocking_filter_flag) {
868  int beta_offset_div2 = get_se_golomb(gb);
869  int tc_offset_div2 = get_se_golomb(gb) ;
870  if (beta_offset_div2 < -6 || beta_offset_div2 > 6 ||
871  tc_offset_div2 < -6 || tc_offset_div2 > 6) {
872  av_log(s->avctx, AV_LOG_ERROR,
873  "Invalid deblock filter offsets: %d, %d\n",
874  beta_offset_div2, tc_offset_div2);
875  return AVERROR_INVALIDDATA;
876  }
877  sh->beta_offset = beta_offset_div2 * 2;
878  sh->tc_offset = tc_offset_div2 * 2;
879  }
880  } else {
881  sh->disable_deblocking_filter_flag = s->ps.pps->disable_dbf;
882  sh->beta_offset = s->ps.pps->beta_offset;
883  sh->tc_offset = s->ps.pps->tc_offset;
884  }
885  } else {
886  sh->disable_deblocking_filter_flag = 0;
887  sh->beta_offset = 0;
888  sh->tc_offset = 0;
889  }
890 
891  if (s->ps.pps->seq_loop_filter_across_slices_enabled_flag &&
892  (sh->slice_sample_adaptive_offset_flag[0] ||
893  sh->slice_sample_adaptive_offset_flag[1] ||
894  !sh->disable_deblocking_filter_flag)) {
895  sh->slice_loop_filter_across_slices_enabled_flag = get_bits1(gb);
896  } else {
897  sh->slice_loop_filter_across_slices_enabled_flag = s->ps.pps->seq_loop_filter_across_slices_enabled_flag;
898  }
899  } else if (!s->slice_initialized) {
900  av_log(s->avctx, AV_LOG_ERROR, "Independent slice segment missing.\n");
901  return AVERROR_INVALIDDATA;
902  }
903 
904  sh->num_entry_point_offsets = 0;
905  if (s->ps.pps->tiles_enabled_flag || s->ps.pps->entropy_coding_sync_enabled_flag) {
906  unsigned num_entry_point_offsets = get_ue_golomb_long(gb);
907  // It would be possible to bound this tighter but this here is simpler
908  if (num_entry_point_offsets > get_bits_left(gb)) {
909  av_log(s->avctx, AV_LOG_ERROR, "num_entry_point_offsets %d is invalid\n", num_entry_point_offsets);
910  return AVERROR_INVALIDDATA;
911  }
912 
913  sh->num_entry_point_offsets = num_entry_point_offsets;
914  if (sh->num_entry_point_offsets > 0) {
915  int offset_len = get_ue_golomb_long(gb) + 1;
916 
917  if (offset_len < 1 || offset_len > 32) {
918  sh->num_entry_point_offsets = 0;
919  av_log(s->avctx, AV_LOG_ERROR, "offset_len %d is invalid\n", offset_len);
920  return AVERROR_INVALIDDATA;
921  }
922 
923  av_freep(&sh->entry_point_offset);
924  av_freep(&sh->offset);
925  av_freep(&sh->size);
926  sh->entry_point_offset = av_malloc_array(sh->num_entry_point_offsets, sizeof(unsigned));
927  sh->offset = av_malloc_array(sh->num_entry_point_offsets, sizeof(int));
928  sh->size = av_malloc_array(sh->num_entry_point_offsets, sizeof(int));
929  if (!sh->entry_point_offset || !sh->offset || !sh->size) {
930  sh->num_entry_point_offsets = 0;
931  av_log(s->avctx, AV_LOG_ERROR, "Failed to allocate memory\n");
932  return AVERROR(ENOMEM);
933  }
934  for (i = 0; i < sh->num_entry_point_offsets; i++) {
935  unsigned val = get_bits_long(gb, offset_len);
936  sh->entry_point_offset[i] = val + 1; // +1 to get the size
937  }
938  if (s->threads_number > 1 && (s->ps.pps->num_tile_rows > 1 || s->ps.pps->num_tile_columns > 1)) {
939  s->enable_parallel_tiles = 0; // TODO: you can enable tiles in parallel here
940  s->threads_number = 1;
941  } else
942  s->enable_parallel_tiles = 0;
943  } else
944  s->enable_parallel_tiles = 0;
945  }
946 
947  if (s->ps.pps->slice_header_extension_present_flag) {
948  unsigned int length = get_ue_golomb_long(gb);
949  if (length*8LL > get_bits_left(gb)) {
950  av_log(s->avctx, AV_LOG_ERROR, "too many slice_header_extension_data_bytes\n");
951  return AVERROR_INVALIDDATA;
952  }
953  for (i = 0; i < length; i++)
954  skip_bits(gb, 8); // slice_header_extension_data_byte
955  }
956 
957  // Inferred parameters
958  sh->slice_qp = 26U + s->ps.pps->pic_init_qp_minus26 + sh->slice_qp_delta;
959  if (sh->slice_qp > 51 ||
960  sh->slice_qp < -s->ps.sps->qp_bd_offset) {
961  av_log(s->avctx, AV_LOG_ERROR,
962  "The slice_qp %d is outside the valid range "
963  "[%d, 51].\n",
964  sh->slice_qp,
965  -s->ps.sps->qp_bd_offset);
966  return AVERROR_INVALIDDATA;
967  }
968 
969  s->sh.slice_ctb_addr_rs = s->sh.slice_segment_addr;
970 
971  if (!s->sh.slice_ctb_addr_rs && s->sh.dependent_slice_segment_flag) {
972  av_log(s->avctx, AV_LOG_ERROR, "Impossible slice segment.\n");
973  return AVERROR_INVALIDDATA;
974  }
975 
976  if (get_bits_left(gb) < 0) {
977  av_log(s->avctx, AV_LOG_ERROR,
978  "Overread slice header by %d bits\n", -get_bits_left(gb));
979  return AVERROR_INVALIDDATA;
980  }
981 
982  s->HEVClc->first_qp_group = !s->sh.dependent_slice_segment_flag;
983 
984  if (!s->ps.pps->cu_qp_delta_enabled_flag)
985  s->HEVClc->qp_y = s->sh.slice_qp;
986 
987  s->slice_initialized = 1;
988  s->HEVClc->tu.cu_qp_offset_cb = 0;
989  s->HEVClc->tu.cu_qp_offset_cr = 0;
990 
991  return 0;
992 }
993 
994 #define CTB(tab, x, y) ((tab)[(y) * s->ps.sps->ctb_width + (x)])
995 
996 #define SET_SAO(elem, value) \
997 do { \
998  if (!sao_merge_up_flag && !sao_merge_left_flag) \
999  sao->elem = value; \
1000  else if (sao_merge_left_flag) \
1001  sao->elem = CTB(s->sao, rx-1, ry).elem; \
1002  else if (sao_merge_up_flag) \
1003  sao->elem = CTB(s->sao, rx, ry-1).elem; \
1004  else \
1005  sao->elem = 0; \
1006 } while (0)
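// SET_SAO() decodes a fresh value unless one of the SAO merge flags is set,
// in which case the parameter is copied from the left or the above CTB.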
1007 
1008 static void hls_sao_param(HEVCContext *s, int rx, int ry)
1009 {
1010  HEVCLocalContext *lc = s->HEVClc;
1011  int sao_merge_left_flag = 0;
1012  int sao_merge_up_flag = 0;
1013  SAOParams *sao = &CTB(s->sao, rx, ry);
1014  int c_idx, i;
1015 
1016  if (s->sh.slice_sample_adaptive_offset_flag[0] ||
1017  s->sh.slice_sample_adaptive_offset_flag[1]) {
1018  if (rx > 0) {
1019  if (lc->ctb_left_flag)
1020  sao_merge_left_flag = ff_hevc_sao_merge_flag_decode(s);
1021  }
1022  if (ry > 0 && !sao_merge_left_flag) {
1023  if (lc->ctb_up_flag)
1024  sao_merge_up_flag = ff_hevc_sao_merge_flag_decode(s);
1025  }
1026  }
1027 
1028  for (c_idx = 0; c_idx < (s->ps.sps->chroma_format_idc ? 3 : 1); c_idx++) {
1029  int log2_sao_offset_scale = c_idx == 0 ? s->ps.pps->log2_sao_offset_scale_luma :
1030  s->ps.pps->log2_sao_offset_scale_chroma;
1031 
1032  if (!s->sh.slice_sample_adaptive_offset_flag[c_idx]) {
1033  sao->type_idx[c_idx] = SAO_NOT_APPLIED;
1034  continue;
1035  }
1036 
1037  if (c_idx == 2) {
1038  sao->type_idx[2] = sao->type_idx[1];
1039  sao->eo_class[2] = sao->eo_class[1];
1040  } else {
1041  SET_SAO(type_idx[c_idx], ff_hevc_sao_type_idx_decode(s));
1042  }
1043 
1044  if (sao->type_idx[c_idx] == SAO_NOT_APPLIED)
1045  continue;
1046 
1047  for (i = 0; i < 4; i++)
1048  SET_SAO(offset_abs[c_idx][i], ff_hevc_sao_offset_abs_decode(s));
1049 
1050  if (sao->type_idx[c_idx] == SAO_BAND) {
1051  for (i = 0; i < 4; i++) {
1052  if (sao->offset_abs[c_idx][i]) {
1053  SET_SAO(offset_sign[c_idx][i],
1054  ff_hevc_sao_offset_sign_decode(s));
1055  } else {
1056  sao->offset_sign[c_idx][i] = 0;
1057  }
1058  }
1059  SET_SAO(band_position[c_idx], ff_hevc_sao_band_position_decode(s));
1060  } else if (c_idx != 2) {
1061  SET_SAO(eo_class[c_idx], ff_hevc_sao_eo_class_decode(s));
1062  }
1063 
1064  // Inferred parameters
1065  sao->offset_val[c_idx][0] = 0;
1066  for (i = 0; i < 4; i++) {
1067  sao->offset_val[c_idx][i + 1] = sao->offset_abs[c_idx][i];
1068  if (sao->type_idx[c_idx] == SAO_EDGE) {
1069  if (i > 1)
1070  sao->offset_val[c_idx][i + 1] = -sao->offset_val[c_idx][i + 1];
1071  } else if (sao->offset_sign[c_idx][i]) {
1072  sao->offset_val[c_idx][i + 1] = -sao->offset_val[c_idx][i + 1];
1073  }
1074  sao->offset_val[c_idx][i + 1] *= 1 << log2_sao_offset_scale;
1075  }
1076  }
1077 }
1078 
1079 #undef SET_SAO
1080 #undef CTB
1081 
1082 static int hls_cross_component_pred(HEVCContext *s, int idx) {
1083  HEVCLocalContext *lc = s->HEVClc;
1084  int log2_res_scale_abs_plus1 = ff_hevc_log2_res_scale_abs(s, idx);
1085 
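 // Cross-component prediction (range extensions): the chroma residual later
 // gets (res_scale_val * luma_residual) >> 3 added to it; res_scale_val is
 // +/- 2^(log2_res_scale_abs_plus1 - 1) when coded, 0 otherwise.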
1086  if (log2_res_scale_abs_plus1 != 0) {
1087  int res_scale_sign_flag = ff_hevc_res_scale_sign_flag(s, idx);
1088  lc->tu.res_scale_val = (1 << (log2_res_scale_abs_plus1 - 1)) *
1089  (1 - 2 * res_scale_sign_flag);
1090  } else {
1091  lc->tu.res_scale_val = 0;
1092  }
1093 
1094 
1095  return 0;
1096 }
1097 
1098 static int hls_transform_unit(HEVCContext *s, int x0, int y0,
1099  int xBase, int yBase, int cb_xBase, int cb_yBase,
1100  int log2_cb_size, int log2_trafo_size,
1101  int blk_idx, int cbf_luma, int *cbf_cb, int *cbf_cr)
1102 {
1103  HEVCLocalContext *lc = s->HEVClc;
1104  const int log2_trafo_size_c = log2_trafo_size - s->ps.sps->hshift[1];
1105  int i;
1106 
1107  if (lc->cu.pred_mode == MODE_INTRA) {
1108  int trafo_size = 1 << log2_trafo_size;
1109  ff_hevc_set_neighbour_available(s, x0, y0, trafo_size, trafo_size);
1110 
1111  s->hpc.intra_pred[log2_trafo_size - 2](s, x0, y0, 0);
1112  }
1113 
1114  if (cbf_luma || cbf_cb[0] || cbf_cr[0] ||
1115  (s->ps.sps->chroma_format_idc == 2 && (cbf_cb[1] || cbf_cr[1]))) {
1116  int scan_idx = SCAN_DIAG;
1117  int scan_idx_c = SCAN_DIAG;
1118  int cbf_chroma = cbf_cb[0] || cbf_cr[0] ||
1119  (s->ps.sps->chroma_format_idc == 2 &&
1120  (cbf_cb[1] || cbf_cr[1]));
1121 
1122  if (s->ps.pps->cu_qp_delta_enabled_flag && !lc->tu.is_cu_qp_delta_coded) {
1123  lc->tu.cu_qp_delta = ff_hevc_cu_qp_delta_abs(s);
1124  if (lc->tu.cu_qp_delta != 0)
1125  if (ff_hevc_cu_qp_delta_sign_flag(s) == 1)
1126  lc->tu.cu_qp_delta = -lc->tu.cu_qp_delta;
1127  lc->tu.is_cu_qp_delta_coded = 1;
1128 
1129  if (lc->tu.cu_qp_delta < -(26 + s->ps.sps->qp_bd_offset / 2) ||
1130  lc->tu.cu_qp_delta > (25 + s->ps.sps->qp_bd_offset / 2)) {
1131  av_log(s->avctx, AV_LOG_ERROR,
1132  "The cu_qp_delta %d is outside the valid range "
1133  "[%d, %d].\n",
1134  lc->tu.cu_qp_delta,
1135  -(26 + s->ps.sps->qp_bd_offset / 2),
1136  (25 + s->ps.sps->qp_bd_offset / 2));
1137  return AVERROR_INVALIDDATA;
1138  }
1139 
1140  ff_hevc_set_qPy(s, cb_xBase, cb_yBase, log2_cb_size);
1141  }
1142 
1143  if (s->sh.cu_chroma_qp_offset_enabled_flag && cbf_chroma &&
1144  !lc->tu.is_cu_chroma_qp_offset_coded) {
1145  int cu_chroma_qp_offset_flag = ff_hevc_cu_chroma_qp_offset_flag(s);
1146  if (cu_chroma_qp_offset_flag) {
1147  int cu_chroma_qp_offset_idx = 0;
1148  if (s->ps.pps->chroma_qp_offset_list_len_minus1 > 0) {
1149  cu_chroma_qp_offset_idx = ff_hevc_cu_chroma_qp_offset_idx(s);
1150  av_log(s->avctx, AV_LOG_ERROR,
1151  "cu_chroma_qp_offset_idx not yet tested.\n");
1152  }
1153  lc->tu.cu_qp_offset_cb = s->ps.pps->cb_qp_offset_list[cu_chroma_qp_offset_idx];
1154  lc->tu.cu_qp_offset_cr = s->ps.pps->cr_qp_offset_list[cu_chroma_qp_offset_idx];
1155  } else {
1156  lc->tu.cu_qp_offset_cb = 0;
1157  lc->tu.cu_qp_offset_cr = 0;
1158  }
1159  lc->tu.is_cu_chroma_qp_offset_coded = 1;
1160  }
1161 
1162  if (lc->cu.pred_mode == MODE_INTRA && log2_trafo_size < 4) {
1163  if (lc->tu.intra_pred_mode >= 6 &&
1164  lc->tu.intra_pred_mode <= 14) {
1165  scan_idx = SCAN_VERT;
1166  } else if (lc->tu.intra_pred_mode >= 22 &&
1167  lc->tu.intra_pred_mode <= 30) {
1168  scan_idx = SCAN_HORIZ;
1169  }
1170 
1171  if (lc->tu.intra_pred_mode_c >= 6 &&
1172  lc->tu.intra_pred_mode_c <= 14) {
1173  scan_idx_c = SCAN_VERT;
1174  } else if (lc->tu.intra_pred_mode_c >= 22 &&
1175  lc->tu.intra_pred_mode_c <= 30) {
1176  scan_idx_c = SCAN_HORIZ;
1177  }
1178  }
1179 
1180  lc->tu.cross_pf = 0;
1181 
1182  if (cbf_luma)
1183  ff_hevc_hls_residual_coding(s, x0, y0, log2_trafo_size, scan_idx, 0);
1184  if (s->ps.sps->chroma_format_idc && (log2_trafo_size > 2 || s->ps.sps->chroma_format_idc == 3)) {
1185  int trafo_size_h = 1 << (log2_trafo_size_c + s->ps.sps->hshift[1]);
1186  int trafo_size_v = 1 << (log2_trafo_size_c + s->ps.sps->vshift[1]);
1187  lc->tu.cross_pf = (s->ps.pps->cross_component_prediction_enabled_flag && cbf_luma &&
1188  (lc->cu.pred_mode == MODE_INTER ||
1189  (lc->tu.chroma_mode_c == 4)));
1190 
1191  if (lc->tu.cross_pf) {
1192  hls_cross_component_pred(s, 0);
1193  }
1194  for (i = 0; i < (s->ps.sps->chroma_format_idc == 2 ? 2 : 1); i++) {
1195  if (lc->cu.pred_mode == MODE_INTRA) {
1196  ff_hevc_set_neighbour_available(s, x0, y0 + (i << log2_trafo_size_c), trafo_size_h, trafo_size_v);
1197  s->hpc.intra_pred[log2_trafo_size_c - 2](s, x0, y0 + (i << log2_trafo_size_c), 1);
1198  }
1199  if (cbf_cb[i])
1200  ff_hevc_hls_residual_coding(s, x0, y0 + (i << log2_trafo_size_c),
1201  log2_trafo_size_c, scan_idx_c, 1);
1202  else
1203  if (lc->tu.cross_pf) {
1204  ptrdiff_t stride = s->frame->linesize[1];
1205  int hshift = s->ps.sps->hshift[1];
1206  int vshift = s->ps.sps->vshift[1];
1207  int16_t *coeffs_y = (int16_t*)lc->edge_emu_buffer;
1208  int16_t *coeffs = (int16_t*)lc->edge_emu_buffer2;
1209  int size = 1 << log2_trafo_size_c;
1210 
1211  uint8_t *dst = &s->frame->data[1][(y0 >> vshift) * stride +
1212  ((x0 >> hshift) << s->ps.sps->pixel_shift)];
1213  for (i = 0; i < (size * size); i++) {
1214  coeffs[i] = ((lc->tu.res_scale_val * coeffs_y[i]) >> 3);
1215  }
1216  s->hevcdsp.add_residual[log2_trafo_size_c-2](dst, coeffs, stride);
1217  }
1218  }
1219 
1220  if (lc->tu.cross_pf) {
1221  hls_cross_component_pred(s, 1);
1222  }
1223  for (i = 0; i < (s->ps.sps->chroma_format_idc == 2 ? 2 : 1); i++) {
1224  if (lc->cu.pred_mode == MODE_INTRA) {
1225  ff_hevc_set_neighbour_available(s, x0, y0 + (i << log2_trafo_size_c), trafo_size_h, trafo_size_v);
1226  s->hpc.intra_pred[log2_trafo_size_c - 2](s, x0, y0 + (i << log2_trafo_size_c), 2);
1227  }
1228  if (cbf_cr[i])
1229  ff_hevc_hls_residual_coding(s, x0, y0 + (i << log2_trafo_size_c),
1230  log2_trafo_size_c, scan_idx_c, 2);
1231  else
1232  if (lc->tu.cross_pf) {
1233  ptrdiff_t stride = s->frame->linesize[2];
1234  int hshift = s->ps.sps->hshift[2];
1235  int vshift = s->ps.sps->vshift[2];
1236  int16_t *coeffs_y = (int16_t*)lc->edge_emu_buffer;
1237  int16_t *coeffs = (int16_t*)lc->edge_emu_buffer2;
1238  int size = 1 << log2_trafo_size_c;
1239 
1240  uint8_t *dst = &s->frame->data[2][(y0 >> vshift) * stride +
1241  ((x0 >> hshift) << s->ps.sps->pixel_shift)];
1242  for (i = 0; i < (size * size); i++) {
1243  coeffs[i] = ((lc->tu.res_scale_val * coeffs_y[i]) >> 3);
1244  }
1245  s->hevcdsp.add_residual[log2_trafo_size_c-2](dst, coeffs, stride);
1246  }
1247  }
1248  } else if (s->ps.sps->chroma_format_idc && blk_idx == 3) {
1249  int trafo_size_h = 1 << (log2_trafo_size + 1);
1250  int trafo_size_v = 1 << (log2_trafo_size + s->ps.sps->vshift[1]);
1251  for (i = 0; i < (s->ps.sps->chroma_format_idc == 2 ? 2 : 1); i++) {
1252  if (lc->cu.pred_mode == MODE_INTRA) {
1253  ff_hevc_set_neighbour_available(s, xBase, yBase + (i << log2_trafo_size),
1254  trafo_size_h, trafo_size_v);
1255  s->hpc.intra_pred[log2_trafo_size - 2](s, xBase, yBase + (i << log2_trafo_size), 1);
1256  }
1257  if (cbf_cb[i])
1258  ff_hevc_hls_residual_coding(s, xBase, yBase + (i << log2_trafo_size),
1259  log2_trafo_size, scan_idx_c, 1);
1260  }
1261  for (i = 0; i < (s->ps.sps->chroma_format_idc == 2 ? 2 : 1); i++) {
1262  if (lc->cu.pred_mode == MODE_INTRA) {
1263  ff_hevc_set_neighbour_available(s, xBase, yBase + (i << log2_trafo_size),
1264  trafo_size_h, trafo_size_v);
1265  s->hpc.intra_pred[log2_trafo_size - 2](s, xBase, yBase + (i << log2_trafo_size), 2);
1266  }
1267  if (cbf_cr[i])
1268  ff_hevc_hls_residual_coding(s, xBase, yBase + (i << log2_trafo_size),
1269  log2_trafo_size, scan_idx_c, 2);
1270  }
1271  }
1272  } else if (s->ps.sps->chroma_format_idc && lc->cu.pred_mode == MODE_INTRA) {
1273  if (log2_trafo_size > 2 || s->ps.sps->chroma_format_idc == 3) {
1274  int trafo_size_h = 1 << (log2_trafo_size_c + s->ps.sps->hshift[1]);
1275  int trafo_size_v = 1 << (log2_trafo_size_c + s->ps.sps->vshift[1]);
1276  ff_hevc_set_neighbour_available(s, x0, y0, trafo_size_h, trafo_size_v);
1277  s->hpc.intra_pred[log2_trafo_size_c - 2](s, x0, y0, 1);
1278  s->hpc.intra_pred[log2_trafo_size_c - 2](s, x0, y0, 2);
1279  if (s->ps.sps->chroma_format_idc == 2) {
1280  ff_hevc_set_neighbour_available(s, x0, y0 + (1 << log2_trafo_size_c),
1281  trafo_size_h, trafo_size_v);
1282  s->hpc.intra_pred[log2_trafo_size_c - 2](s, x0, y0 + (1 << log2_trafo_size_c), 1);
1283  s->hpc.intra_pred[log2_trafo_size_c - 2](s, x0, y0 + (1 << log2_trafo_size_c), 2);
1284  }
1285  } else if (blk_idx == 3) {
1286  int trafo_size_h = 1 << (log2_trafo_size + 1);
1287  int trafo_size_v = 1 << (log2_trafo_size + s->ps.sps->vshift[1]);
1288  ff_hevc_set_neighbour_available(s, xBase, yBase,
1289  trafo_size_h, trafo_size_v);
1290  s->hpc.intra_pred[log2_trafo_size - 2](s, xBase, yBase, 1);
1291  s->hpc.intra_pred[log2_trafo_size - 2](s, xBase, yBase, 2);
1292  if (s->ps.sps->chroma_format_idc == 2) {
1293  ff_hevc_set_neighbour_available(s, xBase, yBase + (1 << (log2_trafo_size)),
1294  trafo_size_h, trafo_size_v);
1295  s->hpc.intra_pred[log2_trafo_size - 2](s, xBase, yBase + (1 << (log2_trafo_size)), 1);
1296  s->hpc.intra_pred[log2_trafo_size - 2](s, xBase, yBase + (1 << (log2_trafo_size)), 2);
1297  }
1298  }
1299  }
1300 
1301  return 0;
1302 }
1303 
1304 static void set_deblocking_bypass(HEVCContext *s, int x0, int y0, int log2_cb_size)
1305 {
1306  int cb_size = 1 << log2_cb_size;
1307  int log2_min_pu_size = s->ps.sps->log2_min_pu_size;
1308 
1309  int min_pu_width = s->ps.sps->min_pu_width;
1310  int x_end = FFMIN(x0 + cb_size, s->ps.sps->width);
1311  int y_end = FFMIN(y0 + cb_size, s->ps.sps->height);
1312  int i, j;
1313 
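 // Mark every min-PU covered by this block (value 2) in the is_pcm table so
 // the in-loop filter code knows these lossless samples must be left untouched.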
1314  for (j = (y0 >> log2_min_pu_size); j < (y_end >> log2_min_pu_size); j++)
1315  for (i = (x0 >> log2_min_pu_size); i < (x_end >> log2_min_pu_size); i++)
1316  s->is_pcm[i + j * min_pu_width] = 2;
1317 }
1318 
1319 static int hls_transform_tree(HEVCContext *s, int x0, int y0,
1320  int xBase, int yBase, int cb_xBase, int cb_yBase,
1321  int log2_cb_size, int log2_trafo_size,
1322  int trafo_depth, int blk_idx,
1323  const int *base_cbf_cb, const int *base_cbf_cr)
1324 {
1325  HEVCLocalContext *lc = s->HEVClc;
1326  uint8_t split_transform_flag;
1327  int cbf_cb[2];
1328  int cbf_cr[2];
1329  int ret;
1330 
1331  cbf_cb[0] = base_cbf_cb[0];
1332  cbf_cb[1] = base_cbf_cb[1];
1333  cbf_cr[0] = base_cbf_cr[0];
1334  cbf_cr[1] = base_cbf_cr[1];
1335 
1336  if (lc->cu.intra_split_flag) {
1337  if (trafo_depth == 1) {
1338  lc->tu.intra_pred_mode = lc->pu.intra_pred_mode[blk_idx];
1339  if (s->ps.sps->chroma_format_idc == 3) {
1340  lc->tu.intra_pred_mode_c = lc->pu.intra_pred_mode_c[blk_idx];
1341  lc->tu.chroma_mode_c = lc->pu.chroma_mode_c[blk_idx];
1342  } else {
1343  lc->tu.intra_pred_mode_c = lc->pu.intra_pred_mode_c[0];
1344  lc->tu.chroma_mode_c = lc->pu.chroma_mode_c[0];
1345  }
1346  }
1347  } else {
1348  lc->tu.intra_pred_mode = lc->pu.intra_pred_mode[0];
1349  lc->tu.intra_pred_mode_c = lc->pu.intra_pred_mode_c[0];
1350  lc->tu.chroma_mode_c = lc->pu.chroma_mode_c[0];
1351  }
1352 
1353  if (log2_trafo_size <= s->ps.sps->log2_max_trafo_size &&
1354  log2_trafo_size > s->ps.sps->log2_min_tb_size &&
1355  trafo_depth < lc->cu.max_trafo_depth &&
1356  !(lc->cu.intra_split_flag && trafo_depth == 0)) {
1357  split_transform_flag = ff_hevc_split_transform_flag_decode(s, log2_trafo_size);
1358  } else {
1359  int inter_split = s->ps.sps->max_transform_hierarchy_depth_inter == 0 &&
1360  lc->cu.pred_mode == MODE_INTER &&
1361  lc->cu.part_mode != PART_2Nx2N &&
1362  trafo_depth == 0;
1363 
1364  split_transform_flag = log2_trafo_size > s->ps.sps->log2_max_trafo_size ||
1365  (lc->cu.intra_split_flag && trafo_depth == 0) ||
1366  inter_split;
1367  }
1368 
1369  if (s->ps.sps->chroma_format_idc && (log2_trafo_size > 2 || s->ps.sps->chroma_format_idc == 3)) {
1370  if (trafo_depth == 0 || cbf_cb[0]) {
1371  cbf_cb[0] = ff_hevc_cbf_cb_cr_decode(s, trafo_depth);
1372  if (s->ps.sps->chroma_format_idc == 2 && (!split_transform_flag || log2_trafo_size == 3)) {
1373  cbf_cb[1] = ff_hevc_cbf_cb_cr_decode(s, trafo_depth);
1374  }
1375  }
1376 
1377  if (trafo_depth == 0 || cbf_cr[0]) {
1378  cbf_cr[0] = ff_hevc_cbf_cb_cr_decode(s, trafo_depth);
1379  if (s->ps.sps->chroma_format_idc == 2 && (!split_transform_flag || log2_trafo_size == 3)) {
1380  cbf_cr[1] = ff_hevc_cbf_cb_cr_decode(s, trafo_depth);
1381  }
1382  }
1383  }
1384 
1385  if (split_transform_flag) {
1386  const int trafo_size_split = 1 << (log2_trafo_size - 1);
1387  const int x1 = x0 + trafo_size_split;
1388  const int y1 = y0 + trafo_size_split;
1389 
1390 #define SUBDIVIDE(x, y, idx) \
1391 do { \
1392  ret = hls_transform_tree(s, x, y, x0, y0, cb_xBase, cb_yBase, log2_cb_size, \
1393  log2_trafo_size - 1, trafo_depth + 1, idx, \
1394  cbf_cb, cbf_cr); \
1395  if (ret < 0) \
1396  return ret; \
1397 } while (0)
1398 
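 // Recurse into the four equally sized child transform blocks (quad-tree split).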
1399  SUBDIVIDE(x0, y0, 0);
1400  SUBDIVIDE(x1, y0, 1);
1401  SUBDIVIDE(x0, y1, 2);
1402  SUBDIVIDE(x1, y1, 3);
1403 
1404 #undef SUBDIVIDE
1405  } else {
1406  int min_tu_size = 1 << s->ps.sps->log2_min_tb_size;
1407  int log2_min_tu_size = s->ps.sps->log2_min_tb_size;
1408  int min_tu_width = s->ps.sps->min_tb_width;
1409  int cbf_luma = 1;
1410 
1411  if (lc->cu.pred_mode == MODE_INTRA || trafo_depth != 0 ||
1412  cbf_cb[0] || cbf_cr[0] ||
1413  (s->ps.sps->chroma_format_idc == 2 && (cbf_cb[1] || cbf_cr[1]))) {
1414  cbf_luma = ff_hevc_cbf_luma_decode(s, trafo_depth);
1415  }
1416 
1417  ret = hls_transform_unit(s, x0, y0, xBase, yBase, cb_xBase, cb_yBase,
1418  log2_cb_size, log2_trafo_size,
1419  blk_idx, cbf_luma, cbf_cb, cbf_cr);
1420  if (ret < 0)
1421  return ret;
1422  // TODO: store cbf_luma somewhere else
1423  if (cbf_luma) {
1424  int i, j;
1425  for (i = 0; i < (1 << log2_trafo_size); i += min_tu_size)
1426  for (j = 0; j < (1 << log2_trafo_size); j += min_tu_size) {
1427  int x_tu = (x0 + j) >> log2_min_tu_size;
1428  int y_tu = (y0 + i) >> log2_min_tu_size;
1429  s->cbf_luma[y_tu * min_tu_width + x_tu] = 1;
1430  }
1431  }
1432  if (!s->sh.disable_deblocking_filter_flag) {
1433  ff_hevc_deblocking_boundary_strengths(s, x0, y0, log2_trafo_size);
1434  if (s->ps.pps->transquant_bypass_enable_flag &&
1435  lc->cu.cu_transquant_bypass_flag)
1436  set_deblocking_bypass(s, x0, y0, log2_trafo_size);
1437  }
1438  }
1439  return 0;
1440 }
1441 
1442 static int hls_pcm_sample(HEVCContext *s, int x0, int y0, int log2_cb_size)
1443 {
1444  HEVCLocalContext *lc = s->HEVClc;
1445  GetBitContext gb;
1446  int cb_size = 1 << log2_cb_size;
1447  ptrdiff_t stride0 = s->frame->linesize[0];
1448  ptrdiff_t stride1 = s->frame->linesize[1];
1449  ptrdiff_t stride2 = s->frame->linesize[2];
1450  uint8_t *dst0 = &s->frame->data[0][y0 * stride0 + (x0 << s->ps.sps->pixel_shift)];
1451  uint8_t *dst1 = &s->frame->data[1][(y0 >> s->ps.sps->vshift[1]) * stride1 + ((x0 >> s->ps.sps->hshift[1]) << s->ps.sps->pixel_shift)];
1452  uint8_t *dst2 = &s->frame->data[2][(y0 >> s->ps.sps->vshift[2]) * stride2 + ((x0 >> s->ps.sps->hshift[2]) << s->ps.sps->pixel_shift)];
1453 
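 // Total number of pcm_sample bits: the full-resolution luma block plus the
 // two subsampled chroma blocks, each at its own PCM bit depth.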
1454  int length = cb_size * cb_size * s->ps.sps->pcm.bit_depth +
1455  (((cb_size >> s->ps.sps->hshift[1]) * (cb_size >> s->ps.sps->vshift[1])) +
1456  ((cb_size >> s->ps.sps->hshift[2]) * (cb_size >> s->ps.sps->vshift[2]))) *
1457  s->ps.sps->pcm.bit_depth_chroma;
1458  const uint8_t *pcm = skip_bytes(&lc->cc, (length + 7) >> 3);
1459  int ret;
1460 
1461  if (!s->sh.disable_deblocking_filter_flag)
1462  ff_hevc_deblocking_boundary_strengths(s, x0, y0, log2_cb_size);
1463 
1464  ret = init_get_bits(&gb, pcm, length);
1465  if (ret < 0)
1466  return ret;
1467 
1468  s->hevcdsp.put_pcm(dst0, stride0, cb_size, cb_size, &gb, s->ps.sps->pcm.bit_depth);
1469  if (s->ps.sps->chroma_format_idc) {
1470  s->hevcdsp.put_pcm(dst1, stride1,
1471  cb_size >> s->ps.sps->hshift[1],
1472  cb_size >> s->ps.sps->vshift[1],
1473  &gb, s->ps.sps->pcm.bit_depth_chroma);
1474  s->hevcdsp.put_pcm(dst2, stride2,
1475  cb_size >> s->ps.sps->hshift[2],
1476  cb_size >> s->ps.sps->vshift[2],
1477  &gb, s->ps.sps->pcm.bit_depth_chroma);
1478  }
1479 
1480  return 0;
1481 }
1482 
1483 /**
1484  * 8.5.3.2.2.1 Luma sample unidirectional interpolation process
1485  *
1486  * @param s HEVC decoding context
1487  * @param dst target buffer for block data at block position
1488  * @param dststride stride of the dst buffer
1489  * @param ref reference picture buffer at origin (0, 0)
1490  * @param mv motion vector (relative to block position) to get pixel data from
1491  * @param x_off horizontal position of block from origin (0, 0)
1492  * @param y_off vertical position of block from origin (0, 0)
1493  * @param block_w width of block
1494  * @param block_h height of block
1495  * @param luma_weight weighting factor applied to the luma prediction
1496  * @param luma_offset additive offset applied to the luma prediction value
1497  */
1498 
1499 static void luma_mc_uni(HEVCContext *s, uint8_t *dst, ptrdiff_t dststride,
1500  AVFrame *ref, const Mv *mv, int x_off, int y_off,
1501  int block_w, int block_h, int luma_weight, int luma_offset)
1502 {
1503  HEVCLocalContext *lc = s->HEVClc;
1504  uint8_t *src = ref->data[0];
1505  ptrdiff_t srcstride = ref->linesize[0];
1506  int pic_width = s->ps.sps->width;
1507  int pic_height = s->ps.sps->height;
1508  int mx = mv->x & 3;
1509  int my = mv->y & 3;
1510  int weight_flag = (s->sh.slice_type == HEVC_SLICE_P && s->ps.pps->weighted_pred_flag) ||
1511  (s->sh.slice_type == HEVC_SLICE_B && s->ps.pps->weighted_bipred_flag);
1512  int idx = hevc_pel_weight[block_w];
1513 
1514  x_off += mv->x >> 2;
1515  y_off += mv->y >> 2;
1516  src += y_off * srcstride + (x_off * (1 << s->ps.sps->pixel_shift));
1517 
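 // If the interpolation window (block plus QPEL_EXTRA filter taps) would read
 // outside the picture, first copy the source into the edge buffer with
 // replicated borders and interpolate from there.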
1518  if (x_off < QPEL_EXTRA_BEFORE || y_off < QPEL_EXTRA_AFTER ||
1519  x_off >= pic_width - block_w - QPEL_EXTRA_AFTER ||
1520  y_off >= pic_height - block_h - QPEL_EXTRA_AFTER) {
1521  const ptrdiff_t edge_emu_stride = EDGE_EMU_BUFFER_STRIDE << s->ps.sps->pixel_shift;
1522  int offset = QPEL_EXTRA_BEFORE * srcstride + (QPEL_EXTRA_BEFORE << s->ps.sps->pixel_shift);
1523  int buf_offset = QPEL_EXTRA_BEFORE * edge_emu_stride + (QPEL_EXTRA_BEFORE << s->ps.sps->pixel_shift);
1524 
1525  s->vdsp.emulated_edge_mc(lc->edge_emu_buffer, src - offset,
1526  edge_emu_stride, srcstride,
1527  block_w + QPEL_EXTRA,
1528  block_h + QPEL_EXTRA,
1529  x_off - QPEL_EXTRA_BEFORE, y_off - QPEL_EXTRA_BEFORE,
1530  pic_width, pic_height);
1531  src = lc->edge_emu_buffer + buf_offset;
1532  srcstride = edge_emu_stride;
1533  }
1534 
1535  if (!weight_flag)
1536  s->hevcdsp.put_hevc_qpel_uni[idx][!!my][!!mx](dst, dststride, src, srcstride,
1537  block_h, mx, my, block_w);
1538  else
1539  s->hevcdsp.put_hevc_qpel_uni_w[idx][!!my][!!mx](dst, dststride, src, srcstride,
1540  block_h, s->sh.luma_log2_weight_denom,
1541  luma_weight, luma_offset, mx, my, block_w);
1542 }
1543 
1544 /**
1545  * 8.5.3.2.2.1 Luma sample bidirectional interpolation process
1546  *
1547  * @param s HEVC decoding context
1548  * @param dst target buffer for block data at block position
1549  * @param dststride stride of the dst buffer
1550  * @param ref0 reference picture0 buffer at origin (0, 0)
1551  * @param mv0 motion vector0 (relative to block position) to get pixel data from
1552  * @param x_off horizontal position of block from origin (0, 0)
1553  * @param y_off vertical position of block from origin (0, 0)
1554  * @param block_w width of block
1555  * @param block_h height of block
1556  * @param ref1 reference picture1 buffer at origin (0, 0)
1557  * @param mv1 motion vector1 (relative to block position) to get pixel data from
1558  * @param current_mv current motion vector structure
1559  */
1560  static void luma_mc_bi(HEVCContext *s, uint8_t *dst, ptrdiff_t dststride,
1561  AVFrame *ref0, const Mv *mv0, int x_off, int y_off,
1562  int block_w, int block_h, AVFrame *ref1, const Mv *mv1, struct MvField *current_mv)
1563 {
1564  HEVCLocalContext *lc = s->HEVClc;
1565  ptrdiff_t src0stride = ref0->linesize[0];
1566  ptrdiff_t src1stride = ref1->linesize[0];
1567  int pic_width = s->ps.sps->width;
1568  int pic_height = s->ps.sps->height;
1569  int mx0 = mv0->x & 3;
1570  int my0 = mv0->y & 3;
1571  int mx1 = mv1->x & 3;
1572  int my1 = mv1->y & 3;
1573  int weight_flag = (s->sh.slice_type == HEVC_SLICE_P && s->ps.pps->weighted_pred_flag) ||
1574  (s->sh.slice_type == HEVC_SLICE_B && s->ps.pps->weighted_bipred_flag);
1575  int x_off0 = x_off + (mv0->x >> 2);
1576  int y_off0 = y_off + (mv0->y >> 2);
1577  int x_off1 = x_off + (mv1->x >> 2);
1578  int y_off1 = y_off + (mv1->y >> 2);
1579  int idx = hevc_pel_weight[block_w];
1580 
1581  uint8_t *src0 = ref0->data[0] + y_off0 * src0stride + (int)((unsigned)x_off0 << s->ps.sps->pixel_shift);
1582  uint8_t *src1 = ref1->data[0] + y_off1 * src1stride + (int)((unsigned)x_off1 << s->ps.sps->pixel_shift);
1583 
1584  if (x_off0 < QPEL_EXTRA_BEFORE || y_off0 < QPEL_EXTRA_AFTER ||
1585  x_off0 >= pic_width - block_w - QPEL_EXTRA_AFTER ||
1586  y_off0 >= pic_height - block_h - QPEL_EXTRA_AFTER) {
1587  const ptrdiff_t edge_emu_stride = EDGE_EMU_BUFFER_STRIDE << s->ps.sps->pixel_shift;
1588  int offset = QPEL_EXTRA_BEFORE * src0stride + (QPEL_EXTRA_BEFORE << s->ps.sps->pixel_shift);
1589  int buf_offset = QPEL_EXTRA_BEFORE * edge_emu_stride + (QPEL_EXTRA_BEFORE << s->ps.sps->pixel_shift);
1590 
1591  s->vdsp.emulated_edge_mc(lc->edge_emu_buffer, src0 - offset,
1592  edge_emu_stride, src0stride,
1593  block_w + QPEL_EXTRA,
1594  block_h + QPEL_EXTRA,
1595  x_off0 - QPEL_EXTRA_BEFORE, y_off0 - QPEL_EXTRA_BEFORE,
1596  pic_width, pic_height);
1597  src0 = lc->edge_emu_buffer + buf_offset;
1598  src0stride = edge_emu_stride;
1599  }
1600 
1601  if (x_off1 < QPEL_EXTRA_BEFORE || y_off1 < QPEL_EXTRA_AFTER ||
1602  x_off1 >= pic_width - block_w - QPEL_EXTRA_AFTER ||
1603  y_off1 >= pic_height - block_h - QPEL_EXTRA_AFTER) {
1604  const ptrdiff_t edge_emu_stride = EDGE_EMU_BUFFER_STRIDE << s->ps.sps->pixel_shift;
1605  int offset = QPEL_EXTRA_BEFORE * src1stride + (QPEL_EXTRA_BEFORE << s->ps.sps->pixel_shift);
1606  int buf_offset = QPEL_EXTRA_BEFORE * edge_emu_stride + (QPEL_EXTRA_BEFORE << s->ps.sps->pixel_shift);
1607 
1608  s->vdsp.emulated_edge_mc(lc->edge_emu_buffer2, src1 - offset,
1609  edge_emu_stride, src1stride,
1610  block_w + QPEL_EXTRA,
1611  block_h + QPEL_EXTRA,
1612  x_off1 - QPEL_EXTRA_BEFORE, y_off1 - QPEL_EXTRA_BEFORE,
1613  pic_width, pic_height);
1614  src1 = lc->edge_emu_buffer2 + buf_offset;
1615  src1stride = edge_emu_stride;
1616  }
1617 
1618  s->hevcdsp.put_hevc_qpel[idx][!!my0][!!mx0](lc->tmp, src0, src0stride,
1619  block_h, mx0, my0, block_w);
1620  if (!weight_flag)
1621  s->hevcdsp.put_hevc_qpel_bi[idx][!!my1][!!mx1](dst, dststride, src1, src1stride, lc->tmp,
1622  block_h, mx1, my1, block_w);
1623  else
1624  s->hevcdsp.put_hevc_qpel_bi_w[idx][!!my1][!!mx1](dst, dststride, src1, src1stride, lc->tmp,
1625  block_h, s->sh.luma_log2_weight_denom,
1626  s->sh.luma_weight_l0[current_mv->ref_idx[0]],
1627  s->sh.luma_weight_l1[current_mv->ref_idx[1]],
1628  s->sh.luma_offset_l0[current_mv->ref_idx[0]],
1629  s->sh.luma_offset_l1[current_mv->ref_idx[1]],
1630  mx1, my1, block_w);
1631 
1632 }
1633 
1634 /**
1635  * 8.5.3.2.2.2 Chroma sample uniprediction interpolation process
1636  *
1637  * @param s HEVC decoding context
1638  * @param dst0 target buffer for block data at block position (one chroma plane)
1639  * @param dststride stride of the dst0 buffer
1640  * @param src0 source (reference) chroma plane at origin (0, 0)
1641  * @param srcstride stride of the src0 buffer
1642  * @param reflist reference list index selecting the motion vector from current_mv
1643  * @param x_off horizontal position of block from origin (0, 0)
1644  * @param y_off vertical position of block from origin (0, 0)
1645  * @param block_w width of block
1646  * @param block_h height of block
1647  * @param chroma_weight weighting factor applied to the chroma prediction
1648  * @param chroma_offset additive offset applied to the chroma prediction value
1649  */
1650 
1651 static void chroma_mc_uni(HEVCContext *s, uint8_t *dst0,
1652  ptrdiff_t dststride, uint8_t *src0, ptrdiff_t srcstride, int reflist,
1653  int x_off, int y_off, int block_w, int block_h, struct MvField *current_mv, int chroma_weight, int chroma_offset)
1654 {
1655  HEVCLocalContext *lc = s->HEVClc;
1656  int pic_width = s->ps.sps->width >> s->ps.sps->hshift[1];
1657  int pic_height = s->ps.sps->height >> s->ps.sps->vshift[1];
1658  const Mv *mv = &current_mv->mv[reflist];
1659  int weight_flag = (s->sh.slice_type == HEVC_SLICE_P && s->ps.pps->weighted_pred_flag) ||
1660  (s->sh.slice_type == HEVC_SLICE_B && s->ps.pps->weighted_bipred_flag);
1661  int idx = hevc_pel_weight[block_w];
1662  int hshift = s->ps.sps->hshift[1];
1663  int vshift = s->ps.sps->vshift[1];
1664  intptr_t mx = av_mod_uintp2(mv->x, 2 + hshift);
1665  intptr_t my = av_mod_uintp2(mv->y, 2 + vshift);
1666  intptr_t _mx = mx << (1 - hshift);
1667  intptr_t _my = my << (1 - vshift);
1668 
1669  x_off += mv->x >> (2 + hshift);
1670  y_off += mv->y >> (2 + vshift);
1671  src0 += y_off * srcstride + (x_off * (1 << s->ps.sps->pixel_shift));
1672 
1673  if (x_off < EPEL_EXTRA_BEFORE || y_off < EPEL_EXTRA_AFTER ||
1674  x_off >= pic_width - block_w - EPEL_EXTRA_AFTER ||
1675  y_off >= pic_height - block_h - EPEL_EXTRA_AFTER) {
1676  const int edge_emu_stride = EDGE_EMU_BUFFER_STRIDE << s->ps.sps->pixel_shift;
1677  int offset0 = EPEL_EXTRA_BEFORE * (srcstride + (1 << s->ps.sps->pixel_shift));
1678  int buf_offset0 = EPEL_EXTRA_BEFORE *
1679  (edge_emu_stride + (1 << s->ps.sps->pixel_shift));
1680  s->vdsp.emulated_edge_mc(lc->edge_emu_buffer, src0 - offset0,
1681  edge_emu_stride, srcstride,
1682  block_w + EPEL_EXTRA, block_h + EPEL_EXTRA,
1683  x_off - EPEL_EXTRA_BEFORE,
1684  y_off - EPEL_EXTRA_BEFORE,
1685  pic_width, pic_height);
1686 
1687  src0 = lc->edge_emu_buffer + buf_offset0;
1688  srcstride = edge_emu_stride;
1689  }
1690  if (!weight_flag)
1691  s->hevcdsp.put_hevc_epel_uni[idx][!!my][!!mx](dst0, dststride, src0, srcstride,
1692  block_h, _mx, _my, block_w);
1693  else
1694  s->hevcdsp.put_hevc_epel_uni_w[idx][!!my][!!mx](dst0, dststride, src0, srcstride,
1695  block_h, s->sh.chroma_log2_weight_denom,
1696  chroma_weight, chroma_offset, _mx, _my, block_w);
1697 }
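/*
 * Chroma motion vectors above are still in luma quarter-sample units, so the
 * effective precision depends on the subsampling. Worked example for 4:2:0
 * (hshift = vshift = 1):
 *   fraction: mx = av_mod_uintp2(mv->x, 3) = mv->x & 7    (1/8 chroma sample)
 *   integer:  x_off += mv->x >> 3
 *   DSP arg:  _mx = mx << (1 - 1) = mx
 * For 4:4:4 (hshift = 0) the fraction is mv->x & 3 and _mx = mx << 1, so the
 * epel filters are always indexed in eighth-sample units.
 */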
1698 
1699 /**
1700  * 8.5.3.2.2.2 Chroma sample bidirectional interpolation process
1701  *
1702  * @param s HEVC decoding context
1703  * @param dst0 target buffer for block data at block position
1704  * @param dststride stride of the dst0 buffer
1705  * @param ref0 reference picture0 buffer at origin (0, 0)
1706  * @param ref1 reference picture1 buffer at origin (0, 0)
1707  * @param x_off horizontal position of block from origin (0, 0)
1708  * @param y_off vertical position of block from origin (0, 0)
1709  * @param block_w width of block
1710  * @param block_h height of block
1711  * @param current_mv current motion vector structure holding the L0 and L1
1712  *                   motion vectors (relative to the block position) and the
1713  *                   reference indices used to select the weighting parameters
1714  * @param cidx chroma component index (0 = Cb, 1 = Cr)
1715  */
1716 static void chroma_mc_bi(HEVCContext *s, uint8_t *dst0, ptrdiff_t dststride, AVFrame *ref0, AVFrame *ref1,
1717  int x_off, int y_off, int block_w, int block_h, struct MvField *current_mv, int cidx)
1718 {
1719  HEVCLocalContext *lc = s->HEVClc;
1720  uint8_t *src1 = ref0->data[cidx+1];
1721  uint8_t *src2 = ref1->data[cidx+1];
1722  ptrdiff_t src1stride = ref0->linesize[cidx+1];
1723  ptrdiff_t src2stride = ref1->linesize[cidx+1];
1724  int weight_flag = (s->sh.slice_type == HEVC_SLICE_P && s->ps.pps->weighted_pred_flag) ||
1725  (s->sh.slice_type == HEVC_SLICE_B && s->ps.pps->weighted_bipred_flag);
1726  int pic_width = s->ps.sps->width >> s->ps.sps->hshift[1];
1727  int pic_height = s->ps.sps->height >> s->ps.sps->vshift[1];
1728  Mv *mv0 = &current_mv->mv[0];
1729  Mv *mv1 = &current_mv->mv[1];
1730  int hshift = s->ps.sps->hshift[1];
1731  int vshift = s->ps.sps->vshift[1];
1732 
1733  intptr_t mx0 = av_mod_uintp2(mv0->x, 2 + hshift);
1734  intptr_t my0 = av_mod_uintp2(mv0->y, 2 + vshift);
1735  intptr_t mx1 = av_mod_uintp2(mv1->x, 2 + hshift);
1736  intptr_t my1 = av_mod_uintp2(mv1->y, 2 + vshift);
1737  intptr_t _mx0 = mx0 << (1 - hshift);
1738  intptr_t _my0 = my0 << (1 - vshift);
1739  intptr_t _mx1 = mx1 << (1 - hshift);
1740  intptr_t _my1 = my1 << (1 - vshift);
1741 
1742  int x_off0 = x_off + (mv0->x >> (2 + hshift));
1743  int y_off0 = y_off + (mv0->y >> (2 + vshift));
1744  int x_off1 = x_off + (mv1->x >> (2 + hshift));
1745  int y_off1 = y_off + (mv1->y >> (2 + vshift));
1746  int idx = hevc_pel_weight[block_w];
1747  src1 += y_off0 * src1stride + (int)((unsigned)x_off0 << s->ps.sps->pixel_shift);
1748  src2 += y_off1 * src2stride + (int)((unsigned)x_off1 << s->ps.sps->pixel_shift);
1749 
1750  if (x_off0 < EPEL_EXTRA_BEFORE || y_off0 < EPEL_EXTRA_AFTER ||
1751  x_off0 >= pic_width - block_w - EPEL_EXTRA_AFTER ||
1752  y_off0 >= pic_height - block_h - EPEL_EXTRA_AFTER) {
1753  const int edge_emu_stride = EDGE_EMU_BUFFER_STRIDE << s->ps.sps->pixel_shift;
1754  int offset1 = EPEL_EXTRA_BEFORE * (src1stride + (1 << s->ps.sps->pixel_shift));
1755  int buf_offset1 = EPEL_EXTRA_BEFORE *
1756  (edge_emu_stride + (1 << s->ps.sps->pixel_shift));
1757 
1758  s->vdsp.emulated_edge_mc(lc->edge_emu_buffer, src1 - offset1,
1759  edge_emu_stride, src1stride,
1760  block_w + EPEL_EXTRA, block_h + EPEL_EXTRA,
1761  x_off0 - EPEL_EXTRA_BEFORE,
1762  y_off0 - EPEL_EXTRA_BEFORE,
1763  pic_width, pic_height);
1764 
1765  src1 = lc->edge_emu_buffer + buf_offset1;
1766  src1stride = edge_emu_stride;
1767  }
1768 
1769  if (x_off1 < EPEL_EXTRA_BEFORE || y_off1 < EPEL_EXTRA_AFTER ||
1770  x_off1 >= pic_width - block_w - EPEL_EXTRA_AFTER ||
1771  y_off1 >= pic_height - block_h - EPEL_EXTRA_AFTER) {
1772  const int edge_emu_stride = EDGE_EMU_BUFFER_STRIDE << s->ps.sps->pixel_shift;
1773  int offset1 = EPEL_EXTRA_BEFORE * (src2stride + (1 << s->ps.sps->pixel_shift));
1774  int buf_offset1 = EPEL_EXTRA_BEFORE *
1775  (edge_emu_stride + (1 << s->ps.sps->pixel_shift));
1776 
1777  s->vdsp.emulated_edge_mc(lc->edge_emu_buffer2, src2 - offset1,
1778  edge_emu_stride, src2stride,
1779  block_w + EPEL_EXTRA, block_h + EPEL_EXTRA,
1780  x_off1 - EPEL_EXTRA_BEFORE,
1781  y_off1 - EPEL_EXTRA_BEFORE,
1782  pic_width, pic_height);
1783 
1784  src2 = lc->edge_emu_buffer2 + buf_offset1;
1785  src2stride = edge_emu_stride;
1786  }
1787 
1788  s->hevcdsp.put_hevc_epel[idx][!!my0][!!mx0](lc->tmp, src1, src1stride,
1789  block_h, _mx0, _my0, block_w);
1790  if (!weight_flag)
1791  s->hevcdsp.put_hevc_epel_bi[idx][!!my1][!!mx1](dst0, s->frame->linesize[cidx+1],
1792  src2, src2stride, lc->tmp,
1793  block_h, _mx1, _my1, block_w);
1794  else
1795  s->hevcdsp.put_hevc_epel_bi_w[idx][!!my1][!!mx1](dst0, s->frame->linesize[cidx+1],
1796  src2, src2stride, lc->tmp,
1797  block_h,
1798  s->sh.chroma_log2_weight_denom,
1799  s->sh.chroma_weight_l0[current_mv->ref_idx[0]][cidx],
1800  s->sh.chroma_weight_l1[current_mv->ref_idx[1]][cidx],
1801  s->sh.chroma_offset_l0[current_mv->ref_idx[0]][cidx],
1802  s->sh.chroma_offset_l1[current_mv->ref_idx[1]][cidx],
1803  _mx1, _my1, block_w);
1804 }
1805 
1806 static void hevc_await_progress(HEVCContext *s, HEVCFrame *ref,
1807                                 const Mv *mv, int y0, int height)
1808 {
1809  if (s->threads_type == FF_THREAD_FRAME ) {
1810  int y = FFMAX(0, (mv->y >> 2) + y0 + height + 9);
1811 
1812  ff_thread_await_progress(&ref->tf, y, 0);
1813  }
1814 }
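/*
 * With frame threading a block may reference a picture that another thread is
 * still decoding, so hevc_await_progress() blocks until the reference has
 * reported progress past the bottom row the prediction will read. The extra
 * 9 rows of margin appear to cover the interpolation filter extension and the
 * in-loop filtering still pending on the reference rows.
 */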
1815 
1816 static void hevc_luma_mv_mvp_mode(HEVCContext *s, int x0, int y0, int nPbW,
1817  int nPbH, int log2_cb_size, int part_idx,
1818  int merge_idx, MvField *mv)
1819 {
1820  HEVCLocalContext *lc = s->HEVClc;
1821  enum InterPredIdc inter_pred_idc = PRED_L0;
1822  int mvp_flag;
1823 
1824  ff_hevc_set_neighbour_available(s, x0, y0, nPbW, nPbH);
1825  mv->pred_flag = 0;
1826  if (s->sh.slice_type == HEVC_SLICE_B)
1827  inter_pred_idc = ff_hevc_inter_pred_idc_decode(s, nPbW, nPbH);
1828 
1829  if (inter_pred_idc != PRED_L1) {
1830  if (s->sh.nb_refs[L0])
1831  mv->ref_idx[0]= ff_hevc_ref_idx_lx_decode(s, s->sh.nb_refs[L0]);
1832 
1833  mv->pred_flag = PF_L0;
1834  ff_hevc_hls_mvd_coding(s, x0, y0, 0);
1835  mvp_flag = ff_hevc_mvp_lx_flag_decode(s);
1836  ff_hevc_luma_mv_mvp_mode(s, x0, y0, nPbW, nPbH, log2_cb_size,
1837  part_idx, merge_idx, mv, mvp_flag, 0);
1838  mv->mv[0].x += lc->pu.mvd.x;
1839  mv->mv[0].y += lc->pu.mvd.y;
1840  }
1841 
1842  if (inter_pred_idc != PRED_L0) {
1843  if (s->sh.nb_refs[L1])
1844  mv->ref_idx[1]= ff_hevc_ref_idx_lx_decode(s, s->sh.nb_refs[L1]);
1845 
1846  if (s->sh.mvd_l1_zero_flag == 1 && inter_pred_idc == PRED_BI) {
1847  AV_ZERO32(&lc->pu.mvd);
1848  } else {
1849  ff_hevc_hls_mvd_coding(s, x0, y0, 1);
1850  }
1851 
1852  mv->pred_flag += PF_L1;
1853  mvp_flag = ff_hevc_mvp_lx_flag_decode(s);
1854  ff_hevc_luma_mv_mvp_mode(s, x0, y0, nPbW, nPbH, log2_cb_size,
1855  part_idx, merge_idx, mv, mvp_flag, 1);
1856  mv->mv[1].x += lc->pu.mvd.x;
1857  mv->mv[1].y += lc->pu.mvd.y;
1858  }
1859 }
1860 
1861 static void hls_prediction_unit(HEVCContext *s, int x0, int y0,
1862  int nPbW, int nPbH,
1863  int log2_cb_size, int partIdx, int idx)
1864 {
1865 #define POS(c_idx, x, y) \
1866  &s->frame->data[c_idx][((y) >> s->ps.sps->vshift[c_idx]) * s->frame->linesize[c_idx] + \
1867  (((x) >> s->ps.sps->hshift[c_idx]) << s->ps.sps->pixel_shift)]
1868  HEVCLocalContext *lc = s->HEVClc;
1869  int merge_idx = 0;
1870  struct MvField current_mv = {{{ 0 }}};
1871 
1872  int min_pu_width = s->ps.sps->min_pu_width;
1873 
1874  MvField *tab_mvf = s->ref->tab_mvf;
1875  RefPicList *refPicList = s->ref->refPicList;
1876  HEVCFrame *ref0 = NULL, *ref1 = NULL;
1877  uint8_t *dst0 = POS(0, x0, y0);
1878  uint8_t *dst1 = POS(1, x0, y0);
1879  uint8_t *dst2 = POS(2, x0, y0);
1880  int log2_min_cb_size = s->ps.sps->log2_min_cb_size;
1881  int min_cb_width = s->ps.sps->min_cb_width;
1882  int x_cb = x0 >> log2_min_cb_size;
1883  int y_cb = y0 >> log2_min_cb_size;
1884  int x_pu, y_pu;
1885  int i, j;
1886 
1887  int skip_flag = SAMPLE_CTB(s->skip_flag, x_cb, y_cb);
1888 
1889  if (!skip_flag)
1890  lc->pu.merge_flag = ff_hevc_merge_flag_decode(s);
1891 
1892  if (skip_flag || lc->pu.merge_flag) {
1893  if (s->sh.max_num_merge_cand > 1)
1894  merge_idx = ff_hevc_merge_idx_decode(s);
1895  else
1896  merge_idx = 0;
1897 
1898  ff_hevc_luma_mv_merge_mode(s, x0, y0, nPbW, nPbH, log2_cb_size,
1899  partIdx, merge_idx, &current_mv);
1900  } else {
1901  hevc_luma_mv_mvp_mode(s, x0, y0, nPbW, nPbH, log2_cb_size,
1902  partIdx, merge_idx, &current_mv);
1903  }
1904 
1905  x_pu = x0 >> s->ps.sps->log2_min_pu_size;
1906  y_pu = y0 >> s->ps.sps->log2_min_pu_size;
1907 
1908  for (j = 0; j < nPbH >> s->ps.sps->log2_min_pu_size; j++)
1909  for (i = 0; i < nPbW >> s->ps.sps->log2_min_pu_size; i++)
1910  tab_mvf[(y_pu + j) * min_pu_width + x_pu + i] = current_mv;
1911 
1912  if (current_mv.pred_flag & PF_L0) {
1913  ref0 = refPicList[0].ref[current_mv.ref_idx[0]];
1914  if (!ref0)
1915  return;
1916  hevc_await_progress(s, ref0, &current_mv.mv[0], y0, nPbH);
1917  }
1918  if (current_mv.pred_flag & PF_L1) {
1919  ref1 = refPicList[1].ref[current_mv.ref_idx[1]];
1920  if (!ref1)
1921  return;
1922  hevc_await_progress(s, ref1, &current_mv.mv[1], y0, nPbH);
1923  }
1924 
1925  if (current_mv.pred_flag == PF_L0) {
1926  int x0_c = x0 >> s->ps.sps->hshift[1];
1927  int y0_c = y0 >> s->ps.sps->vshift[1];
1928  int nPbW_c = nPbW >> s->ps.sps->hshift[1];
1929  int nPbH_c = nPbH >> s->ps.sps->vshift[1];
1930 
1931  luma_mc_uni(s, dst0, s->frame->linesize[0], ref0->frame,
1932  &current_mv.mv[0], x0, y0, nPbW, nPbH,
1933  s->sh.luma_weight_l0[current_mv.ref_idx[0]],
1934  s->sh.luma_offset_l0[current_mv.ref_idx[0]]);
1935 
1936  if (s->ps.sps->chroma_format_idc) {
1937  chroma_mc_uni(s, dst1, s->frame->linesize[1], ref0->frame->data[1], ref0->frame->linesize[1],
1938  0, x0_c, y0_c, nPbW_c, nPbH_c, &current_mv,
1939  s->sh.chroma_weight_l0[current_mv.ref_idx[0]][0], s->sh.chroma_offset_l0[current_mv.ref_idx[0]][0]);
1940  chroma_mc_uni(s, dst2, s->frame->linesize[2], ref0->frame->data[2], ref0->frame->linesize[2],
1941  0, x0_c, y0_c, nPbW_c, nPbH_c, &current_mv,
1942  s->sh.chroma_weight_l0[current_mv.ref_idx[0]][1], s->sh.chroma_offset_l0[current_mv.ref_idx[0]][1]);
1943  }
1944  } else if (current_mv.pred_flag == PF_L1) {
1945  int x0_c = x0 >> s->ps.sps->hshift[1];
1946  int y0_c = y0 >> s->ps.sps->vshift[1];
1947  int nPbW_c = nPbW >> s->ps.sps->hshift[1];
1948  int nPbH_c = nPbH >> s->ps.sps->vshift[1];
1949 
1950  luma_mc_uni(s, dst0, s->frame->linesize[0], ref1->frame,
1951  &current_mv.mv[1], x0, y0, nPbW, nPbH,
1952  s->sh.luma_weight_l1[current_mv.ref_idx[1]],
1953  s->sh.luma_offset_l1[current_mv.ref_idx[1]]);
1954 
1955  if (s->ps.sps->chroma_format_idc) {
1956  chroma_mc_uni(s, dst1, s->frame->linesize[1], ref1->frame->data[1], ref1->frame->linesize[1],
1957  1, x0_c, y0_c, nPbW_c, nPbH_c, &current_mv,
1958  s->sh.chroma_weight_l1[current_mv.ref_idx[1]][0], s->sh.chroma_offset_l1[current_mv.ref_idx[1]][0]);
1959 
1960  chroma_mc_uni(s, dst2, s->frame->linesize[2], ref1->frame->data[2], ref1->frame->linesize[2],
1961  1, x0_c, y0_c, nPbW_c, nPbH_c, &current_mv,
1962  s->sh.chroma_weight_l1[current_mv.ref_idx[1]][1], s->sh.chroma_offset_l1[current_mv.ref_idx[1]][1]);
1963  }
1964  } else if (current_mv.pred_flag == PF_BI) {
1965  int x0_c = x0 >> s->ps.sps->hshift[1];
1966  int y0_c = y0 >> s->ps.sps->vshift[1];
1967  int nPbW_c = nPbW >> s->ps.sps->hshift[1];
1968  int nPbH_c = nPbH >> s->ps.sps->vshift[1];
1969 
1970  luma_mc_bi(s, dst0, s->frame->linesize[0], ref0->frame,
1971  &current_mv.mv[0], x0, y0, nPbW, nPbH,
1972  ref1->frame, &current_mv.mv[1], &current_mv);
1973 
1974  if (s->ps.sps->chroma_format_idc) {
1975  chroma_mc_bi(s, dst1, s->frame->linesize[1], ref0->frame, ref1->frame,
1976  x0_c, y0_c, nPbW_c, nPbH_c, &current_mv, 0);
1977 
1978  chroma_mc_bi(s, dst2, s->frame->linesize[2], ref0->frame, ref1->frame,
1979  x0_c, y0_c, nPbW_c, nPbH_c, &current_mv, 1);
1980  }
1981  }
1982 }
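/*
 * hls_prediction_unit() above first derives the motion data (merge mode or
 * explicit MVP + MVD), copies it into tab_mvf for every min-PU covered by the
 * block (tab_mvf is later read for deblocking strengths and as the temporal
 * MV source when this picture is used as a collocated reference), and then
 * dispatches to uni- or bi-directional luma/chroma interpolation depending on
 * whether pred_flag ended up as PF_L0, PF_L1 or PF_BI.
 */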
1983 
1984 /**
1985  * 8.4.1
1986  */
1987 static int luma_intra_pred_mode(HEVCContext *s, int x0, int y0, int pu_size,
1988  int prev_intra_luma_pred_flag)
1989 {
1990  HEVCLocalContext *lc = s->HEVClc;
1991  int x_pu = x0 >> s->ps.sps->log2_min_pu_size;
1992  int y_pu = y0 >> s->ps.sps->log2_min_pu_size;
1993  int min_pu_width = s->ps.sps->min_pu_width;
1994  int size_in_pus = pu_size >> s->ps.sps->log2_min_pu_size;
1995  int x0b = av_mod_uintp2(x0, s->ps.sps->log2_ctb_size);
1996  int y0b = av_mod_uintp2(y0, s->ps.sps->log2_ctb_size);
1997 
1998  int cand_up = (lc->ctb_up_flag || y0b) ?
1999  s->tab_ipm[(y_pu - 1) * min_pu_width + x_pu] : INTRA_DC;
2000  int cand_left = (lc->ctb_left_flag || x0b) ?
2001  s->tab_ipm[y_pu * min_pu_width + x_pu - 1] : INTRA_DC;
2002 
2003  int y_ctb = (y0 >> (s->ps.sps->log2_ctb_size)) << (s->ps.sps->log2_ctb_size);
2004 
2005  MvField *tab_mvf = s->ref->tab_mvf;
2006  int intra_pred_mode;
2007  int candidate[3];
2008  int i, j;
2009 
2010  // intra_pred_mode prediction does not cross vertical CTB boundaries
2011  if ((y0 - 1) < y_ctb)
2012  cand_up = INTRA_DC;
2013 
2014  if (cand_left == cand_up) {
2015  if (cand_left < 2) {
2016  candidate[0] = INTRA_PLANAR;
2017  candidate[1] = INTRA_DC;
2018  candidate[2] = INTRA_ANGULAR_26;
2019  } else {
2020  candidate[0] = cand_left;
2021  candidate[1] = 2 + ((cand_left - 2 - 1 + 32) & 31);
2022  candidate[2] = 2 + ((cand_left - 2 + 1) & 31);
2023  }
2024  } else {
2025  candidate[0] = cand_left;
2026  candidate[1] = cand_up;
2027  if (candidate[0] != INTRA_PLANAR && candidate[1] != INTRA_PLANAR) {
2028  candidate[2] = INTRA_PLANAR;
2029  } else if (candidate[0] != INTRA_DC && candidate[1] != INTRA_DC) {
2030  candidate[2] = INTRA_DC;
2031  } else {
2032  candidate[2] = INTRA_ANGULAR_26;
2033  }
2034  }
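/* Worked example of the most-probable-mode derivation above, assuming
 * cand_left == cand_up == INTRA_ANGULAR_10:
 *   candidate[0] = 10
 *   candidate[1] = 2 + ((10 - 2 - 1 + 32) & 31) = 9   (neighbouring angle)
 *   candidate[2] = 2 + ((10 - 2 + 1) & 31)      = 11  (neighbouring angle)
 * Below, when prev_intra_luma_pred_flag is 0, rem_intra_luma_pred_mode is
 * remapped by stepping over the sorted candidates, so the 32 non-MPM modes
 * fit in a fixed 5-bit code. */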
2035 
2036  if (prev_intra_luma_pred_flag) {
2037  intra_pred_mode = candidate[lc->pu.mpm_idx];
2038  } else {
2039  if (candidate[0] > candidate[1])
2040  FFSWAP(uint8_t, candidate[0], candidate[1]);
2041  if (candidate[0] > candidate[2])
2042  FFSWAP(uint8_t, candidate[0], candidate[2]);
2043  if (candidate[1] > candidate[2])
2044  FFSWAP(uint8_t, candidate[1], candidate[2]);
2045 
2046  intra_pred_mode = lc->pu.rem_intra_luma_pred_mode;
2047  for (i = 0; i < 3; i++)
2048  if (intra_pred_mode >= candidate[i])
2049  intra_pred_mode++;
2050  }
2051 
2052  /* write the intra prediction units into the mv array */
2053  if (!size_in_pus)
2054  size_in_pus = 1;
2055  for (i = 0; i < size_in_pus; i++) {
2056  memset(&s->tab_ipm[(y_pu + i) * min_pu_width + x_pu],
2057  intra_pred_mode, size_in_pus);
2058 
2059  for (j = 0; j < size_in_pus; j++) {
2060  tab_mvf[(y_pu + j) * min_pu_width + x_pu + i].pred_flag = PF_INTRA;
2061  }
2062  }
2063 
2064  return intra_pred_mode;
2065 }
2066 
2067 static av_always_inline void set_ct_depth(HEVCContext *s, int x0, int y0,
2068  int log2_cb_size, int ct_depth)
2069 {
2070  int length = (1 << log2_cb_size) >> s->ps.sps->log2_min_cb_size;
2071  int x_cb = x0 >> s->ps.sps->log2_min_cb_size;
2072  int y_cb = y0 >> s->ps.sps->log2_min_cb_size;
2073  int y;
2074 
2075  for (y = 0; y < length; y++)
2076  memset(&s->tab_ct_depth[(y_cb + y) * s->ps.sps->min_cb_width + x_cb],
2077  ct_depth, length);
2078 }
2079 
2080 static const uint8_t tab_mode_idx[] = {
2081  0, 1, 2, 2, 2, 2, 3, 5, 7, 8, 10, 12, 13, 15, 17, 18, 19, 20,
2082  21, 22, 23, 23, 24, 24, 25, 25, 26, 27, 27, 28, 28, 29, 29, 30, 31};
2083 
2084 static void intra_prediction_unit(HEVCContext *s, int x0, int y0,
2085  int log2_cb_size)
2086 {
2087  HEVCLocalContext *lc = s->HEVClc;
2088  static const uint8_t intra_chroma_table[4] = { 0, 26, 10, 1 };
2089  uint8_t prev_intra_luma_pred_flag[4];
2090  int split = lc->cu.part_mode == PART_NxN;
2091  int pb_size = (1 << log2_cb_size) >> split;
2092  int side = split + 1;
2093  int chroma_mode;
2094  int i, j;
2095 
2096  for (i = 0; i < side; i++)
2097  for (j = 0; j < side; j++)
2098  prev_intra_luma_pred_flag[2 * i + j] = ff_hevc_prev_intra_luma_pred_flag_decode(s);
2099 
2100  for (i = 0; i < side; i++) {
2101  for (j = 0; j < side; j++) {
2102  if (prev_intra_luma_pred_flag[2 * i + j])
2103  lc->pu.mpm_idx = ff_hevc_mpm_idx_decode(s);
2104  else
2105  lc->pu.rem_intra_luma_pred_mode = ff_hevc_rem_intra_luma_pred_mode_decode(s);
2106 
2107  lc->pu.intra_pred_mode[2 * i + j] =
2108  luma_intra_pred_mode(s, x0 + pb_size * j, y0 + pb_size * i, pb_size,
2109  prev_intra_luma_pred_flag[2 * i + j]);
2110  }
2111  }
2112 
2113  if (s->ps.sps->chroma_format_idc == 3) {
2114  for (i = 0; i < side; i++) {
2115  for (j = 0; j < side; j++) {
2116  lc->pu.chroma_mode_c[2 * i + j] = chroma_mode = ff_hevc_intra_chroma_pred_mode_decode(s);
2117  if (chroma_mode != 4) {
2118  if (lc->pu.intra_pred_mode[2 * i + j] == intra_chroma_table[chroma_mode])
2119  lc->pu.intra_pred_mode_c[2 * i + j] = 34;
2120  else
2121  lc->pu.intra_pred_mode_c[2 * i + j] = intra_chroma_table[chroma_mode];
2122  } else {
2123  lc->pu.intra_pred_mode_c[2 * i + j] = lc->pu.intra_pred_mode[2 * i + j];
2124  }
2125  }
2126  }
2127  } else if (s->ps.sps->chroma_format_idc == 2) {
2128  int mode_idx;
2129  lc->pu.chroma_mode_c[0] = chroma_mode = ff_hevc_intra_chroma_pred_mode_decode(s);
2130  if (chroma_mode != 4) {
2131  if (lc->pu.intra_pred_mode[0] == intra_chroma_table[chroma_mode])
2132  mode_idx = 34;
2133  else
2134  mode_idx = intra_chroma_table[chroma_mode];
2135  } else {
2136  mode_idx = lc->pu.intra_pred_mode[0];
2137  }
2138  lc->pu.intra_pred_mode_c[0] = tab_mode_idx[mode_idx];
2139  } else if (s->ps.sps->chroma_format_idc != 0) {
2140  chroma_mode = ff_hevc_intra_chroma_pred_mode_decode(s);
2141  if (chroma_mode != 4) {
2142  if (lc->pu.intra_pred_mode[0] == intra_chroma_table[chroma_mode])
2143  lc->pu.intra_pred_mode_c[0] = 34;
2144  else
2145  lc->pu.intra_pred_mode_c[0] = intra_chroma_table[chroma_mode];
2146  } else {
2147  lc->pu.intra_pred_mode_c[0] = lc->pu.intra_pred_mode[0];
2148  }
2149  }
2150 }
2151 
2152 static void intra_prediction_unit_default_value(HEVCContext *s,
2153                                                 int x0, int y0,
2154  int log2_cb_size)
2155 {
2156  HEVCLocalContext *lc = s->HEVClc;
2157  int pb_size = 1 << log2_cb_size;
2158  int size_in_pus = pb_size >> s->ps.sps->log2_min_pu_size;
2159  int min_pu_width = s->ps.sps->min_pu_width;
2160  MvField *tab_mvf = s->ref->tab_mvf;
2161  int x_pu = x0 >> s->ps.sps->log2_min_pu_size;
2162  int y_pu = y0 >> s->ps.sps->log2_min_pu_size;
2163  int j, k;
2164 
2165  if (size_in_pus == 0)
2166  size_in_pus = 1;
2167  for (j = 0; j < size_in_pus; j++)
2168  memset(&s->tab_ipm[(y_pu + j) * min_pu_width + x_pu], INTRA_DC, size_in_pus);
2169  if (lc->cu.pred_mode == MODE_INTRA)
2170  for (j = 0; j < size_in_pus; j++)
2171  for (k = 0; k < size_in_pus; k++)
2172  tab_mvf[(y_pu + j) * min_pu_width + x_pu + k].pred_flag = PF_INTRA;
2173 }
2174 
2175 static int hls_coding_unit(HEVCContext *s, int x0, int y0, int log2_cb_size)
2176 {
2177  int cb_size = 1 << log2_cb_size;
2178  HEVCLocalContext *lc = s->HEVClc;
2179  int log2_min_cb_size = s->ps.sps->log2_min_cb_size;
2180  int length = cb_size >> log2_min_cb_size;
2181  int min_cb_width = s->ps.sps->min_cb_width;
2182  int x_cb = x0 >> log2_min_cb_size;
2183  int y_cb = y0 >> log2_min_cb_size;
2184  int idx = log2_cb_size - 2;
2185  int qp_block_mask = (1<<(s->ps.sps->log2_ctb_size - s->ps.pps->diff_cu_qp_delta_depth)) - 1;
2186  int x, y, ret;
2187 
2188  lc->cu.x = x0;
2189  lc->cu.y = y0;
2190  lc->cu.pred_mode = MODE_INTRA;
2191  lc->cu.part_mode = PART_2Nx2N;
2192  lc->cu.intra_split_flag = 0;
2193 
2194  SAMPLE_CTB(s->skip_flag, x_cb, y_cb) = 0;
2195  for (x = 0; x < 4; x++)
2196  lc->pu.intra_pred_mode[x] = 1;
2197  if (s->ps.pps->transquant_bypass_enable_flag) {
2198  lc->cu.cu_transquant_bypass_flag = ff_hevc_cu_transquant_bypass_flag_decode(s);
2199  if (lc->cu.cu_transquant_bypass_flag)
2200  set_deblocking_bypass(s, x0, y0, log2_cb_size);
2201  } else
2202  lc->cu.cu_transquant_bypass_flag = 0;
2203 
2204  if (s->sh.slice_type != HEVC_SLICE_I) {
2205  uint8_t skip_flag = ff_hevc_skip_flag_decode(s, x0, y0, x_cb, y_cb);
2206 
2207  x = y_cb * min_cb_width + x_cb;
2208  for (y = 0; y < length; y++) {
2209  memset(&s->skip_flag[x], skip_flag, length);
2210  x += min_cb_width;
2211  }
2212  lc->cu.pred_mode = skip_flag ? MODE_SKIP : MODE_INTER;
2213  } else {
2214  x = y_cb * min_cb_width + x_cb;
2215  for (y = 0; y < length; y++) {
2216  memset(&s->skip_flag[x], 0, length);
2217  x += min_cb_width;
2218  }
2219  }
2220 
2221  if (SAMPLE_CTB(s->skip_flag, x_cb, y_cb)) {
2222  hls_prediction_unit(s, x0, y0, cb_size, cb_size, log2_cb_size, 0, idx);
2223  intra_prediction_unit_default_value(s, x0, y0, log2_cb_size);
2224 
2225  if (!s->sh.disable_deblocking_filter_flag)
2226  ff_hevc_deblocking_boundary_strengths(s, x0, y0, log2_cb_size);
2227  } else {
2228  int pcm_flag = 0;
2229 
2230  if (s->sh.slice_type != HEVC_SLICE_I)
2231  lc->cu.pred_mode = ff_hevc_pred_mode_decode(s);
2232  if (lc->cu.pred_mode != MODE_INTRA ||
2233  log2_cb_size == s->ps.sps->log2_min_cb_size) {
2234  lc->cu.part_mode = ff_hevc_part_mode_decode(s, log2_cb_size);
2235  lc->cu.intra_split_flag = lc->cu.part_mode == PART_NxN &&
2236  lc->cu.pred_mode == MODE_INTRA;
2237  }
2238 
2239  if (lc->cu.pred_mode == MODE_INTRA) {
2240  if (lc->cu.part_mode == PART_2Nx2N && s->ps.sps->pcm_enabled_flag &&
2241  log2_cb_size >= s->ps.sps->pcm.log2_min_pcm_cb_size &&
2242  log2_cb_size <= s->ps.sps->pcm.log2_max_pcm_cb_size) {
2243  pcm_flag = ff_hevc_pcm_flag_decode(s);
2244  }
2245  if (pcm_flag) {
2246  intra_prediction_unit_default_value(s, x0, y0, log2_cb_size);
2247  ret = hls_pcm_sample(s, x0, y0, log2_cb_size);
2248  if (s->ps.sps->pcm.loop_filter_disable_flag)
2249  set_deblocking_bypass(s, x0, y0, log2_cb_size);
2250 
2251  if (ret < 0)
2252  return ret;
2253  } else {
2254  intra_prediction_unit(s, x0, y0, log2_cb_size);
2255  }
2256  } else {
2257  intra_prediction_unit_default_value(s, x0, y0, log2_cb_size);
2258  switch (lc->cu.part_mode) {
2259  case PART_2Nx2N:
2260  hls_prediction_unit(s, x0, y0, cb_size, cb_size, log2_cb_size, 0, idx);
2261  break;
2262  case PART_2NxN:
2263  hls_prediction_unit(s, x0, y0, cb_size, cb_size / 2, log2_cb_size, 0, idx);
2264  hls_prediction_unit(s, x0, y0 + cb_size / 2, cb_size, cb_size / 2, log2_cb_size, 1, idx);
2265  break;
2266  case PART_Nx2N:
2267  hls_prediction_unit(s, x0, y0, cb_size / 2, cb_size, log2_cb_size, 0, idx - 1);
2268  hls_prediction_unit(s, x0 + cb_size / 2, y0, cb_size / 2, cb_size, log2_cb_size, 1, idx - 1);
2269  break;
2270  case PART_2NxnU:
2271  hls_prediction_unit(s, x0, y0, cb_size, cb_size / 4, log2_cb_size, 0, idx);
2272  hls_prediction_unit(s, x0, y0 + cb_size / 4, cb_size, cb_size * 3 / 4, log2_cb_size, 1, idx);
2273  break;
2274  case PART_2NxnD:
2275  hls_prediction_unit(s, x0, y0, cb_size, cb_size * 3 / 4, log2_cb_size, 0, idx);
2276  hls_prediction_unit(s, x0, y0 + cb_size * 3 / 4, cb_size, cb_size / 4, log2_cb_size, 1, idx);
2277  break;
2278  case PART_nLx2N:
2279  hls_prediction_unit(s, x0, y0, cb_size / 4, cb_size, log2_cb_size, 0, idx - 2);
2280  hls_prediction_unit(s, x0 + cb_size / 4, y0, cb_size * 3 / 4, cb_size, log2_cb_size, 1, idx - 2);
2281  break;
2282  case PART_nRx2N:
2283  hls_prediction_unit(s, x0, y0, cb_size * 3 / 4, cb_size, log2_cb_size, 0, idx - 2);
2284  hls_prediction_unit(s, x0 + cb_size * 3 / 4, y0, cb_size / 4, cb_size, log2_cb_size, 1, idx - 2);
2285  break;
2286  case PART_NxN:
2287  hls_prediction_unit(s, x0, y0, cb_size / 2, cb_size / 2, log2_cb_size, 0, idx - 1);
2288  hls_prediction_unit(s, x0 + cb_size / 2, y0, cb_size / 2, cb_size / 2, log2_cb_size, 1, idx - 1);
2289  hls_prediction_unit(s, x0, y0 + cb_size / 2, cb_size / 2, cb_size / 2, log2_cb_size, 2, idx - 1);
2290  hls_prediction_unit(s, x0 + cb_size / 2, y0 + cb_size / 2, cb_size / 2, cb_size / 2, log2_cb_size, 3, idx - 1);
2291  break;
2292  }
2293  }
2294 
2295  if (!pcm_flag) {
2296  int rqt_root_cbf = 1;
2297 
2298  if (lc->cu.pred_mode != MODE_INTRA &&
2299  !(lc->cu.part_mode == PART_2Nx2N && lc->pu.merge_flag)) {
2300  rqt_root_cbf = ff_hevc_no_residual_syntax_flag_decode(s);
2301  }
2302  if (rqt_root_cbf) {
2303  const static int cbf[2] = { 0 };
2304  lc->cu.max_trafo_depth = lc->cu.pred_mode == MODE_INTRA ?
2305  s->ps.sps->max_transform_hierarchy_depth_intra + lc->cu.intra_split_flag :
2306  s->ps.sps->max_transform_hierarchy_depth_inter;
2307  ret = hls_transform_tree(s, x0, y0, x0, y0, x0, y0,
2308  log2_cb_size,
2309  log2_cb_size, 0, 0, cbf, cbf);
2310  if (ret < 0)
2311  return ret;
2312  } else {
2313  if (!s->sh.disable_deblocking_filter_flag)
2314  ff_hevc_deblocking_boundary_strengths(s, x0, y0, log2_cb_size);
2315  }
2316  }
2317  }
2318 
2319  if (s->ps.pps->cu_qp_delta_enabled_flag && lc->tu.is_cu_qp_delta_coded == 0)
2320  ff_hevc_set_qPy(s, x0, y0, log2_cb_size);
2321 
2322  x = y_cb * min_cb_width + x_cb;
2323  for (y = 0; y < length; y++) {
2324  memset(&s->qp_y_tab[x], lc->qp_y, length);
2325  x += min_cb_width;
2326  }
2327 
2328  if(((x0 + (1<<log2_cb_size)) & qp_block_mask) == 0 &&
2329  ((y0 + (1<<log2_cb_size)) & qp_block_mask) == 0) {
2330  lc->qPy_pred = lc->qp_y;
2331  }
2332 
2333  set_ct_depth(s, x0, y0, log2_cb_size, lc->ct_depth);
2334 
2335  return 0;
2336 }
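/*
 * QP bookkeeping at the end of hls_coding_unit(): the CU's final luma QP is
 * written into qp_y_tab for every min-CB it covers (the deblocking filter
 * reads it from there), and qPy_pred is refreshed once the CU's bottom-right
 * corner is aligned to a quantization-group boundary, i.e. when x0/y0 plus
 * the CU size hit a multiple of qp_block_mask + 1 (derived from
 * diff_cu_qp_delta_depth).
 */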
2337 
2338 static int hls_coding_quadtree(HEVCContext *s, int x0, int y0,
2339  int log2_cb_size, int cb_depth)
2340 {
2341  HEVCLocalContext *lc = s->HEVClc;
2342  const int cb_size = 1 << log2_cb_size;
2343  int ret;
2344  int split_cu;
2345 
2346  lc->ct_depth = cb_depth;
2347  if (x0 + cb_size <= s->ps.sps->width &&
2348  y0 + cb_size <= s->ps.sps->height &&
2349  log2_cb_size > s->ps.sps->log2_min_cb_size) {
2350  split_cu = ff_hevc_split_coding_unit_flag_decode(s, cb_depth, x0, y0);
2351  } else {
2352  split_cu = (log2_cb_size > s->ps.sps->log2_min_cb_size);
2353  }
2354  if (s->ps.pps->cu_qp_delta_enabled_flag &&
2355  log2_cb_size >= s->ps.sps->log2_ctb_size - s->ps.pps->diff_cu_qp_delta_depth) {
2356  lc->tu.is_cu_qp_delta_coded = 0;
2357  lc->tu.cu_qp_delta = 0;
2358  }
2359 
2360  if (s->sh.cu_chroma_qp_offset_enabled_flag &&
2361  log2_cb_size >= s->ps.sps->log2_ctb_size - s->ps.pps->diff_cu_chroma_qp_offset_depth) {
2362  lc->tu.is_cu_chroma_qp_offset_coded = 0;
2363  }
2364 
2365  if (split_cu) {
2366  int qp_block_mask = (1<<(s->ps.sps->log2_ctb_size - s->ps.pps->diff_cu_qp_delta_depth)) - 1;
2367  const int cb_size_split = cb_size >> 1;
2368  const int x1 = x0 + cb_size_split;
2369  const int y1 = y0 + cb_size_split;
2370 
2371  int more_data = 0;
2372 
2373  more_data = hls_coding_quadtree(s, x0, y0, log2_cb_size - 1, cb_depth + 1);
2374  if (more_data < 0)
2375  return more_data;
2376 
2377  if (more_data && x1 < s->ps.sps->width) {
2378  more_data = hls_coding_quadtree(s, x1, y0, log2_cb_size - 1, cb_depth + 1);
2379  if (more_data < 0)
2380  return more_data;
2381  }
2382  if (more_data && y1 < s->ps.sps->height) {
2383  more_data = hls_coding_quadtree(s, x0, y1, log2_cb_size - 1, cb_depth + 1);
2384  if (more_data < 0)
2385  return more_data;
2386  }
2387  if (more_data && x1 < s->ps.sps->width &&
2388  y1 < s->ps.sps->height) {
2389  more_data = hls_coding_quadtree(s, x1, y1, log2_cb_size - 1, cb_depth + 1);
2390  if (more_data < 0)
2391  return more_data;
2392  }
2393 
2394  if(((x0 + (1<<log2_cb_size)) & qp_block_mask) == 0 &&
2395  ((y0 + (1<<log2_cb_size)) & qp_block_mask) == 0)
2396  lc->qPy_pred = lc->qp_y;
2397 
2398  if (more_data)
2399  return ((x1 + cb_size_split) < s->ps.sps->width ||
2400  (y1 + cb_size_split) < s->ps.sps->height);
2401  else
2402  return 0;
2403  } else {
2404  ret = hls_coding_unit(s, x0, y0, log2_cb_size);
2405  if (ret < 0)
2406  return ret;
2407  if ((!((x0 + cb_size) %
2408  (1 << (s->ps.sps->log2_ctb_size))) ||
2409  (x0 + cb_size >= s->ps.sps->width)) &&
2410  (!((y0 + cb_size) %
2411  (1 << (s->ps.sps->log2_ctb_size))) ||
2412  (y0 + cb_size >= s->ps.sps->height))) {
2413  int end_of_slice_flag = ff_hevc_end_of_slice_flag_decode(s);
2414  return !end_of_slice_flag;
2415  } else {
2416  return 1;
2417  }
2418  }
2419 
2420  return 0;
2421 }
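/*
 * Return convention of hls_coding_quadtree(): negative on error, 0 when the
 * slice ends (end_of_slice_flag or nothing left to parse), positive when more
 * CTU data follows. Blocks that cross the right or bottom picture border are
 * split without reading a split flag until they either fit inside the picture
 * or reach the minimum CB size.
 */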
2422 
2423 static void hls_decode_neighbour(HEVCContext *s, int x_ctb, int y_ctb,
2424  int ctb_addr_ts)
2425 {
2426  HEVCLocalContext *lc = s->HEVClc;
2427  int ctb_size = 1 << s->ps.sps->log2_ctb_size;
2428  int ctb_addr_rs = s->ps.pps->ctb_addr_ts_to_rs[ctb_addr_ts];
2429  int ctb_addr_in_slice = ctb_addr_rs - s->sh.slice_addr;
2430 
2431  s->tab_slice_address[ctb_addr_rs] = s->sh.slice_addr;
2432 
2433  if (s->ps.pps->entropy_coding_sync_enabled_flag) {
2434  if (x_ctb == 0 && (y_ctb & (ctb_size - 1)) == 0)
2435  lc->first_qp_group = 1;
2436  lc->end_of_tiles_x = s->ps.sps->width;
2437  } else if (s->ps.pps->tiles_enabled_flag) {
2438  if (ctb_addr_ts && s->ps.pps->tile_id[ctb_addr_ts] != s->ps.pps->tile_id[ctb_addr_ts - 1]) {
2439  int idxX = s->ps.pps->col_idxX[x_ctb >> s->ps.sps->log2_ctb_size];
2440  lc->end_of_tiles_x = x_ctb + (s->ps.pps->column_width[idxX] << s->ps.sps->log2_ctb_size);
2441  lc->first_qp_group = 1;
2442  }
2443  } else {
2444  lc->end_of_tiles_x = s->ps.sps->width;
2445  }
2446 
2447  lc->end_of_tiles_y = FFMIN(y_ctb + ctb_size, s->ps.sps->height);
2448 
2449  lc->boundary_flags = 0;
2450  if (s->ps.pps->tiles_enabled_flag) {
2451  if (x_ctb > 0 && s->ps.pps->tile_id[ctb_addr_ts] != s->ps.pps->tile_id[s->ps.pps->ctb_addr_rs_to_ts[ctb_addr_rs - 1]])
2452  lc->boundary_flags |= BOUNDARY_LEFT_TILE;
2453  if (x_ctb > 0 && s->tab_slice_address[ctb_addr_rs] != s->tab_slice_address[ctb_addr_rs - 1])
2454  lc->boundary_flags |= BOUNDARY_LEFT_SLICE;
2455  if (y_ctb > 0 && s->ps.pps->tile_id[ctb_addr_ts] != s->ps.pps->tile_id[s->ps.pps->ctb_addr_rs_to_ts[ctb_addr_rs - s->ps.sps->ctb_width]])
2456  lc->boundary_flags |= BOUNDARY_UPPER_TILE;
2457  if (y_ctb > 0 && s->tab_slice_address[ctb_addr_rs] != s->tab_slice_address[ctb_addr_rs - s->ps.sps->ctb_width])
2458  lc->boundary_flags |= BOUNDARY_UPPER_SLICE;
2459  } else {
2460  if (ctb_addr_in_slice <= 0)
2461  lc->boundary_flags |= BOUNDARY_LEFT_SLICE;
2462  if (ctb_addr_in_slice < s->ps.sps->ctb_width)
2463  lc->boundary_flags |= BOUNDARY_UPPER_SLICE;
2464  }
2465 
2466  lc->ctb_left_flag = ((x_ctb > 0) && (ctb_addr_in_slice > 0) && !(lc->boundary_flags & BOUNDARY_LEFT_TILE));
2467  lc->ctb_up_flag = ((y_ctb > 0) && (ctb_addr_in_slice >= s->ps.sps->ctb_width) && !(lc->boundary_flags & BOUNDARY_UPPER_TILE));
2468  lc->ctb_up_right_flag = ((y_ctb > 0) && (ctb_addr_in_slice+1 >= s->ps.sps->ctb_width) && (s->ps.pps->tile_id[ctb_addr_ts] == s->ps.pps->tile_id[s->ps.pps->ctb_addr_rs_to_ts[ctb_addr_rs+1 - s->ps.sps->ctb_width]]));
2469  lc->ctb_up_left_flag = ((x_ctb > 0) && (y_ctb > 0) && (ctb_addr_in_slice-1 >= s->ps.sps->ctb_width) && (s->ps.pps->tile_id[ctb_addr_ts] == s->ps.pps->tile_id[s->ps.pps->ctb_addr_rs_to_ts[ctb_addr_rs-1 - s->ps.sps->ctb_width]]));
2470 }
2471 
2472 static int hls_decode_entry(AVCodecContext *avctxt, void *isFilterThread)
2473 {
2474  HEVCContext *s = avctxt->priv_data;
2475  int ctb_size = 1 << s->ps.sps->log2_ctb_size;
2476  int more_data = 1;
2477  int x_ctb = 0;
2478  int y_ctb = 0;
2479  int ctb_addr_ts = s->ps.pps->ctb_addr_rs_to_ts[s->sh.slice_ctb_addr_rs];
2480  int ret;
2481 
2482  if (!ctb_addr_ts && s->sh.dependent_slice_segment_flag) {
2483  av_log(s->avctx, AV_LOG_ERROR, "Impossible initial tile.\n");
2484  return AVERROR_INVALIDDATA;
2485  }
2486 
2487  if (s->sh.dependent_slice_segment_flag) {
2488  int prev_rs = s->ps.pps->ctb_addr_ts_to_rs[ctb_addr_ts - 1];
2489  if (s->tab_slice_address[prev_rs] != s->sh.slice_addr) {
2490  av_log(s->avctx, AV_LOG_ERROR, "Previous slice segment missing\n");
2491  return AVERROR_INVALIDDATA;
2492  }
2493  }
2494 
2495  while (more_data && ctb_addr_ts < s->ps.sps->ctb_size) {
2496  int ctb_addr_rs = s->ps.pps->ctb_addr_ts_to_rs[ctb_addr_ts];
2497 
2498  x_ctb = (ctb_addr_rs % ((s->ps.sps->width + ctb_size - 1) >> s->ps.sps->log2_ctb_size)) << s->ps.sps->log2_ctb_size;
2499  y_ctb = (ctb_addr_rs / ((s->ps.sps->width + ctb_size - 1) >> s->ps.sps->log2_ctb_size)) << s->ps.sps->log2_ctb_size;
2500  hls_decode_neighbour(s, x_ctb, y_ctb, ctb_addr_ts);
2501 
2502  ret = ff_hevc_cabac_init(s, ctb_addr_ts, 0);
2503  if (ret < 0) {
2504  s->tab_slice_address[ctb_addr_rs] = -1;
2505  return ret;
2506  }
2507 
2508  hls_sao_param(s, x_ctb >> s->ps.sps->log2_ctb_size, y_ctb >> s->ps.sps->log2_ctb_size);
2509 
2510  s->deblock[ctb_addr_rs].beta_offset = s->sh.beta_offset;
2511  s->deblock[ctb_addr_rs].tc_offset = s->sh.tc_offset;
2512  s->filter_slice_edges[ctb_addr_rs] = s->sh.slice_loop_filter_across_slices_enabled_flag;
2513 
2514  more_data = hls_coding_quadtree(s, x_ctb, y_ctb, s->ps.sps->log2_ctb_size, 0);
2515  if (more_data < 0) {
2516  s->tab_slice_address[ctb_addr_rs] = -1;
2517  return more_data;
2518  }
2519 
2520 
2521  ctb_addr_ts++;
2522  ff_hevc_save_states(s, ctb_addr_ts);
2523  ff_hevc_hls_filters(s, x_ctb, y_ctb, ctb_size);
2524  }
2525 
2526  if (x_ctb + ctb_size >= s->ps.sps->width &&
2527  y_ctb + ctb_size >= s->ps.sps->height)
2528  ff_hevc_hls_filter(s, x_ctb, y_ctb, ctb_size);
2529 
2530  return ctb_addr_ts;
2531 }
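/*
 * Two CTB addressing schemes are used above: ctb_addr_ts is the tile-scan
 * (coding order) index that the CABAC parser advances through, while
 * ctb_addr_rs is the raster-scan index used for per-CTB arrays such as
 * deblock[], filter_slice_edges[] and tab_slice_address[]. The PPS tables
 * ctb_addr_ts_to_rs / ctb_addr_rs_to_ts convert between the two.
 */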
2532 
2533 static int hls_slice_data(HEVCContext *s)
2534 {
2535  int arg[2];
2536  int ret[2];
2537 
2538  arg[0] = 0;
2539  arg[1] = 1;
2540 
2541  s->avctx->execute(s->avctx, hls_decode_entry, arg, ret , 1, sizeof(int));
2542  return ret[0];
2543 }
2544 static int hls_decode_entry_wpp(AVCodecContext *avctxt, void *input_ctb_row, int job, int self_id)
2545 {
2546  HEVCContext *s1 = avctxt->priv_data, *s;
2547  HEVCLocalContext *lc;
2548  int ctb_size = 1<< s1->ps.sps->log2_ctb_size;
2549  int more_data = 1;
2550  int *ctb_row_p = input_ctb_row;
2551  int ctb_row = ctb_row_p[job];
2552  int ctb_addr_rs = s1->sh.slice_ctb_addr_rs + ctb_row * ((s1->ps.sps->width + ctb_size - 1) >> s1->ps.sps->log2_ctb_size);
2553  int ctb_addr_ts = s1->ps.pps->ctb_addr_rs_to_ts[ctb_addr_rs];
2554  int thread = ctb_row % s1->threads_number;
2555  int ret;
2556 
2557  s = s1->sList[self_id];
2558  lc = s->HEVClc;
2559 
2560  if(ctb_row) {
2561  ret = init_get_bits8(&lc->gb, s->data + s->sh.offset[ctb_row - 1], s->sh.size[ctb_row - 1]);
2562  if (ret < 0)
2563  goto error;
2564  ff_init_cabac_decoder(&lc->cc, s->data + s->sh.offset[(ctb_row)-1], s->sh.size[ctb_row - 1]);
2565  }
2566 
2567  while(more_data && ctb_addr_ts < s->ps.sps->ctb_size) {
2568  int x_ctb = (ctb_addr_rs % s->ps.sps->ctb_width) << s->ps.sps->log2_ctb_size;
2569  int y_ctb = (ctb_addr_rs / s->ps.sps->ctb_width) << s->ps.sps->log2_ctb_size;
2570 
2571  hls_decode_neighbour(s, x_ctb, y_ctb, ctb_addr_ts);
2572 
2573  ff_thread_await_progress2(s->avctx, ctb_row, thread, SHIFT_CTB_WPP);
2574 
2575  if (atomic_load(&s1->wpp_err)) {
2576  ff_thread_report_progress2(s->avctx, ctb_row , thread, SHIFT_CTB_WPP);
2577  return 0;
2578  }
2579 
2580  ret = ff_hevc_cabac_init(s, ctb_addr_ts, thread);
2581  if (ret < 0)
2582  goto error;
2583  hls_sao_param(s, x_ctb >> s->ps.sps->log2_ctb_size, y_ctb >> s->ps.sps->log2_ctb_size);
2584  more_data = hls_coding_quadtree(s, x_ctb, y_ctb, s->ps.sps->log2_ctb_size, 0);
2585 
2586  if (more_data < 0) {
2587  ret = more_data;
2588  goto error;
2589  }
2590 
2591  ctb_addr_ts++;
2592 
2593  ff_hevc_save_states(s, ctb_addr_ts);
2594  ff_thread_report_progress2(s->avctx, ctb_row, thread, 1);
2595  ff_hevc_hls_filters(s, x_ctb, y_ctb, ctb_size);
2596 
2597  if (!more_data && (x_ctb+ctb_size) < s->ps.sps->width && ctb_row != s->sh.num_entry_point_offsets) {
2598  atomic_store(&s1->wpp_err, 1);
2599  ff_thread_report_progress2(s->avctx, ctb_row ,thread, SHIFT_CTB_WPP);
2600  return 0;
2601  }
2602 
2603  if ((x_ctb+ctb_size) >= s->ps.sps->width && (y_ctb+ctb_size) >= s->ps.sps->height ) {
2604  ff_hevc_hls_filter(s, x_ctb, y_ctb, ctb_size);
2605  ff_thread_report_progress2(s->avctx, ctb_row , thread, SHIFT_CTB_WPP);
2606  return ctb_addr_ts;
2607  }
2608  ctb_addr_rs = s->ps.pps->ctb_addr_ts_to_rs[ctb_addr_ts];
2609  x_ctb+=ctb_size;
2610 
2611  if(x_ctb >= s->ps.sps->width) {
2612  break;
2613  }
2614  }
2615  ff_thread_report_progress2(s->avctx, ctb_row ,thread, SHIFT_CTB_WPP);
2616 
2617  return 0;
2618 error:
2619  s->tab_slice_address[ctb_addr_rs] = -1;
2620  atomic_store(&s1->wpp_err, 1);
2621  ff_thread_report_progress2(s->avctx, ctb_row ,thread, SHIFT_CTB_WPP);
2622  return ret;
2623 }
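/*
 * Wavefront (WPP) decoding: each job owns one CTU row and runs on its own
 * HEVCContext/HEVCLocalContext copy. Before parsing a CTU it waits for the
 * row above to report enough progress (ff_thread_await_progress2) and then
 * reports its own, keeping the rows staggered so that the CABAC context state
 * saved by ff_hevc_save_states() in the row above is available when this row
 * initialises its CABAC decoder. A parse error sets wpp_err so the remaining
 * jobs bail out instead of parsing from corrupt entry points.
 */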
2624 
2625 static int hls_slice_data_wpp(HEVCContext *s, const H2645NAL *nal)
2626 {
2627  const uint8_t *data = nal->data;
2628  int length = nal->size;
2629  HEVCLocalContext *lc = s->HEVClc;
2630  int *ret = av_malloc_array(s->sh.num_entry_point_offsets + 1, sizeof(int));
2631  int *arg = av_malloc_array(s->sh.num_entry_point_offsets + 1, sizeof(int));
2632  int64_t offset;
2633  int64_t startheader, cmpt = 0;
2634  int i, j, res = 0;
2635 
2636  if (!ret || !arg) {
2637  av_free(ret);
2638  av_free(arg);
2639  return AVERROR(ENOMEM);
2640  }
2641 
2642  if (s->sh.slice_ctb_addr_rs + s->sh.num_entry_point_offsets * s->ps.sps->ctb_width >= s->ps.sps->ctb_width * s->ps.sps->ctb_height) {
2643  av_log(s->avctx, AV_LOG_ERROR, "WPP ctb addresses are wrong (%d %d %d %d)\n",
2644  s->sh.slice_ctb_addr_rs, s->sh.num_entry_point_offsets,
2645  s->ps.sps->ctb_width, s->ps.sps->ctb_height
2646  );
2647  res = AVERROR_INVALIDDATA;
2648  goto error;
2649  }
2650 
2651  ff_alloc_entries(s->avctx, s->sh.num_entry_point_offsets + 1);
2652 
2653  for (i = 1; i < s->threads_number; i++) {
2654  if (s->sList[i] && s->HEVClcList[i])
2655  continue;
2656  av_freep(&s->sList[i]);
2657  av_freep(&s->HEVClcList[i]);
2658  s->sList[i] = av_malloc(sizeof(HEVCContext));
2659  s->HEVClcList[i] = av_mallocz(sizeof(HEVCLocalContext));
2660  if (!s->sList[i] || !s->HEVClcList[i]) {
2661  res = AVERROR(ENOMEM);
2662  goto error;
2663  }
2664  memcpy(s->sList[i], s, sizeof(HEVCContext));
2665  s->sList[i]->HEVClc = s->HEVClcList[i];
2666  }
2667 
2668  offset = (lc->gb.index >> 3);
2669 
2670  for (j = 0, cmpt = 0, startheader = offset + s->sh.entry_point_offset[0]; j < nal->skipped_bytes; j++) {
2671  if (nal->skipped_bytes_pos[j] >= offset && nal->skipped_bytes_pos[j] < startheader) {
2672  startheader--;
2673  cmpt++;
2674  }
2675  }
2676 
2677  for (i = 1; i < s->sh.num_entry_point_offsets; i++) {
2678  offset += (s->sh.entry_point_offset[i - 1] - cmpt);
2679  for (j = 0, cmpt = 0, startheader = offset
2680  + s->sh.entry_point_offset[i]; j < nal->skipped_bytes; j++) {
2681  if (nal->skipped_bytes_pos[j] >= offset && nal->skipped_bytes_pos[j] < startheader) {
2682  startheader--;
2683  cmpt++;
2684  }
2685  }
2686  s->sh.size[i - 1] = s->sh.entry_point_offset[i] - cmpt;
2687  s->sh.offset[i - 1] = offset;
2688 
2689  }
2690  if (s->sh.num_entry_point_offsets != 0) {
2691  offset += s->sh.entry_point_offset[s->sh.num_entry_point_offsets - 1] - cmpt;
2692  if (length < offset) {
2693  av_log(s->avctx, AV_LOG_ERROR, "entry_point_offset table is corrupted\n");
2694  res = AVERROR_INVALIDDATA;
2695  goto error;
2696  }
2697  s->sh.size[s->sh.num_entry_point_offsets - 1] = length - offset;
2698  s->sh.offset[s->sh.num_entry_point_offsets - 1] = offset;
2699 
2700  }
2701  s->data = data;
2702 
2703  for (i = 1; i < s->threads_number; i++) {
2704  s->sList[i]->HEVClc->first_qp_group = 1;
2705  s->sList[i]->HEVClc->qp_y = s->sList[0]->HEVClc->qp_y;
2706  memcpy(s->sList[i], s, sizeof(HEVCContext));
2707  s->sList[i]->HEVClc = s->HEVClcList[i];
2708  }
2709 
2710  atomic_store(&s->wpp_err, 0);
2711  ff_reset_entries(s->avctx);
2712 
2713  for (i = 0; i <= s->sh.num_entry_point_offsets; i++) {
2714  arg[i] = i;
2715  ret[i] = 0;
2716  }
2717 
2718  if (s->ps.pps->entropy_coding_sync_enabled_flag)
2719  s->avctx->execute2(s->avctx, hls_decode_entry_wpp, arg, ret, s->sh.num_entry_point_offsets + 1);
2720 
2721  for (i = 0; i <= s->sh.num_entry_point_offsets; i++)
2722  res += ret[i];
2723 error:
2724  av_free(ret);
2725  av_free(arg);
2726  return res;
2727 }
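/*
 * The loops over nal->skipped_bytes above compensate for emulation-prevention
 * bytes: entry_point_offset[] from the slice header counts bytes of the
 * escaped NAL unit, while the per-row CABAC decoders read the unescaped
 * buffer produced by the NAL splitter, so every removed 0x03 byte located
 * before an entry point (counted in cmpt) is subtracted from the
 * corresponding size and offset.
 */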
2728 
2729 static int set_side_data(HEVCContext *s)
2730 {
2731  AVFrame *out = s->ref->frame;
2732  int ret;
2733 
2734  if (s->sei.frame_packing.present &&
2735  s->sei.frame_packing.arrangement_type >= 3 &&
2736  s->sei.frame_packing.arrangement_type <= 5 &&
2737  s->sei.frame_packing.content_interpretation_type > 0 &&
2738  s->sei.frame_packing.content_interpretation_type < 3) {
2739  AVStereo3D *stereo = av_stereo3d_create_side_data(out);
2740  if (!stereo)
2741  return AVERROR(ENOMEM);
2742 
2743  switch (s->sei.frame_packing.arrangement_type) {
2744  case 3:
2745  if (s->sei.frame_packing.quincunx_subsampling)
2746  stereo->type = AV_STEREO3D_SIDEBYSIDE_QUINCUNX;
2747  else
2748  stereo->type = AV_STEREO3D_SIDEBYSIDE;
2749  break;
2750  case 4:
2751  stereo->type = AV_STEREO3D_TOPBOTTOM;
2752  break;
2753  case 5:
2754  stereo->type = AV_STEREO3D_FRAMESEQUENCE;
2755  break;
2756  }
2757 
2758  if (s->sei.frame_packing.content_interpretation_type == 2)
2759  stereo->flags = AV_STEREO3D_FLAG_INVERT;
2760 
2761  if (s->sei.frame_packing.arrangement_type == 5) {
2762  if (s->sei.frame_packing.current_frame_is_frame0_flag)
2763  stereo->view = AV_STEREO3D_VIEW_LEFT;
2764  else
2765  stereo->view = AV_STEREO3D_VIEW_RIGHT;
2766  }
2767  }
2768 
2769  if (s->sei.display_orientation.present &&
2770  (s->sei.display_orientation.anticlockwise_rotation ||
2771  s->sei.display_orientation.hflip || s->sei.display_orientation.vflip)) {
2772  double angle = s->sei.display_orientation.anticlockwise_rotation * 360 / (double) (1 << 16);
2773  AVFrameSideData *rotation = av_frame_new_side_data(out,
2774  AV_FRAME_DATA_DISPLAYMATRIX,
2775  sizeof(int32_t) * 9);
2776  if (!rotation)
2777  return AVERROR(ENOMEM);
2778 
2779  /* av_display_rotation_set() expects the angle in the clockwise
2780  * direction, hence the first minus.
2781  * The below code applies the flips after the rotation, yet
2782  * the H.2645 specs require flipping to be applied first.
2783  * Because of R O(phi) = O(-phi) R (where R is flipping around
2784  * an arbitrary axis and O(phi) is the proper rotation by phi)
2785  * we can create display matrices as desired by negating
2786  * the degree once for every flip applied. */
2787  angle = -angle * (1 - 2 * !!s->sei.display_orientation.hflip)
2788  * (1 - 2 * !!s->sei.display_orientation.vflip);
2789  av_display_rotation_set((int32_t *)rotation->data, angle);
2790  av_display_matrix_flip((int32_t *)rotation->data,
2791  s->sei.display_orientation.hflip,
2792  s->sei.display_orientation.vflip);
2793  }
2794 
2795  // Decrement the mastering display flag when IRAP frame has no_rasl_output_flag=1
2796  // so the side data persists for the entire coded video sequence.
2797  if (s->sei.mastering_display.present > 0 &&
2798  IS_IRAP(s) && s->no_rasl_output_flag) {
2799  s->sei.mastering_display.present--;
2800  }
2801  if (s->sei.mastering_display.present) {
2802  // HEVC uses a g,b,r ordering, which we convert to a more natural r,g,b
2803  const int mapping[3] = {2, 0, 1};
2804  const int chroma_den = 50000;
2805  const int luma_den = 10000;
2806  int i;
2807  AVMasteringDisplayMetadata *metadata =
2808  av_mastering_display_metadata_create_side_data(out);
2809  if (!metadata)
2810  return AVERROR(ENOMEM);
2811 
2812  for (i = 0; i < 3; i++) {
2813  const int j = mapping[i];
2814  metadata->display_primaries[i][0].num = s->sei.mastering_display.display_primaries[j][0];
2815  metadata->display_primaries[i][0].den = chroma_den;
2816  metadata->display_primaries[i][1].num = s->sei.mastering_display.display_primaries[j][1];
2817  metadata->display_primaries[i][1].den = chroma_den;
2818  }
2819  metadata->white_point[0].num = s->sei.mastering_display.white_point[0];
2820  metadata->white_point[0].den = chroma_den;
2821  metadata->white_point[1].num = s->sei.mastering_display.white_point[1];
2822  metadata->white_point[1].den = chroma_den;
2823 
2824  metadata->max_luminance.num = s->sei.mastering_display.max_luminance;
2825  metadata->max_luminance.den = luma_den;
2826  metadata->min_luminance.num = s->sei.mastering_display.min_luminance;
2827  metadata->min_luminance.den = luma_den;
2828  metadata->has_luminance = 1;
2829  metadata->has_primaries = 1;
2830 
2831  av_log(s->avctx, AV_LOG_DEBUG, "Mastering Display Metadata:\n");
2832  av_log(s->avctx, AV_LOG_DEBUG,
2833  "r(%5.4f,%5.4f) g(%5.4f,%5.4f) b(%5.4f %5.4f) wp(%5.4f, %5.4f)\n",
2834  av_q2d(metadata->display_primaries[0][0]),
2835  av_q2d(metadata->display_primaries[0][1]),
2836  av_q2d(metadata->display_primaries[1][0]),
2837  av_q2d(metadata->display_primaries[1][1]),
2838  av_q2d(metadata->display_primaries[2][0]),
2839  av_q2d(metadata->display_primaries[2][1]),
2840  av_q2d(metadata->white_point[0]), av_q2d(metadata->white_point[1]));
2841  av_log(s->avctx, AV_LOG_DEBUG,
2842  "min_luminance=%f, max_luminance=%f\n",
2843  av_q2d(metadata->min_luminance), av_q2d(metadata->max_luminance));
2844  }
2845  // Decrement the mastering display flag when IRAP frame has no_rasl_output_flag=1
2846  // so the side data persists for the entire coded video sequence.
2847  if (s->sei.content_light.present > 0 &&
2848  IS_IRAP(s) && s->no_rasl_output_flag) {
2849  s->sei.content_light.present--;
2850  }
2851  if (s->sei.content_light.present) {
2852  AVContentLightMetadata *metadata =
2853  av_content_light_metadata_create_side_data(out);
2854  if (!metadata)
2855  return AVERROR(ENOMEM);
2856  metadata->MaxCLL = s->sei.content_light.max_content_light_level;
2857  metadata->MaxFALL = s->sei.content_light.max_pic_average_light_level;
2858 
2859  av_log(s->avctx, AV_LOG_DEBUG, "Content Light Level Metadata:\n");
2860  av_log(s->avctx, AV_LOG_DEBUG, "MaxCLL=%d, MaxFALL=%d\n",
2861  metadata->MaxCLL, metadata->MaxFALL);
2862  }
2863 
2864  if (s->sei.a53_caption.buf_ref) {
2865  HEVCSEIA53Caption *a53 = &s->sei.a53_caption;
2866 
2867  AVFrameSideData *sd = av_frame_new_side_data_from_buf(out, AV_FRAME_DATA_A53_CC, a53->buf_ref);
2868  if (!sd)
2869  av_buffer_unref(&a53->buf_ref);
2870  a53->buf_ref = NULL;
2871  }
2872 
2873  for (int i = 0; i < s->sei.unregistered.nb_buf_ref; i++) {
2874  HEVCSEIUnregistered *unreg = &s->sei.unregistered;
2875 
2876  if (unreg->buf_ref[i]) {
2877  AVFrameSideData *sd = av_frame_new_side_data_from_buf(out,
2878  AV_FRAME_DATA_SEI_UNREGISTERED,
2879  unreg->buf_ref[i]);
2880  if (!sd)
2881  av_buffer_unref(&unreg->buf_ref[i]);
2882  unreg->buf_ref[i] = NULL;
2883  }
2884  }
2885  s->sei.unregistered.nb_buf_ref = 0;
2886 
2887  if (s->sei.timecode.present) {
2888  uint32_t *tc_sd;
2889  char tcbuf[AV_TIMECODE_STR_SIZE];
2890  AVFrameSideData *tcside = av_frame_new_side_data(out, AV_FRAME_DATA_S12M_TIMECODE,
2891  sizeof(uint32_t) * 4);
2892  if (!tcside)
2893  return AVERROR(ENOMEM);
2894 
2895  tc_sd = (uint32_t*)tcside->data;
2896  tc_sd[0] = s->sei.timecode.num_clock_ts;
2897 
2898  for (int i = 0; i < tc_sd[0]; i++) {
2899  int drop = s->sei.timecode.cnt_dropped_flag[i];
2900  int hh = s->sei.timecode.hours_value[i];
2901  int mm = s->sei.timecode.minutes_value[i];
2902  int ss = s->sei.timecode.seconds_value[i];
2903  int ff = s->sei.timecode.n_frames[i];
2904 
2905  tc_sd[i + 1] = av_timecode_get_smpte(s->avctx->framerate, drop, hh, mm, ss, ff);
2906  av_timecode_make_smpte_tc_string2(tcbuf, s->avctx->framerate, tc_sd[i + 1], 0, 0);
2907  av_dict_set(&out->metadata, "timecode", tcbuf, 0);
2908  }
2909 
2910  s->sei.timecode.num_clock_ts = 0;
2911  }
2912 
2913  if (s->sei.film_grain_characteristics.present) {
2914  HEVCSEIFilmGrainCharacteristics *fgc = &s->sei.film_grain_characteristics;
2915  AVFilmGrainParams *fgp = av_film_grain_params_create_side_data(out);
2916  if (!fgp)
2917  return AVERROR(ENOMEM);
2918 
2919  fgp->type = AV_FILM_GRAIN_PARAMS_H274;
2920  fgp->seed = s->ref->poc; /* no poc_offset in HEVC */
2921 
2922  fgp->codec.h274.model_id = fgc->model_id;
2923  if (fgc->separate_colour_description_present_flag) {
2924  fgp->codec.h274.bit_depth_luma = fgc->bit_depth_luma;
2925  fgp->codec.h274.bit_depth_chroma = fgc->bit_depth_chroma;
2926  fgp->codec.h274.color_range = fgc->full_range + 1;
2927  fgp->codec.h274.color_primaries = fgc->color_primaries;
2928  fgp->codec.h274.color_trc = fgc->transfer_characteristics;
2929  fgp->codec.h274.color_space = fgc->matrix_coeffs;
2930  } else {
2931  const HEVCSPS *sps = s->ps.sps;
2932  const VUI *vui = &sps->vui;
2933  fgp->codec.h274.bit_depth_luma = sps->bit_depth;
2934  fgp->codec.h274.bit_depth_chroma = sps->bit_depth_chroma;
2935  if (vui->video_signal_type_present_flag)
2936  fgp->codec.h274.color_range = vui->video_full_range_flag + 1;
2937  else
2938  fgp->codec.h274.color_range = AVCOL_RANGE_UNSPECIFIED;
2939  if (vui->colour_description_present_flag) {
2940  fgp->codec.h274.color_primaries = vui->colour_primaries;
2941  fgp->codec.h274.color_trc = vui->transfer_characteristic;
2942  fgp->codec.h274.color_space = vui->matrix_coeffs;
2943  } else {
2944  fgp->codec.h274.color_primaries = AVCOL_PRI_UNSPECIFIED;
2945  fgp->codec.h274.color_trc = AVCOL_TRC_UNSPECIFIED;
2946  fgp->codec.h274.color_space = AVCOL_SPC_UNSPECIFIED;
2947  }
2948  }
2949  fgp->codec.h274.blending_mode_id = fgc->blending_mode_id;
2950  fgp->codec.h274.log2_scale_factor = fgc->log2_scale_factor;
2951 
2952  memcpy(&fgp->codec.h274.component_model_present, &fgc->component_model_present,
2953  sizeof(fgp->codec.h274.component_model_present));
2954  memcpy(&fgp->codec.h274.num_intensity_intervals, &fgc->num_intensity_intervals,
2955  sizeof(fgp->codec.h274.num_intensity_intervals));
2956  memcpy(&fgp->codec.h274.num_model_values, &fgc->num_model_values,
2957  sizeof(fgp->codec.h274.num_model_values));
2958  memcpy(&fgp->codec.h274.intensity_interval_lower_bound, &fgc->intensity_interval_lower_bound,
2959  sizeof(fgp->codec.h274.intensity_interval_lower_bound));
2960  memcpy(&fgp->codec.h274.intensity_interval_upper_bound, &fgc->intensity_interval_upper_bound,
2961  sizeof(fgp->codec.h274.intensity_interval_upper_bound));
2962  memcpy(&fgp->codec.h274.comp_model_value, &fgc->comp_model_value,
2963  sizeof(fgp->codec.h274.comp_model_value));
2964 
2965  fgc->present = fgc->persistence_flag;
2966  }
2967 
2968  if (s->sei.dynamic_hdr_plus.info) {
2969  AVBufferRef *info_ref = av_buffer_ref(s->sei.dynamic_hdr_plus.info);
2970  if (!info_ref)
2971  return AVERROR(ENOMEM);
2972 
2973  if (!av_frame_new_side_data_from_buf(out, AV_FRAME_DATA_DYNAMIC_HDR_PLUS, info_ref)) {
2974  av_buffer_unref(&info_ref);
2975  return AVERROR(ENOMEM);
2976  }
2977  }
2978 
2979  if (s->rpu_buf) {
2980  AVFrameSideData *rpu = av_frame_new_side_data_from_buf(out, AV_FRAME_DATA_DOVI_RPU_BUFFER, s->rpu_buf);
2981  if (!rpu)
2982  return AVERROR(ENOMEM);
2983 
2984  s->rpu_buf = NULL;
2985  }
2986 
2987  if ((ret = ff_dovi_attach_side_data(&s->dovi_ctx, out)) < 0)
2988  return ret;
2989 
2990  if (s->sei.dynamic_hdr_vivid.info) {
2991  AVBufferRef *info_ref = av_buffer_ref(s->sei.dynamic_hdr_vivid.info);
2992  if (!info_ref)
2993  return AVERROR(ENOMEM);
2994 
2995  if (!av_frame_new_side_data_from_buf(out, AV_FRAME_DATA_DYNAMIC_HDR_VIVID, info_ref)) {
2996  av_buffer_unref(&info_ref);
2997  return AVERROR(ENOMEM);
2998  }
2999  }
3000 
3001  return 0;
3002 }
3003 
3004 static int hevc_frame_start(HEVCContext *s)
3005 {
3006  HEVCLocalContext *lc = s->HEVClc;
3007  int pic_size_in_ctb = ((s->ps.sps->width >> s->ps.sps->log2_min_cb_size) + 1) *
3008  ((s->ps.sps->height >> s->ps.sps->log2_min_cb_size) + 1);
3009  int ret;
3010 
3011  memset(s->horizontal_bs, 0, s->bs_width * s->bs_height);
3012  memset(s->vertical_bs, 0, s->bs_width * s->bs_height);
3013  memset(s->cbf_luma, 0, s->ps.sps->min_tb_width * s->ps.sps->min_tb_height);
3014  memset(s->is_pcm, 0, (s->ps.sps->min_pu_width + 1) * (s->ps.sps->min_pu_height + 1));
3015  memset(s->tab_slice_address, -1, pic_size_in_ctb * sizeof(*s->tab_slice_address));
3016 
3017  s->is_decoded = 0;
3018  s->first_nal_type = s->nal_unit_type;
3019 
3020  s->no_rasl_output_flag = IS_IDR(s) || IS_BLA(s) || (s->nal_unit_type == HEVC_NAL_CRA_NUT && s->last_eos);
3021 
3022  if (s->ps.pps->tiles_enabled_flag)
3023  lc->end_of_tiles_x = s->ps.pps->column_width[0] << s->ps.sps->log2_ctb_size;
3024 
3025  ret = ff_hevc_set_new_ref(s, &s->frame, s->poc);
3026  if (ret < 0)
3027  goto fail;
3028 
3029  ret = ff_hevc_frame_rps(s);
3030  if (ret < 0) {
3031  av_log(s->avctx, AV_LOG_ERROR, "Error constructing the frame RPS.\n");
3032  goto fail;
3033  }
3034 
3035  s->ref->frame->key_frame = IS_IRAP(s);
3036 
3037  s->ref->needs_fg = s->sei.film_grain_characteristics.present &&
3038  !(s->avctx->export_side_data & AV_CODEC_EXPORT_DATA_FILM_GRAIN) &&
3039  !s->avctx->hwaccel;
3040 
3041  if (s->ref->needs_fg) {
3042  s->ref->frame_grain->format = s->ref->frame->format;
3043  s->ref->frame_grain->width = s->ref->frame->width;
3044  s->ref->frame_grain->height = s->ref->frame->height;
3045  if ((ret = ff_thread_get_buffer(s->avctx, s->ref->frame_grain, 0)) < 0)
3046  goto fail;
3047  }
3048 
3049  ret = set_side_data(s);
3050  if (ret < 0)
3051  goto fail;
3052 
3053  s->frame->pict_type = 3 - s->sh.slice_type;
3054 
3055  if (!IS_IRAP(s))
3056  ff_hevc_bump_frame(s);
3057 
3058  av_frame_unref(s->output_frame);
3059  ret = ff_hevc_output_frame(s, s->output_frame, 0);
3060  if (ret < 0)
3061  goto fail;
3062 
3063  if (!s->avctx->hwaccel)
3064  ff_thread_finish_setup(s->avctx);
3065 
3066  return 0;
3067 
3068 fail:
3069  if (s->ref)
3070  ff_hevc_unref_frame(s, s->ref, ~0);
3071  s->ref = NULL;
3072  return ret;
3073 }
3074 
3075 static int hevc_frame_end(HEVCContext *s)
3076 {
3077  HEVCFrame *out = s->ref;
3078  const AVFrameSideData *sd;
3079  int ret;
3080 
3081  if (out->needs_fg) {
3082  sd = av_frame_get_side_data(out->frame, AV_FRAME_DATA_FILM_GRAIN_PARAMS);
3083  av_assert0(out->frame_grain->buf[0] && sd);
3084  ret = ff_h274_apply_film_grain(out->frame_grain, out->frame, &s->h274db,
3085  (AVFilmGrainParams *) sd->data);
3086 
3087  if (ret < 0) {
3088  av_log(s->avctx, AV_LOG_WARNING, "Failed synthesizing film "
3089  "grain, ignoring: %s\n", av_err2str(ret));
3090  out->needs_fg = 0;
3091  }
3092  }
3093 
3094  return 0;
3095 }
3096 
3097 static int decode_nal_unit(HEVCContext *s, const H2645NAL *nal)
3098 {
3099  HEVCLocalContext *lc = s->HEVClc;
3100  GetBitContext *gb = &lc->gb;
3101  int ctb_addr_ts, ret;
3102 
3103  *gb = nal->gb;
3104  s->nal_unit_type = nal->type;
3105  s->temporal_id = nal->temporal_id;
3106 
3107  switch (s->nal_unit_type) {
3108  case HEVC_NAL_VPS:
3109  if (s->avctx->hwaccel && s->avctx->hwaccel->decode_params) {
3110  ret = s->avctx->hwaccel->decode_params(s->avctx,
3111  nal->type,
3112  nal->raw_data,
3113  nal->raw_size);
3114  if (ret < 0)
3115  goto fail;
3116  }
3117  ret = ff_hevc_decode_nal_vps(gb, s->avctx, &s->ps);
3118  if (ret < 0)
3119  goto fail;
3120  break;
3121  case HEVC_NAL_SPS:
3122  if (s->avctx->hwaccel && s->avctx->hwaccel->decode_params) {
3123  ret = s->avctx->hwaccel->decode_params(s->avctx,
3124  nal->type,
3125  nal->raw_data,
3126  nal->raw_size);
3127  if (ret < 0)
3128  goto fail;
3129  }
3130  ret = ff_hevc_decode_nal_sps(gb, s->avctx, &s->ps,
3131  s->apply_defdispwin);
3132  if (ret < 0)
3133  goto fail;
3134  break;
3135  case HEVC_NAL_PPS:
3136  if (s->avctx->hwaccel && s->avctx->hwaccel->decode_params) {
3137  ret = s->avctx->hwaccel->decode_params(s->avctx,
3138  nal->type,
3139  nal->raw_data,
3140  nal->raw_size);
3141  if (ret < 0)
3142  goto fail;
3143  }
3144  ret = ff_hevc_decode_nal_pps(gb, s->avctx, &s->ps);
3145  if (ret < 0)
3146  goto fail;
3147  break;
3148  case HEVC_NAL_SEI_PREFIX:
3149  case HEVC_NAL_SEI_SUFFIX:
3150  if (s->avctx->hwaccel && s->avctx->hwaccel->decode_params) {
3151  ret = s->avctx->hwaccel->decode_params(s->avctx,
3152  nal->type,
3153  nal->raw_data,
3154  nal->raw_size);
3155  if (ret < 0)
3156  goto fail;
3157  }
3158  ret = ff_hevc_decode_nal_sei(gb, s->avctx, &s->sei, &s->ps, s->nal_unit_type);
3159  if (ret < 0)
3160  goto fail;
3161  break;
3162  case HEVC_NAL_TRAIL_R:
3163  case HEVC_NAL_TRAIL_N:
3164  case HEVC_NAL_TSA_N:
3165  case HEVC_NAL_TSA_R:
3166  case HEVC_NAL_STSA_N:
3167  case HEVC_NAL_STSA_R:
3168  case HEVC_NAL_BLA_W_LP:
3169  case HEVC_NAL_BLA_W_RADL:
3170  case HEVC_NAL_BLA_N_LP:
3171  case HEVC_NAL_IDR_W_RADL:
3172  case HEVC_NAL_IDR_N_LP:
3173  case HEVC_NAL_CRA_NUT:
3174  case HEVC_NAL_RADL_N:
3175  case HEVC_NAL_RADL_R:
3176  case HEVC_NAL_RASL_N:
3177  case HEVC_NAL_RASL_R:
3178  ret = hls_slice_header(s);
3179  if (ret < 0)
3180  return ret;
3181  if (ret == 1) {
3182  ret = AVERROR_INVALIDDATA;
3183  goto fail;
3184  }
3185 
3186 
3187  if (
3188  (s->avctx->skip_frame >= AVDISCARD_BIDIR && s->sh.slice_type == HEVC_SLICE_B) ||
3189  (s->avctx->skip_frame >= AVDISCARD_NONINTRA && s->sh.slice_type != HEVC_SLICE_I) ||
3190  (s->avctx->skip_frame >= AVDISCARD_NONKEY && !IS_IRAP(s))) {
3191  break;
3192  }
3193 
3194  if (s->sh.first_slice_in_pic_flag) {
3195  if (s->max_ra == INT_MAX) {
3196  if (s->nal_unit_type == HEVC_NAL_CRA_NUT || IS_BLA(s)) {
3197  s->max_ra = s->poc;
3198  } else {
3199  if (IS_IDR(s))
3200  s->max_ra = INT_MIN;
3201  }
3202  }
3203 
3204  if ((s->nal_unit_type == HEVC_NAL_RASL_R || s->nal_unit_type == HEVC_NAL_RASL_N) &&
3205  s->poc <= s->max_ra) {
3206  s->is_decoded = 0;
3207  break;
3208  } else {
3209  if (s->nal_unit_type == HEVC_NAL_RASL_R && s->poc > s->max_ra)
3210  s->max_ra = INT_MIN;
3211  }
3212 
3213  s->overlap ++;
3214  ret = hevc_frame_start(s);
3215  if (ret < 0)
3216  return ret;
3217  } else if (!s->ref) {
3218  av_log(s->avctx, AV_LOG_ERROR, "First slice in a frame missing.\n");
3219  goto fail;
3220  }
3221 
3222  if (s->nal_unit_type != s->first_nal_type) {
3223  av_log(s->avctx, AV_LOG_ERROR,
3224  "Non-matching NAL types of the VCL NALUs: %d %d\n",
3225  s->first_nal_type, s->nal_unit_type);
3226  return AVERROR_INVALIDDATA;
3227  }
3228 
3229  if (!s->sh.dependent_slice_segment_flag &&
3230  s->sh.slice_type != HEVC_SLICE_I) {
3231  ret = ff_hevc_slice_rpl(s);
3232  if (ret < 0) {
3233  av_log(s->avctx, AV_LOG_WARNING,
3234  "Error constructing the reference lists for the current slice.\n");
3235  goto fail;
3236  }
3237  }
3238 
3239  if (s->sh.first_slice_in_pic_flag && s->avctx->hwaccel) {
3240  ret = s->avctx->hwaccel->start_frame(s->avctx, NULL, 0);
3241  if (ret < 0)
3242  goto fail;
3243  }
3244 
3245  if (s->avctx->hwaccel) {
3246  ret = s->avctx->hwaccel->decode_slice(s->avctx, nal->raw_data, nal->raw_size);
3247  if (ret < 0)
3248  goto fail;
3249  } else {
3250  if (s->threads_number > 1 && s->sh.num_entry_point_offsets > 0)
3251  ctb_addr_ts = hls_slice_data_wpp(s, nal);
3252  else
3253  ctb_addr_ts = hls_slice_data(s);
3254  if (ctb_addr_ts >= (s->ps.sps->ctb_width * s->ps.sps->ctb_height)) {
3255  ret = hevc_frame_end(s);
3256  if (ret < 0)
3257  goto fail;
3258  s->is_decoded = 1;
3259  }
3260 
3261  if (ctb_addr_ts < 0) {
3262  ret = ctb_addr_ts;
3263  goto fail;
3264  }
3265  }
3266  break;
3267  case HEVC_NAL_EOS_NUT:
3268  case HEVC_NAL_EOB_NUT:
3269  s->seq_decode = (s->seq_decode + 1) & 0xff;
3270  s->max_ra = INT_MAX;
3271  break;
3272  case HEVC_NAL_AUD:
3273  case HEVC_NAL_FD_NUT:
3274  case HEVC_NAL_UNSPEC62:
3275  break;
3276  default:
3277  av_log(s->avctx, AV_LOG_INFO,
3278  "Skipping NAL unit %d\n", s->nal_unit_type);
3279  }
3280 
3281  return 0;
3282 fail:
3283  if (s->avctx->err_recognition & AV_EF_EXPLODE)
3284  return ret;
3285  return 0;
3286 }
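/*
 * Usage sketch (illustrative assumption, not code from this file): the AVDISCARD
 * checks inside decode_nal_unit() let a caller drop work before slice decoding
 * starts, e.g. keeping only IRAP (IDR/CRA/BLA) pictures. "codec" is a hypothetical
 * AVCodec pointer.
 *
 *     AVCodecContext *avctx = avcodec_alloc_context3(codec);
 *     avctx->skip_frame = AVDISCARD_NONKEY;   // skip every non-IRAP slice
 *     // AVDISCARD_BIDIR would drop only B slices, AVDISCARD_NONINTRA
 *     // everything except I slices
 *     avcodec_open2(avctx, codec, NULL);
 */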
3287 
3288 static int decode_nal_units(HEVCContext *s, const uint8_t *buf, int length)
3289 {
3290  int i, ret = 0;
3291  int eos_at_start = 1;
3292 
3293  s->ref = NULL;
3294  s->last_eos = s->eos;
3295  s->eos = 0;
3296  s->overlap = 0;
3297 
3298  /* split the input packet into NAL units, so we know the upper bound on the
3299  * number of slices in the frame */
3300  ret = ff_h2645_packet_split(&s->pkt, buf, length, s->avctx, s->is_nalff,
3301  s->nal_length_size, s->avctx->codec_id, 1, 0);
3302  if (ret < 0) {
3303  av_log(s->avctx, AV_LOG_ERROR,
3304  "Error splitting the input into NAL units.\n");
3305  return ret;
3306  }
3307 
3308  for (i = 0; i < s->pkt.nb_nals; i++) {
3309  if (s->pkt.nals[i].type == HEVC_NAL_EOB_NUT ||
3310  s->pkt.nals[i].type == HEVC_NAL_EOS_NUT) {
3311  if (eos_at_start) {
3312  s->last_eos = 1;
3313  } else {
3314  s->eos = 1;
3315  }
3316  } else {
3317  eos_at_start = 0;
3318  }
3319  }
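/*
 * Worked example of the scan above (illustrative only): a leading EOS/EOB NAL
 * terminates the previous coded video sequence, a trailing one terminates the
 * sequence of the current packet.
 *
 *     { EOS, VPS, SPS, PPS, IDR slice }  ->  last_eos = 1, eos stays 0
 *     { IDR slice, SEI suffix, EOS }     ->  last_eos untouched by the loop, eos = 1
 */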
3320 
3321  /*
3322  * Check for RPU delimiter.
3323  *
3324  * Dolby Vision RPUs masquerade as unregistered NALs of type 62.
3325  *
3326  * We have to do this check here and create the RPU buffer, since RPUs are appended
3327  * to the end of an AU; they are the last non-EOB/EOS NAL in the AU.
3328  */
3329  if (s->pkt.nb_nals > 1 && s->pkt.nals[s->pkt.nb_nals - 1].type == HEVC_NAL_UNSPEC62 &&
3330  s->pkt.nals[s->pkt.nb_nals - 1].size > 2 && !s->pkt.nals[s->pkt.nb_nals - 1].nuh_layer_id
3331  && !s->pkt.nals[s->pkt.nb_nals - 1].temporal_id) {
3332  H2645NAL *nal = &s->pkt.nals[s->pkt.nb_nals - 1];
3333  if (s->rpu_buf) {
3334  av_buffer_unref(&s->rpu_buf);
3335  av_log(s->avctx, AV_LOG_WARNING, "Multiple Dolby Vision RPUs found in one AU. Skipping previous.\n");
3336  }
3337 
3338  s->rpu_buf = av_buffer_alloc(nal->raw_size - 2);
3339  if (!s->rpu_buf)
3340  return AVERROR(ENOMEM);
3341  memcpy(s->rpu_buf->data, nal->raw_data + 2, nal->raw_size - 2);
3342 
3343  ret = ff_dovi_rpu_parse(&s->dovi_ctx, nal->data + 2, nal->size - 2);
3344  if (ret < 0) {
3345  av_buffer_unref(&s->rpu_buf);
3346  av_log(s->avctx, AV_LOG_WARNING, "Error parsing DOVI NAL unit.\n");
3347  /* ignore */
3348  }
3349  }
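/*
 * Sketch of the access unit layout this branch assumes (illustrative; the two
 * skipped bytes are presumably the two-byte HEVC nal_unit_header at the start
 * of raw_data):
 *
 *     [ VPS ][ SPS ][ PPS ][ slice ] ... [ slice ][ NAL type 62 = RPU ]
 *                                                    ^ last NAL in the packet;
 *                                                      raw_data + 2 is copied
 *                                                      into s->rpu_buf
 */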
3350 
3351  /* decode the NAL units */
3352  for (i = 0; i < s->pkt.nb_nals; i++) {
3353  H2645NAL *nal = &s->pkt.nals[i];
3354 
3355  if (s->avctx->skip_frame >= AVDISCARD_ALL ||
3356  (s->avctx->skip_frame >= AVDISCARD_NONREF
3357  && ff_hevc_nal_is_nonref(nal->type)) || nal->nuh_layer_id > 0)
3358  continue;
3359 
3360  ret = decode_nal_unit(s, nal);
3361  if (ret >= 0 && s->overlap > 2)
3362  ret = AVERROR_INVALIDDATA;
3363  if (ret < 0) {
3364  av_log(s->avctx, AV_LOG_WARNING,
3365  "Error parsing NAL unit #%d.\n", i);
3366  goto fail;
3367  }
3368  }
3369 
3370 fail:
3371  if (s->ref && s->threads_type == FF_THREAD_FRAME)
3372  ff_thread_report_progress(&s->ref->tf, INT_MAX, 0);
3373 
3374  return ret;
3375 }
3376 
3377 static void print_md5(void *log_ctx, int level, uint8_t md5[16])
3378 {
3379  int i;
3380  for (i = 0; i < 16; i++)
3381  av_log(log_ctx, level, "%02"PRIx8, md5[i]);
3382 }
3383 
3384 static int verify_md5(HEVCContext *s, AVFrame *frame)
3385 {
3386  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
3387  int pixel_shift;
3388  int i, j;
3389 
3390  if (!desc)
3391  return AVERROR(EINVAL);
3392 
3393  pixel_shift = desc->comp[0].depth > 8;
3394 
3395  av_log(s->avctx, AV_LOG_DEBUG, "Verifying checksum for frame with POC %d: ",
3396  s->poc);
3397 
3398  /* the checksums are LE, so we have to byteswap for >8bpp formats
3399  * on BE arches */
3400 #if HAVE_BIGENDIAN
3401  if (pixel_shift && !s->checksum_buf) {
3402  av_fast_malloc(&s->checksum_buf, &s->checksum_buf_size,
3403  FFMAX3(frame->linesize[0], frame->linesize[1],
3404  frame->linesize[2]));
3405  if (!s->checksum_buf)
3406  return AVERROR(ENOMEM);
3407  }
3408 #endif
3409 
3410  for (i = 0; frame->data[i]; i++) {
3411  int width = s->avctx->coded_width;
3412  int height = s->avctx->coded_height;
3413  int w = (i == 1 || i == 2) ? (width >> desc->log2_chroma_w) : width;
3414  int h = (i == 1 || i == 2) ? (height >> desc->log2_chroma_h) : height;
3415  uint8_t md5[16];
3416 
3417  av_md5_init(s->md5_ctx);
3418  for (j = 0; j < h; j++) {
3419  const uint8_t *src = frame->data[i] + j * frame->linesize[i];
3420 #if HAVE_BIGENDIAN
3421  if (pixel_shift) {
3422  s->bdsp.bswap16_buf((uint16_t *) s->checksum_buf,
3423  (const uint16_t *) src, w);
3424  src = s->checksum_buf;
3425  }
3426 #endif
3427  av_md5_update(s->md5_ctx, src, w << pixel_shift);
3428  }
3429  av_md5_final(s->md5_ctx, md5);
3430 
3431  if (!memcmp(md5, s->sei.picture_hash.md5[i], 16)) {
3432  av_log (s->avctx, AV_LOG_DEBUG, "plane %d - correct ", i);
3433  print_md5(s->avctx, AV_LOG_DEBUG, md5);
3434  av_log (s->avctx, AV_LOG_DEBUG, "; ");
3435  } else {
3436  av_log (s->avctx, AV_LOG_ERROR, "mismatching checksum of plane %d - ", i);
3437  print_md5(s->avctx, AV_LOG_ERROR, md5);
3438  av_log (s->avctx, AV_LOG_ERROR, " != ");
3439  print_md5(s->avctx, AV_LOG_ERROR, s->sei.picture_hash.md5[i]);
3440  av_log (s->avctx, AV_LOG_ERROR, "\n");
3441  return AVERROR_INVALIDDATA;
3442  }
3443  }
3444 
3445  av_log(s->avctx, AV_LOG_DEBUG, "\n");
3446 
3447  return 0;
3448 }
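/*
 * Minimal standalone sketch of the same per-plane hashing as verify_md5() above,
 * using libavutil's MD5 API. Assumes a decoded AVFrame "frame", a plane index
 * "i", the plane width in bytes "w", the plane height "h" and a 16-byte
 * reference digest "expected" (names are illustrative, not from this file);
 * the big-endian byteswap handled above is omitted here.
 *
 *     struct AVMD5 *ctx = av_md5_alloc();
 *     uint8_t digest[16];
 *     av_md5_init(ctx);
 *     for (int y = 0; y < h; y++)                 // hash row by row so the
 *         av_md5_update(ctx, frame->data[i] +     // line padding is excluded
 *                            y * frame->linesize[i], w);
 *     av_md5_final(ctx, digest);
 *     av_free(ctx);
 *     int ok = !memcmp(digest, expected, 16);
 */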
3449 
3450 static int hevc_decode_extradata(HEVCContext *s, uint8_t *buf, int length, int first)
3451 {
3452  int ret, i;
3453 
3454  ret = ff_hevc_decode_extradata(buf, length, &s->ps, &s->sei, &s->is_nalff,
3455  &s->nal_length_size, s->avctx->err_recognition,
3456  s->apply_defdispwin, s->avctx);
3457  if (ret < 0)
3458  return ret;
3459 
3460  /* export stream parameters from the first SPS */
3461  for (i = 0; i < FF_ARRAY_ELEMS(s->ps.sps_list); i++) {
3462  if (first && s->ps.sps_list[i]) {
3463  const HEVCSPS *sps = (const HEVCSPS*)s->ps.sps_list[i]->data;
3464  export_stream_params(s, sps);
3465  break;
3466  }
3467  }
3468 
3469  /* export stream parameters from SEI */
3470  ret = export_stream_params_from_sei(s);
3471  if (ret < 0)
3472  return ret;
3473 
3474  return 0;
3475 }
3476 
3477 static int hevc_decode_frame(AVCodecContext *avctx, AVFrame *rframe,
3478  int *got_output, AVPacket *avpkt)
3479 {
3480  int ret;
3481  uint8_t *sd;
3482  size_t sd_size;
3483  HEVCContext *s = avctx->priv_data;
3484 
3485  if (!avpkt->size) {
3486  ret = ff_hevc_output_frame(s, rframe, 1);
3487  if (ret < 0)
3488  return ret;
3489 
3490  *got_output = ret;
3491  return 0;
3492  }
3493 
3494  sd = av_packet_get_side_data(avpkt, AV_PKT_DATA_NEW_EXTRADATA, &sd_size);
3495  if (sd && sd_size > 0) {
3496  ret = hevc_decode_extradata(s, sd, sd_size, 0);
3497  if (ret < 0)
3498  return ret;
3499  }
3500 
3501  sd = av_packet_get_side_data(avpkt, AV_PKT_DATA_DOVI_CONF, &sd_size);
3502  if (sd && sd_size > 0)
3503  ff_dovi_update_cfg(&s->dovi_ctx, (AVDOVIDecoderConfigurationRecord *) sd);
3504 
3505  s->ref = NULL;
3506  ret = decode_nal_units(s, avpkt->data, avpkt->size);
3507  if (ret < 0)
3508  return ret;
3509 
3510  if (avctx->hwaccel) {
3511  if (s->ref && (ret = avctx->hwaccel->end_frame(avctx)) < 0) {
3512  av_log(avctx, AV_LOG_ERROR,
3513  "hardware accelerator failed to decode picture\n");
3514  ff_hevc_unref_frame(s, s->ref, ~0);
3515  return ret;
3516  }
3517  } else {
3518  /* verify the SEI checksum */
3519  if (avctx->err_recognition & AV_EF_CRCCHECK && s->is_decoded &&
3520  s->sei.picture_hash.is_md5) {
3521  ret = verify_md5(s, s->ref->frame);
3522  if (ret < 0 && avctx->err_recognition & AV_EF_EXPLODE) {
3523  ff_hevc_unref_frame(s, s->ref, ~0);
3524  return ret;
3525  }
3526  }
3527  }
3528  s->sei.picture_hash.is_md5 = 0;
3529 
3530  if (s->is_decoded) {
3531  av_log(avctx, AV_LOG_DEBUG, "Decoded frame with POC %d.\n", s->poc);
3532  s->is_decoded = 0;
3533  }
3534 
3535  if (s->output_frame->buf[0]) {
3536  av_frame_move_ref(rframe, s->output_frame);
3537  *got_output = 1;
3538  }
3539 
3540  return avpkt->size;
3541 }
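/*
 * Caller-side sketch (public API usage, not code from this file): a drain
 * request from the user reaches this function as a zero-size packet, which is
 * the branch above that returns the buffered, reordered frames.
 *
 *     avcodec_send_packet(avctx, NULL);               // signal end of stream
 *     while (avcodec_receive_frame(avctx, frame) >= 0) {
 *         // consume the remaining delayed frames
 *         av_frame_unref(frame);
 *     }
 */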
3542 
3543 static int hevc_ref_frame(HEVCContext *s, HEVCFrame *dst, HEVCFrame *src)
3544 {
3545  int ret;
3546 
3547  ret = ff_thread_ref_frame(&dst->tf, &src->tf);
3548  if (ret < 0)
3549  return ret;
3550 
3551  if (src->needs_fg) {
3552  ret = av_frame_ref(dst->frame_grain, src->frame_grain);
3553  if (ret < 0)
3554  return ret;
3555  dst->needs_fg = 1;
3556  }
3557 
3558  dst->tab_mvf_buf = av_buffer_ref(src->tab_mvf_buf);
3559  if (!dst->tab_mvf_buf)
3560  goto fail;
3561  dst->tab_mvf = src->tab_mvf;
3562 
3563  dst->rpl_tab_buf = av_buffer_ref(src->rpl_tab_buf);
3564  if (!dst->rpl_tab_buf)
3565  goto fail;
3566  dst->rpl_tab = src->rpl_tab;
3567 
3568  dst->rpl_buf = av_buffer_ref(src->rpl_buf);
3569  if (!dst->rpl_buf)
3570  goto fail;
3571 
3572  dst->poc = src->poc;
3573  dst->ctb_count = src->ctb_count;
3574  dst->flags = src->flags;
3575  dst->sequence = src->sequence;
3576 
3577  if (src->hwaccel_picture_private) {
3578  dst->hwaccel_priv_buf = av_buffer_ref(src->hwaccel_priv_buf);
3579  if (!dst->hwaccel_priv_buf)
3580  goto fail;
3581  dst->hwaccel_picture_private = dst->hwaccel_priv_buf->data;
3582  }
3583 
3584  return 0;
3585 fail:
3586  ff_hevc_unref_frame(s, dst, ~0);
3587  return AVERROR(ENOMEM);
3588 }
3589 
3590 static av_cold int hevc_decode_free(AVCodecContext *avctx)
3591 {
3592  HEVCContext *s = avctx->priv_data;
3593  int i;
3594 
3595  pic_arrays_free(s);
3596 
3597  ff_dovi_ctx_unref(&s->dovi_ctx);
3598  av_buffer_unref(&s->rpu_buf);
3599 
3600  av_freep(&s->md5_ctx);
3601 
3602  av_freep(&s->cabac_state);
3603 
3604  for (i = 0; i < 3; i++) {
3605  av_freep(&s->sao_pixel_buffer_h[i]);
3606  av_freep(&s->sao_pixel_buffer_v[i]);
3607  }
3608  av_frame_free(&s->output_frame);
3609 
3610  for (i = 0; i < FF_ARRAY_ELEMS(s->DPB); i++) {
3611  ff_hevc_unref_frame(s, &s->DPB[i], ~0);
3612  av_frame_free(&s->DPB[i].frame);
3613  av_frame_free(&s->DPB[i].frame_grain);
3614  }
3615 
3616  ff_hevc_ps_uninit(&s->ps);
3617 
3618  av_freep(&s->sh.entry_point_offset);
3619  av_freep(&s->sh.offset);
3620  av_freep(&s->sh.size);
3621 
3622  if (s->HEVClcList && s->sList) {
3623  for (i = 1; i < s->threads_number; i++) {
3624  av_freep(&s->HEVClcList[i]);
3625  av_freep(&s->sList[i]);
3626  }
3627  }
3628  av_freep(&s->HEVClc);
3629  av_freep(&s->HEVClcList);
3630  av_freep(&s->sList);
3631 
3632  ff_h2645_packet_uninit(&s->pkt);
3633 
3634  ff_hevc_reset_sei(&s->sei);
3635 
3636  return 0;
3637 }
3638 
3639 static av_cold int hevc_init_context(AVCodecContext *avctx)
3640 {
3641  HEVCContext *s = avctx->priv_data;
3642  int i;
3643 
3644  s->avctx = avctx;
3645 
3646  s->HEVClc = av_mallocz(sizeof(HEVCLocalContext));
3647  s->HEVClcList = av_mallocz(sizeof(HEVCLocalContext*) * s->threads_number);
3648  s->sList = av_mallocz(sizeof(HEVCContext*) * s->threads_number);
3649  if (!s->HEVClc || !s->HEVClcList || !s->sList)
3650  return AVERROR(ENOMEM);
3651  s->HEVClcList[0] = s->HEVClc;
3652  s->sList[0] = s;
3653 
3654  s->cabac_state = av_malloc(HEVC_CONTEXTS);
3655  if (!s->cabac_state)
3656  return AVERROR(ENOMEM);
3657 
3658  s->output_frame = av_frame_alloc();
3659  if (!s->output_frame)
3660  return AVERROR(ENOMEM);
3661 
3662  for (i = 0; i < FF_ARRAY_ELEMS(s->DPB); i++) {
3663  s->DPB[i].frame = av_frame_alloc();
3664  if (!s->DPB[i].frame)
3665  return AVERROR(ENOMEM);
3666  s->DPB[i].tf.f = s->DPB[i].frame;
3667 
3668  s->DPB[i].frame_grain = av_frame_alloc();
3669  if (!s->DPB[i].frame_grain)
3670  return AVERROR(ENOMEM);
3671  }
3672 
3673  s->max_ra = INT_MAX;
3674 
3675  s->md5_ctx = av_md5_alloc();
3676  if (!s->md5_ctx)
3677  return AVERROR(ENOMEM);
3678 
3679  ff_bswapdsp_init(&s->bdsp);
3680 
3681  s->dovi_ctx.logctx = avctx;
3682  s->eos = 0;
3683 
3684  ff_hevc_reset_sei(&s->sei);
3685 
3686  return 0;
3687 }
3688 
3689 #if HAVE_THREADS
3690 static int hevc_update_thread_context(AVCodecContext *dst,
3691  const AVCodecContext *src)
3692 {
3693  HEVCContext *s = dst->priv_data;
3694  HEVCContext *s0 = src->priv_data;
3695  int i, ret;
3696 
3697  for (i = 0; i < FF_ARRAY_ELEMS(s->DPB); i++) {
3698  ff_hevc_unref_frame(s, &s->DPB[i], ~0);
3699  if (s0->DPB[i].frame->buf[0]) {
3700  ret = hevc_ref_frame(s, &s->DPB[i], &s0->DPB[i]);
3701  if (ret < 0)
3702  return ret;
3703  }
3704  }
3705 
3706  if (s->ps.sps != s0->ps.sps)
3707  s->ps.sps = NULL;
3708  for (i = 0; i < FF_ARRAY_ELEMS(s->ps.vps_list); i++) {
3709  ret = av_buffer_replace(&s->ps.vps_list[i], s0->ps.vps_list[i]);
3710  if (ret < 0)
3711  return ret;
3712  }
3713 
3714  for (i = 0; i < FF_ARRAY_ELEMS(s->ps.sps_list); i++) {
3715  ret = av_buffer_replace(&s->ps.sps_list[i], s0->ps.sps_list[i]);
3716  if (ret < 0)
3717  return ret;
3718  }
3719 
3720  for (i = 0; i < FF_ARRAY_ELEMS(s->ps.pps_list); i++) {
3721  ret = av_buffer_replace(&s->ps.pps_list[i], s0->ps.pps_list[i]);
3722  if (ret < 0)
3723  return ret;
3724  }
3725 
3726  if (s->ps.sps != s0->ps.sps)
3727  if ((ret = set_sps(s, s0->ps.sps, src->pix_fmt)) < 0)
3728  return ret;
3729 
3730  s->seq_decode = s0->seq_decode;
3731  s->seq_output = s0->seq_output;
3732  s->pocTid0 = s0->pocTid0;
3733  s->max_ra = s0->max_ra;
3734  s->eos = s0->eos;
3735  s->no_rasl_output_flag = s0->no_rasl_output_flag;
3736 
3737  s->is_nalff = s0->is_nalff;
3738  s->nal_length_size = s0->nal_length_size;
3739 
3740  s->threads_number = s0->threads_number;
3741  s->threads_type = s0->threads_type;
3742 
3743  if (s0->eos) {
3744  s->seq_decode = (s->seq_decode + 1) & 0xff;
3745  s->max_ra = INT_MAX;
3746  }
3747 
3748  ret = av_buffer_replace(&s->sei.a53_caption.buf_ref, s0->sei.a53_caption.buf_ref);
3749  if (ret < 0)
3750  return ret;
3751 
3752  for (i = 0; i < s->sei.unregistered.nb_buf_ref; i++)
3753  av_buffer_unref(&s->sei.unregistered.buf_ref[i]);
3754  s->sei.unregistered.nb_buf_ref = 0;
3755 
3756  if (s0->sei.unregistered.nb_buf_ref) {
3757  ret = av_reallocp_array(&s->sei.unregistered.buf_ref,
3758  s0->sei.unregistered.nb_buf_ref,
3759  sizeof(*s->sei.unregistered.buf_ref));
3760  if (ret < 0)
3761  return ret;
3762 
3763  for (i = 0; i < s0->sei.unregistered.nb_buf_ref; i++) {
3764  s->sei.unregistered.buf_ref[i] = av_buffer_ref(s0->sei.unregistered.buf_ref[i]);
3765  if (!s->sei.unregistered.buf_ref[i])
3766  return AVERROR(ENOMEM);
3767  s->sei.unregistered.nb_buf_ref++;
3768  }
3769  }
3770 
3771  ret = av_buffer_replace(&s->sei.dynamic_hdr_plus.info, s0->sei.dynamic_hdr_plus.info);
3772  if (ret < 0)
3773  return ret;
3774 
3775  ret = av_buffer_replace(&s->rpu_buf, s0->rpu_buf);
3776  if (ret < 0)
3777  return ret;
3778 
3779  ret = ff_dovi_ctx_replace(&s->dovi_ctx, &s0->dovi_ctx);
3780  if (ret < 0)
3781  return ret;
3782 
3783  ret = av_buffer_replace(&s->sei.dynamic_hdr_vivid.info, s0->sei.dynamic_hdr_vivid.info);
3784  if (ret < 0)
3785  return ret;
3786 
3787  s->sei.frame_packing = s0->sei.frame_packing;
3788  s->sei.display_orientation = s0->sei.display_orientation;
3789  s->sei.mastering_display = s0->sei.mastering_display;
3790  s->sei.content_light = s0->sei.content_light;
3791  s->sei.alternative_transfer = s0->sei.alternative_transfer;
3792 
3793  ret = export_stream_params_from_sei(s);
3794  if (ret < 0)
3795  return ret;
3796 
3797  return 0;
3798 }
3799 #endif
3800 
3801 static av_cold int hevc_decode_init(AVCodecContext *avctx)
3802 {
3803  HEVCContext *s = avctx->priv_data;
3804  int ret;
3805 
3806  if (avctx->active_thread_type & FF_THREAD_SLICE) {
3807  s->threads_number = avctx->thread_count;
3808  ret = ff_slice_thread_init_progress(avctx);
3809  if (ret < 0)
3810  return ret;
3811  } else
3812  s->threads_number = 1;
3813 
3814  if((avctx->active_thread_type & FF_THREAD_FRAME) && avctx->thread_count > 1)
3815  s->threads_type = FF_THREAD_FRAME;
3816  else
3817  s->threads_type = FF_THREAD_SLICE;
3818 
3819  ret = hevc_init_context(avctx);
3820  if (ret < 0)
3821  return ret;
3822 
3823  s->enable_parallel_tiles = 0;
3824  s->sei.picture_timing.picture_struct = 0;
3825  s->eos = 1;
3826 
3827  atomic_init(&s->wpp_err, 0);
3828 
3829  if (!avctx->internal->is_copy) {
3830  if (avctx->extradata_size > 0 && avctx->extradata) {
3831  ret = hevc_decode_extradata(s, avctx->extradata, avctx->extradata_size, 1);
3832  if (ret < 0) {
3833  return ret;
3834  }
3835  }
3836  }
3837 
3838  return 0;
3839 }
3840 
3841 static void hevc_decode_flush(AVCodecContext *avctx)
3842 {
3843  HEVCContext *s = avctx->priv_data;
3844  ff_hevc_flush_dpb(s);
3845  ff_hevc_reset_sei(&s->sei);
3846  ff_dovi_ctx_flush(&s->dovi_ctx);
3847  av_buffer_unref(&s->rpu_buf);
3848  s->max_ra = INT_MAX;
3849  s->eos = 1;
3850 }
3851 
3852 #define OFFSET(x) offsetof(HEVCContext, x)
3853 #define PAR (AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
3854 
3855 static const AVOption options[] = {
3856  { "apply_defdispwin", "Apply default display window from VUI", OFFSET(apply_defdispwin),
3857  AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, PAR },
3858  { "strict-displaywin", "stricly apply default display window size", OFFSET(apply_defdispwin),
3859  AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, PAR },
3860  { NULL },
3861 };
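/*
 * Sketch of setting one of these private options from user code (hypothetical
 * caller, shown only to illustrate how this AVOption table is reached through
 * the options dictionary passed to avcodec_open2()):
 *
 *     AVDictionary *opts = NULL;
 *     av_dict_set(&opts, "apply_defdispwin", "1", 0);
 *     avcodec_open2(avctx, codec, &opts);   // matches OFFSET(apply_defdispwin)
 *     av_dict_free(&opts);
 */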
3862 
3863 static const AVClass hevc_decoder_class = {
3864  .class_name = "HEVC decoder",
3865  .item_name = av_default_item_name,
3866  .option = options,
3867  .version = LIBAVUTIL_VERSION_INT,
3868 };
3869 
3871  .p.name = "hevc",
3872  .p.long_name = NULL_IF_CONFIG_SMALL("HEVC (High Efficiency Video Coding)"),
3873  .p.type = AVMEDIA_TYPE_VIDEO,
3874  .p.id = AV_CODEC_ID_HEVC,
3875  .priv_data_size = sizeof(HEVCContext),
3876  .p.priv_class = &hevc_decoder_class,
3877  .init = hevc_decode_init,
3878  .close = hevc_decode_free,
3879  FF_CODEC_DECODE_CB(hevc_decode_frame),
3880  .flush = hevc_decode_flush,
3881  .update_thread_context = ONLY_IF_THREADS_ENABLED(hevc_update_thread_context),
3882  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY |
3883  AV_CODEC_CAP_SLICE_THREADS | AV_CODEC_CAP_FRAME_THREADS,
3884  .caps_internal = FF_CODEC_CAP_EXPORTS_CROPPING | FF_CODEC_CAP_ALLOCATE_PROGRESS |
3885  FF_CODEC_CAP_INIT_CLEANUP,
3886  .p.profiles = NULL_IF_CONFIG_SMALL(ff_hevc_profiles),
3887  .hw_configs = (const AVCodecHWConfigInternal *const []) {
3888 #if CONFIG_HEVC_DXVA2_HWACCEL
3889  HWACCEL_DXVA2(hevc),
3890 #endif
3891 #if CONFIG_HEVC_D3D11VA_HWACCEL
3892  HWACCEL_D3D11VA(hevc),
3893 #endif
3894 #if CONFIG_HEVC_D3D11VA2_HWACCEL
3895  HWACCEL_D3D11VA2(hevc),
3896 #endif
3897 #if CONFIG_HEVC_NVDEC_HWACCEL
3898  HWACCEL_NVDEC(hevc),
3899 #endif
3900 #if CONFIG_HEVC_VAAPI_HWACCEL
3901  HWACCEL_VAAPI(hevc),
3902 #endif
3903 #if CONFIG_HEVC_VDPAU_HWACCEL
3904  HWACCEL_VDPAU(hevc),
3905 #endif
3906 #if CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL
3907  HWACCEL_VIDEOTOOLBOX(hevc),
3908 #endif
3909  NULL
3910  },
3911 };
error
static void error(const char *err)
Definition: target_bsf_fuzzer.c:31
HEVCSEIFilmGrainCharacteristics::comp_model_present_flag
int comp_model_present_flag[3]
Definition: hevc_sei.h:129
verify_md5
static int verify_md5(HEVCContext *s, AVFrame *frame)
Definition: hevcdec.c:3384
hwconfig.h
AVMasteringDisplayMetadata::has_primaries
int has_primaries
Flag indicating whether the display primaries (and white point) are set.
Definition: mastering_display_metadata.h:62
HEVC_NAL_RADL_N
@ HEVC_NAL_RADL_N
Definition: hevc.h:35
AVCodecContext::hwaccel
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:1379
SliceHeader::beta_offset
int beta_offset
beta_offset_div2 * 2
Definition: hevcdec.h:295
bswapdsp.h
L1
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 L1
Definition: snow.txt:554
av_buffer_pool_init
AVBufferPool * av_buffer_pool_init(size_t size, AVBufferRef *(*alloc)(size_t size))
Allocate and initialize a buffer pool.
Definition: buffer.c:280
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
HEVC_MAX_PPS_COUNT
@ HEVC_MAX_PPS_COUNT
Definition: hevc.h:114
AV_TIMECODE_STR_SIZE
#define AV_TIMECODE_STR_SIZE
Definition: timecode.h:33
HEVCLocalContext
Definition: hevcdec.h:424
AV_PIX_FMT_CUDA
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
Definition: pixfmt.h:225
HEVCFrame::flags
uint8_t flags
A combination of HEVC_FRAME_FLAG_*.
Definition: hevcdec.h:421
AVMasteringDisplayMetadata::max_luminance
AVRational max_luminance
Max luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:57
HWACCEL_MAX
#define HWACCEL_MAX
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
ff_hevc_sao_type_idx_decode
int ff_hevc_sao_type_idx_decode(HEVCContext *s)
Definition: hevc_cabac.c:573
HEVCFrame::tf
ThreadFrame tf
Definition: hevcdec.h:396
HEVCFrame::hwaccel_priv_buf
AVBufferRef * hwaccel_priv_buf
Definition: hevcdec.h:409
level
uint8_t level
Definition: svq3.c:206
ff_hevc_no_residual_syntax_flag_decode
int ff_hevc_no_residual_syntax_flag_decode(HEVCContext *s)
Definition: hevc_cabac.c:835
AV_STEREO3D_VIEW_LEFT
@ AV_STEREO3D_VIEW_LEFT
Frame contains only the left view.
Definition: stereo3d.h:156
av_clip
#define av_clip
Definition: common.h:95
atomic_store
#define atomic_store(object, desired)
Definition: stdatomic.h:85
FF_CODEC_CAP_INIT_CLEANUP
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a fai...
Definition: codec_internal.h:39
hls_decode_neighbour
static void hls_decode_neighbour(HEVCContext *s, int x_ctb, int y_ctb, int ctb_addr_ts)
Definition: hevcdec.c:2423
ff_hevc_sao_eo_class_decode
int ff_hevc_sao_eo_class_decode(HEVCContext *s)
Definition: hevc_cabac.c:608
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:839
ff_hevc_pred_init
void ff_hevc_pred_init(HEVCPredContext *hpc, int bit_depth)
Definition: hevcpred.c:43
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
AV_STEREO3D_SIDEBYSIDE_QUINCUNX
@ AV_STEREO3D_SIDEBYSIDE_QUINCUNX
Views are next to each other, but when upscaling apply a checkerboard pattern.
Definition: stereo3d.h:117
ff_dovi_ctx_unref
void ff_dovi_ctx_unref(DOVIContext *s)
Completely reset a DOVIContext, preserving only logctx.
Definition: dovi_rpu.c:43
HEVCSEIUnregistered
Definition: hevc_sei.h:65
hevc_decode_flush
static void hevc_decode_flush(AVCodecContext *avctx)
Definition: hevcdec.c:3841
AVCodecContext::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: avcodec.h:966
ff_hevc_set_qPy
void ff_hevc_set_qPy(HEVCContext *s, int xBase, int yBase, int log2_cb_size)
Definition: hevc_filter.c:119
chroma_mc_bi
static void chroma_mc_bi(HEVCContext *s, uint8_t *dst0, ptrdiff_t dststride, AVFrame *ref0, AVFrame *ref1, int x_off, int y_off, int block_w, int block_h, struct MvField *current_mv, int cidx)
8.5.3.2.2.2 Chroma sample bidirectional interpolation process
Definition: hevcdec.c:1716
PART_NxN
@ PART_NxN
Definition: hevcdec.h:145
luma_mc_bi
static void luma_mc_bi(HEVCContext *s, uint8_t *dst, ptrdiff_t dststride, AVFrame *ref0, const Mv *mv0, int x_off, int y_off, int block_w, int block_h, AVFrame *ref1, const Mv *mv1, struct MvField *current_mv)
8.5.3.2.2.1 Luma sample bidirectional interpolation process
Definition: hevcdec.c:1560
ff_hevc_res_scale_sign_flag
int ff_hevc_res_scale_sign_flag(HEVCContext *s, int idx)
Definition: hevc_cabac.c:912
decode_nal_unit
static int decode_nal_unit(HEVCContext *s, const H2645NAL *nal)
Definition: hevcdec.c:3097
ff_hevc_split_transform_flag_decode
int ff_hevc_split_transform_flag_decode(HEVCContext *s, int log2_trafo_size)
Definition: hevc_cabac.c:873
out
FILE * out
Definition: movenc.c:54
av_frame_get_side_data
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Definition: frame.c:684
av_frame_new_side_data
AVFrameSideData * av_frame_new_side_data(AVFrame *frame, enum AVFrameSideDataType type, size_t size)
Add a new side data to a frame.
Definition: frame.c:672
SAO_BAND
@ SAO_BAND
Definition: hevcdec.h:211
ff_hevc_profiles
const AVProfile ff_hevc_profiles[]
Definition: profiles.c:83
HEVCSEIFilmGrainCharacteristics::matrix_coeffs
int matrix_coeffs
Definition: hevc_sei.h:126
AVFilmGrainH274Params::color_space
enum AVColorSpace color_space
Definition: film_grain_params.h:152
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2662
ff_hevc_hls_filter
void ff_hevc_hls_filter(HEVCContext *s, int x, int y, int ctb_size)
Definition: hevc_filter.c:851
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AV_FRAME_DATA_A53_CC
@ AV_FRAME_DATA_A53_CC
ATSC A53 Part 4 Closed Captions.
Definition: frame.h:59
av_clip_uintp2
#define av_clip_uintp2
Definition: common.h:119
AVMasteringDisplayMetadata::display_primaries
AVRational display_primaries[3][2]
CIE 1931 xy chromaticity coords of color primaries (r, g, b order).
Definition: mastering_display_metadata.h:42
src1
const pixel * src1
Definition: h264pred_template.c:421
AVMasteringDisplayMetadata::has_luminance
int has_luminance
Flag indicating whether the luminance (min_ and max_) have been set.
Definition: mastering_display_metadata.h:67
AVCodecContext::err_recognition
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:1344
get_bits_long
static unsigned int get_bits_long(GetBitContext *s, int n)
Read 0-32 bits.
Definition: get_bits.h:546
HEVCLocalContext::ctb_up_flag
uint8_t ctb_up_flag
Definition: hevcdec.h:442
HEVCFrame::needs_fg
int needs_fg
Definition: hevcdec.h:397
mv
static const int8_t mv[256][2]
Definition: 4xm.c:80
SliceHeader::num_entry_point_offsets
int num_entry_point_offsets
Definition: hevcdec.h:303
HEVC_NAL_STSA_N
@ HEVC_NAL_STSA_N
Definition: hevc.h:33
HEVCFrame::frame_grain
AVFrame * frame_grain
Definition: hevcdec.h:395
AV_FRAME_DATA_FILM_GRAIN_PARAMS
@ AV_FRAME_DATA_FILM_GRAIN_PARAMS
Film grain parameters for a frame, described by AVFilmGrainParams.
Definition: frame.h:184
PART_2NxnU
@ PART_2NxnU
Definition: hevcdec.h:146
AVFilmGrainH274Params::blending_mode_id
int blending_mode_id
Specifies the blending mode used to blend the simulated film grain with the decoded images.
Definition: film_grain_params.h:160
ff_hevc_cu_qp_delta_abs
int ff_hevc_cu_qp_delta_abs(HEVCContext *s)
Definition: hevc_cabac.c:640
AV_FRAME_DATA_S12M_TIMECODE
@ AV_FRAME_DATA_S12M_TIMECODE
Timecode which conforms to SMPTE ST 12-1.
Definition: frame.h:152
av_mod_uintp2
#define av_mod_uintp2
Definition: common.h:122
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:111
AVContentLightMetadata::MaxCLL
unsigned MaxCLL
Max content light level (cd/m^2).
Definition: mastering_display_metadata.h:102
H2645NAL::nuh_layer_id
int nuh_layer_id
Definition: h2645_parse.h:67
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:325
set_deblocking_bypass
static void set_deblocking_bypass(HEVCContext *s, int x0, int y0, int log2_cb_size)
Definition: hevcdec.c:1304
pixdesc.h
HEVCFrame::tab_mvf
MvField * tab_mvf
Definition: hevcdec.h:398
AVCodecContext::color_trc
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
Definition: avcodec.h:959
TransformUnit::cu_qp_delta
int cu_qp_delta
Definition: hevcdec.h:368
HEVC_NAL_TSA_N
@ HEVC_NAL_TSA_N
Definition: hevc.h:31
w
uint8_t w
Definition: llviddspenc.c:38
HWACCEL_DXVA2
#define HWACCEL_DXVA2(codec)
Definition: hwconfig.h:67
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:599
HEVCFrame::hwaccel_picture_private
void * hwaccel_picture_private
Definition: hevcdec.h:410
av_display_matrix_flip
void av_display_matrix_flip(int32_t matrix[9], int hflip, int vflip)
Flip the input matrix horizontally and/or vertically.
Definition: display.c:66
internal.h
AVPacket::data
uint8_t * data
Definition: packet.h:374
PAR
#define PAR
Definition: hevcdec.c:3853
INTRA_DC
@ INTRA_DC
Definition: hevcdec.h:173
AVOption
AVOption.
Definition: opt.h:251
HWACCEL_D3D11VA2
#define HWACCEL_D3D11VA2(codec)
Definition: hwconfig.h:69
ff_h2645_packet_uninit
void ff_h2645_packet_uninit(H2645Packet *pkt)
Free all the allocated memory in the packet.
Definition: h2645_parse.c:528
AVCOL_TRC_UNSPECIFIED
@ AVCOL_TRC_UNSPECIFIED
Definition: pixfmt.h:499
hevc_decode_free
static av_cold int hevc_decode_free(AVCodecContext *avctx)
Definition: hevcdec.c:3590
ff_hevc_hls_filters
void ff_hevc_hls_filters(HEVCContext *s, int x_ctb, int y_ctb, int ctb_size)
Definition: hevc_filter.c:887
data
const char data[16]
Definition: mxf.c:143
Mv::y
int16_t y
vertical component of motion vector
Definition: hevcdec.h:339
ff_hevc_mpm_idx_decode
int ff_hevc_mpm_idx_decode(HEVCContext *s)
Definition: hevc_cabac.c:759
AV_FRAME_DATA_DOVI_RPU_BUFFER
@ AV_FRAME_DATA_DOVI_RPU_BUFFER
Dolby Vision RPU raw data, suitable for passing to x265 or other libraries.
Definition: frame.h:197
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:406
SAO_EDGE
@ SAO_EDGE
Definition: hevcdec.h:212
ff_hevc_hls_residual_coding
void ff_hevc_hls_residual_coding(HEVCContext *s, int x0, int y0, int log2_trafo_size, enum ScanType scan_idx, int c_idx)
Definition: hevc_cabac.c:1031
SliceHeader::slice_temporal_mvp_enabled_flag
uint8_t slice_temporal_mvp_enabled_flag
Definition: hevcdec.h:275
MvField::mv
Mv mv[2]
Definition: hevcdec.h:343
AV_PIX_FMT_D3D11VA_VLD
@ AV_PIX_FMT_D3D11VA_VLD
HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView p...
Definition: pixfmt.h:219
TransformUnit::is_cu_qp_delta_coded
uint8_t is_cu_qp_delta_coded
Definition: hevcdec.h:376
FFCodec
Definition: codec_internal.h:112
HEVC_NAL_RASL_N
@ HEVC_NAL_RASL_N
Definition: hevc.h:37
HEVC_NAL_STSA_R
@ HEVC_NAL_STSA_R
Definition: hevc.h:34
MODE_INTRA
@ MODE_INTRA
Definition: hevcdec.h:154
AVFilmGrainH274Params::color_range
enum AVColorRange color_range
Definition: film_grain_params.h:149
av_display_rotation_set
void av_display_rotation_set(int32_t matrix[9], double angle)
Initialize a transformation matrix describing a pure clockwise rotation by the specified angle (in de...
Definition: display.c:51
AV_FRAME_DATA_DISPLAYMATRIX
@ AV_FRAME_DATA_DISPLAYMATRIX
This side data contains a 3x3 transformation matrix describing an affine transformation that needs to...
Definition: frame.h:85
HEVC_NAL_BLA_W_RADL
@ HEVC_NAL_BLA_W_RADL
Definition: hevc.h:46
SliceHeader::slice_loop_filter_across_slices_enabled_flag
uint8_t slice_loop_filter_across_slices_enabled_flag
Definition: hevcdec.h:284
SAOParams::offset_sign
int offset_sign[3][4]
sao_offset_sign
Definition: hevcdsp.h:36
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
export_stream_params
static void export_stream_params(HEVCContext *s, const HEVCSPS *sps)
Definition: hevcdec.c:324
HEVCLocalContext::ctb_up_left_flag
uint8_t ctb_up_left_flag
Definition: hevcdec.h:444
ff_dovi_ctx_replace
int ff_dovi_ctx_replace(DOVIContext *s, const DOVIContext *s0)
Definition: dovi_rpu.c:64
H2645NAL::temporal_id
int temporal_id
HEVC only, nuh_temporal_id_plus_1 - 1.
Definition: h2645_parse.h:62
av_timecode_get_smpte
uint32_t av_timecode_get_smpte(AVRational rate, int drop, int hh, int mm, int ss, int ff)
Convert sei info to SMPTE 12M binary representation.
Definition: timecode.c:69
RefPicList
Definition: hevcdec.h:236
AV_STEREO3D_VIEW_RIGHT
@ AV_STEREO3D_VIEW_RIGHT
Frame contains only the right view.
Definition: stereo3d.h:161
init_get_bits
static int init_get_bits(GetBitContext *s, const uint8_t *buffer, int bit_size)
Initialize GetBitContext.
Definition: get_bits.h:649
thread.h
ff_thread_await_progress
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before ff_thread_await_progress() has been called on them. reget_buffer() and buffer age optimizations no longer work. *The contents of buffers must not be written to after ff_thread_report_progress() has been called on them. This includes draw_edges(). Porting codecs to frame threading
OFFSET
#define OFFSET(x)
Definition: hevcdec.c:3852
AVFilmGrainParams::seed
uint64_t seed
Seed to use for the synthesis process, if the codec allows for it.
Definition: film_grain_params.h:228
PF_INTRA
@ PF_INTRA
Definition: hevcdec.h:165
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:346
av_malloc
#define av_malloc(s)
Definition: tableprint_vlc.h:30
MODE_SKIP
@ MODE_SKIP
Definition: hevcdec.h:155
HEVCLocalContext::end_of_tiles_x
int end_of_tiles_x
Definition: hevcdec.h:445
AVContentLightMetadata
Content light level needed by to transmit HDR over HDMI (CTA-861.3).
Definition: mastering_display_metadata.h:98
CodingUnit::x
int x
Definition: hevcdec.h:325
skip_bits
static void skip_bits(GetBitContext *s, int n)
Definition: get_bits.h:467
BOUNDARY_LEFT_TILE
#define BOUNDARY_LEFT_TILE
Definition: hevcdec.h:459
AVCodecContext::framerate
AVRational framerate
Definition: avcodec.h:1732
golomb.h
exp golomb vlc stuff
AVCodecInternal::is_copy
int is_copy
When using frame-threaded decoding, this field is set for the first worker thread (e....
Definition: internal.h:53
AV_STEREO3D_SIDEBYSIDE
@ AV_STEREO3D_SIDEBYSIDE
Views are next to each other.
Definition: stereo3d.h:67
PART_2Nx2N
@ PART_2Nx2N
Definition: hevcdec.h:142
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:379
SET_SAO
#define SET_SAO(elem, value)
Definition: hevcdec.c:996
HEVCLocalContext::ctb_up_right_flag
uint8_t ctb_up_right_flag
Definition: hevcdec.h:443
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:116
ff_hevc_clear_refs
void ff_hevc_clear_refs(HEVCContext *s)
Mark all frames in DPB as unused for reference.
Definition: hevc_refs.c:68
PRED_BI
@ PRED_BI
Definition: hevcdec.h:161
U
#define U(x)
Definition: vp56_arith.h:37
av_ceil_log2
#define av_ceil_log2
Definition: common.h:92
ff_hevc_split_coding_unit_flag_decode
int ff_hevc_split_coding_unit_flag_decode(HEVCContext *s, int ct_depth, int x0, int y0)
Definition: hevc_cabac.c:693
fail
#define fail()
Definition: checkasm.h:131
PredictionUnit::intra_pred_mode_c
uint8_t intra_pred_mode_c[4]
Definition: hevcdec.h:363
ff_hevc_sao_merge_flag_decode
int ff_hevc_sao_merge_flag_decode(HEVCContext *s)
Definition: hevc_cabac.c:568
AVCodecContext::thread_count
int thread_count
thread count is used to decide how many independent tasks should be passed to execute()
Definition: avcodec.h:1463
md5
struct AVMD5 * md5
Definition: movenc.c:56
InterPredIdc
InterPredIdc
Definition: hevcdec.h:158
MODE_INTER
@ MODE_INTER
Definition: hevcdec.h:153
HEVCSEIA53Caption
Definition: hevc_sei.h:61
timecode.h
GetBitContext
Definition: get_bits.h:61
HEVCLocalContext::pu
PredictionUnit pu
Definition: hevcdec.h:455
ff_thread_get_buffer
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before as well as code calling up to before the decode process starts Call have so the codec calls ff_thread_report set FF_CODEC_CAP_ALLOCATE_PROGRESS in AVCodec caps_internal and use ff_thread_get_buffer() to allocate frames. The frames must then be freed with ff_thread_release_buffer(). Otherwise decode directly into the user-supplied frames. Call ff_thread_report_progress() after some part of the current picture has decoded. A good place to put this is where draw_horiz_band() is called - add this if it isn 't called anywhere
decode_lt_rps
static int decode_lt_rps(HEVCContext *s, LongTermRPS *rps, GetBitContext *gb)
Definition: hevcdec.c:267
TransformUnit::res_scale_val
int res_scale_val
Definition: hevcdec.h:370
HEVCSEIFilmGrainCharacteristics::present
int present
Definition: hevc_sei.h:118
SliceHeader::short_term_ref_pic_set_size
int short_term_ref_pic_set_size
Definition: hevcdec.h:266
hevc_decoder_class
static const AVClass hevc_decoder_class
Definition: hevcdec.c:3863
AVFilmGrainParams::codec
union AVFilmGrainParams::@301 codec
Additional fields may be added both here and in any structure included.
val
static double val(void *priv, double ch)
Definition: aeval.c:77
HWACCEL_VDPAU
#define HWACCEL_VDPAU(codec)
Definition: hwconfig.h:75
ff_videodsp_init
av_cold void ff_videodsp_init(VideoDSPContext *ctx, int bpc)
Definition: videodsp.c:39
ff_hevc_output_frame
int ff_hevc_output_frame(HEVCContext *s, AVFrame *out, int flush)
Find next frame in output order and put a reference to it in frame.
Definition: hevc_refs.c:176
HEVCSEIFilmGrainCharacteristics::bit_depth_chroma
int bit_depth_chroma
Definition: hevc_sei.h:122
AVCodecContext::coded_height
int coded_height
Definition: avcodec.h:577
SliceHeader::long_term_ref_pic_set_size
int long_term_ref_pic_set_size
Definition: hevcdec.h:269
HEVCSEIFilmGrainCharacteristics::log2_scale_factor
int log2_scale_factor
Definition: hevc_sei.h:128
ss
#define ss(width, name, subs,...)
Definition: cbs_vp9.c:260
ff_hevc_luma_mv_mvp_mode
void ff_hevc_luma_mv_mvp_mode(HEVCContext *s, int x0, int y0, int nPbW, int nPbH, int log2_cb_size, int part_idx, int merge_idx, MvField *mv, int mvp_lx_flag, int LX)
Definition: hevc_mvs.c:583
CTB
#define CTB(tab, x, y)
Definition: hevcdec.c:994
ff_reset_entries
void ff_reset_entries(AVCodecContext *avctx)
Definition: pthread_slice.c:260
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
ff_hevc_decode_nal_sei
int ff_hevc_decode_nal_sei(GetBitContext *gb, void *logctx, HEVCSEI *s, const HEVCParamSets *ps, enum HEVCNALUnitType type)
Definition: hevc_sei.c:552
ff_hevc_skip_flag_decode
int ff_hevc_skip_flag_decode(HEVCContext *s, int x0, int y0, int x_cb, int y_cb)
Definition: hevc_cabac.c:625
ff_hevc_merge_flag_decode
int ff_hevc_merge_flag_decode(HEVCContext *s)
Definition: hevc_cabac.c:799
AVRational::num
int num
Numerator.
Definition: rational.h:59
HEVC_NAL_UNSPEC62
@ HEVC_NAL_UNSPEC62
Definition: hevc.h:91
SliceHeader::slice_segment_addr
unsigned int slice_segment_addr
address (in raster order) of the first block in the current slice
Definition: hevcdec.h:251
AVFilmGrainH274Params::intensity_interval_upper_bound
uint8_t intensity_interval_upper_bound[3][256]
Specifies the upper bound of each intensity interval for which the set of model values applies for th...
Definition: film_grain_params.h:194
hevc_parse.h
MvField::ref_idx
int8_t ref_idx[2]
Definition: hevcdec.h:344
ff_hevc_save_states
void ff_hevc_save_states(HEVCContext *s, int ctb_addr_ts)
Definition: hevc_cabac.c:450
AVFilmGrainH274Params::bit_depth_luma
int bit_depth_luma
Specifies the bit depth used for the luma component.
Definition: film_grain_params.h:142
ff_hevc_deblocking_boundary_strengths
void ff_hevc_deblocking_boundary_strengths(HEVCContext *s, int x0, int y0, int log2_trafo_size)
Definition: hevc_filter.c:723
SAOParams::eo_class
int eo_class[3]
sao_eo_class
Definition: hevcdsp.h:40
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:99
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:409
ff_hevc_prev_intra_luma_pred_flag_decode
int ff_hevc_prev_intra_luma_pred_flag_decode(HEVCContext *s)
Definition: hevc_cabac.c:754
ff_thread_report_progress2
void ff_thread_report_progress2(AVCodecContext *avctx, int field, int thread, int n)
Definition: pthread_slice.c:210
first
trying all byte sequences megabyte in length and selecting the best looking sequence will yield cases to try But first
Definition: rate_distortion.txt:12
hls_decode_entry_wpp
static int hls_decode_entry_wpp(AVCodecContext *avctxt, void *input_ctb_row, int job, int self_id)
Definition: hevcdec.c:2544
AVCodecContext::color_primaries
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
Definition: avcodec.h:952
AV_STEREO3D_FRAMESEQUENCE
@ AV_STEREO3D_FRAMESEQUENCE
Views are alternated temporally.
Definition: stereo3d.h:92
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
QPEL_EXTRA_AFTER
#define QPEL_EXTRA_AFTER
Definition: hevcdec.h:64
HEVC_NAL_BLA_N_LP
@ HEVC_NAL_BLA_N_LP
Definition: hevc.h:47
SAOParams::type_idx
uint8_t type_idx[3]
sao_type_idx
Definition: hevcdsp.h:44
film_grain_params.h
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_cold
#define av_cold
Definition: attributes.h:90
TransformUnit::intra_pred_mode
int intra_pred_mode
Definition: hevcdec.h:373
ff_hevc_hls_mvd_coding
void ff_hevc_hls_mvd_coding(HEVCContext *s, int x0, int y0, int log2_cb_size)
Definition: hevc_cabac.c:1541
init_get_bits8
static int init_get_bits8(GetBitContext *s, const uint8_t *buffer, int byte_size)
Initialize GetBitContext.
Definition: get_bits.h:667
HEVCSEIFilmGrainCharacteristics::full_range
int full_range
Definition: hevc_sei.h:123
HEVC_NAL_RADL_R
@ HEVC_NAL_RADL_R
Definition: hevc.h:36
ff_thread_report_progress
void ff_thread_report_progress(ThreadFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
Definition: pthread_frame.c:595
SliceHeader::cabac_init_flag
uint8_t cabac_init_flag
Definition: hevcdec.h:282
H2645NAL::size
int size
Definition: h2645_parse.h:36
hls_pcm_sample
static int hls_pcm_sample(HEVCContext *s, int x0, int y0, int log2_cb_size)
Definition: hevcdec.c:1442
AVCodecContext::extradata_size
int extradata_size
Definition: avcodec.h:491
AVCodecContext::has_b_frames
int has_b_frames
Size of the frame reordering buffer in the decoder.
Definition: avcodec.h:685
VUI::matrix_coeffs
uint8_t matrix_coeffs
Definition: hevc_ps.h:61
width
#define width
FF_CODEC_DECODE_CB
#define FF_CODEC_DECODE_CB(func)
Definition: codec_internal.h:254
QPEL_EXTRA_BEFORE
#define QPEL_EXTRA_BEFORE
Definition: hevcdec.h:63
stereo3d.h
AVMasteringDisplayMetadata::white_point
AVRational white_point[2]
CIE 1931 xy chromaticity coords of white point.
Definition: mastering_display_metadata.h:47
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:127
s
#define s(width, name)
Definition: cbs_vp9.c:256
ff_thread_await_progress2
void ff_thread_await_progress2(AVCodecContext *avctx, int field, int thread, int shift)
Definition: pthread_slice.c:222
SAO_NOT_APPLIED
@ SAO_NOT_APPLIED
Definition: hevcdec.h:210
hls_sao_param
static void hls_sao_param(HEVCContext *s, int rx, int ry)
Definition: hevcdec.c:1008
set_sps
static int set_sps(HEVCContext *s, const HEVCSPS *sps, enum AVPixelFormat pix_fmt)
Definition: hevcdec.c:495
HEVCSEIFilmGrainCharacteristics::num_intensity_intervals
uint16_t num_intensity_intervals[3]
Definition: hevc_sei.h:130
AV_ZERO32
#define AV_ZERO32(d)
Definition: intreadwrite.h:629
ff_hevc_ref_idx_lx_decode
int ff_hevc_ref_idx_lx_decode(HEVCContext *s, int num_ref_idx_lx)
Definition: hevc_cabac.c:814
s1
#define s1
Definition: regdef.h:38
ff_hevc_nal_is_nonref
static av_always_inline int ff_hevc_nal_is_nonref(enum HEVCNALUnitType type)
Definition: hevcdec.h:640
av_q2d
static double av_q2d(AVRational a)
Convert an AVRational to a double.
Definition: rational.h:104
av_film_grain_params_create_side_data
AVFilmGrainParams * av_film_grain_params_create_side_data(AVFrame *frame)
Allocate a complete AVFilmGrainParams and add it to the frame.
Definition: film_grain_params.c:31
luma_intra_pred_mode
static int luma_intra_pred_mode(HEVCContext *s, int x0, int y0, int pu_size, int prev_intra_luma_pred_flag)
8.4.1
Definition: hevcdec.c:1987
ff_hevc_set_new_ref
int ff_hevc_set_new_ref(HEVCContext *s, AVFrame **frame, int poc)
Definition: hevc_refs.c:137
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:296
SliceHeader::slice_rps
ShortTermRPS slice_rps
Definition: hevcdec.h:267
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
HEVCSEIFilmGrainCharacteristics::model_id
int model_id
Definition: hevc_sei.h:119
ff_hevc_cu_transquant_bypass_flag_decode
int ff_hevc_cu_transquant_bypass_flag_decode(HEVCContext *s)
Definition: hevc_cabac.c:620
IS_IDR
#define IS_IDR(s)
Definition: hevcdec.h:75
ff_hevc_intra_chroma_pred_mode_decode
int ff_hevc_intra_chroma_pred_mode_decode(HEVCContext *s)
Definition: hevc_cabac.c:777
set_ct_depth
static av_always_inline void set_ct_depth(HEVCContext *s, int x0, int y0, int log2_cb_size, int ct_depth)
Definition: hevcdec.c:2067
H2645NAL::data
const uint8_t * data
Definition: h2645_parse.h:35
ff_hevc_slice_rpl
int ff_hevc_slice_rpl(HEVCContext *s)
Construct the reference picture list(s) for the current slice.
Definition: hevc_refs.c:299
RefPicList::ref
struct HEVCFrame * ref[HEVC_MAX_REFS]
Definition: hevcdec.h:237
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demuxing_decoding.c:41
ff_hevc_sao_offset_abs_decode
int ff_hevc_sao_offset_abs_decode(HEVCContext *s)
Definition: hevc_cabac.c:593
H2645NAL::skipped_bytes_pos
int * skipped_bytes_pos
Definition: h2645_parse.h:71
VUI::colour_primaries
uint8_t colour_primaries
Definition: hevc_ps.h:59
HEVC_SLICE_I
@ HEVC_SLICE_I
Definition: hevc.h:98
hls_coding_unit
static int hls_coding_unit(HEVCContext *s, int x0, int y0, int log2_cb_size)
Definition: hevcdec.c:2175
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
SliceHeader::size
int * size
Definition: hevcdec.h:302
HEVCSEIUnregistered::buf_ref
AVBufferRef ** buf_ref
Definition: hevc_sei.h:66
AVFilmGrainH274Params::comp_model_value
int16_t comp_model_value[3][256][6]
Specifies the model values for the component for each intensity interval.
Definition: film_grain_params.h:205
SliceHeader::collocated_list
uint8_t collocated_list
Definition: hevcdec.h:285
atomic_load
#define atomic_load(object)
Definition: stdatomic.h:93
ff_hevc_luma_mv_merge_mode
void ff_hevc_luma_mv_merge_mode(HEVCContext *s, int x0, int y0, int nPbW, int nPbH, int log2_cb_size, int part_idx, int merge_idx, MvField *mv)
Definition: hevc_mvs.c:480
AVCOL_PRI_UNSPECIFIED
@ AVCOL_PRI_UNSPECIFIED
Definition: pixfmt.h:474
AVDISCARD_BIDIR
@ AVDISCARD_BIDIR
discard all bidirectional frames
Definition: defs.h:51
get_se_golomb
static int get_se_golomb(GetBitContext *gb)
read signed exp golomb code.
Definition: golomb.h:239
INTRA_ANGULAR_26
@ INTRA_ANGULAR_26
Definition: hevcdec.h:198
H2645NAL::type
int type
NAL unit type.
Definition: h2645_parse.h:52
CodingUnit::max_trafo_depth
uint8_t max_trafo_depth
MaxTrafoDepth.
Definition: hevcdec.h:333
AV_FRAME_DATA_DYNAMIC_HDR_VIVID
@ AV_FRAME_DATA_DYNAMIC_HDR_VIVID
HDR Vivid dynamic metadata associated with a video frame.
Definition: frame.h:211
SliceHeader::slice_ctb_addr_rs
int slice_ctb_addr_rs
Definition: hevcdec.h:321
ff_thread_ref_frame
int ff_thread_ref_frame(ThreadFrame *dst, const ThreadFrame *src)
Definition: utils.c:891
FF_CODEC_PROPERTY_FILM_GRAIN
#define FF_CODEC_PROPERTY_FILM_GRAIN
Definition: avcodec.h:1847
arg
const char * arg
Definition: jacosubdec.c:67
AVStereo3D::flags
int flags
Additional information about the frame packing.
Definition: stereo3d.h:185
if
if(ret)
Definition: filter_design.txt:179
HEVC_NAL_IDR_N_LP
@ HEVC_NAL_IDR_N_LP
Definition: hevc.h:49
AVFilmGrainH274Params::model_id
int model_id
Specifies the film grain simulation mode.
Definition: film_grain_params.h:137
SliceHeader::pic_output_flag
uint8_t pic_output_flag
Definition: hevcdec.h:261
hls_slice_data_wpp
static int hls_slice_data_wpp(HEVCContext *s, const H2645NAL *nal)
Definition: hevcdec.c:2625
AV_CODEC_CAP_FRAME_THREADS
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:113
AVDISCARD_ALL
@ AVDISCARD_ALL
discard all
Definition: defs.h:54
ff_hevc_sao_offset_sign_decode
int ff_hevc_sao_offset_sign_decode(HEVCContext *s)
Definition: hevc_cabac.c:603
threadframe.h
PredictionUnit::rem_intra_luma_pred_mode
int rem_intra_luma_pred_mode
Definition: hevcdec.h:359
H2645NAL::raw_size
int raw_size
Definition: h2645_parse.h:44
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
IS_BLA
#define IS_BLA(s)
Definition: hevcdec.h:76
ff_hevc_merge_idx_decode
int ff_hevc_merge_idx_decode(HEVCContext *s)
Definition: hevc_cabac.c:788
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
ff_bswapdsp_init
av_cold void ff_bswapdsp_init(BswapDSPContext *c)
Definition: bswapdsp.c:49
HEVC_SLICE_B
@ HEVC_SLICE_B
Definition: hevc.h:96
NULL
#define NULL
Definition: coverity.c:32
hevc_ref_frame
static int hevc_ref_frame(HEVCContext *s, HEVCFrame *dst, HEVCFrame *src)
Definition: hevcdec.c:3543
HEVCSEIFilmGrainCharacteristics::separate_colour_description_present_flag
int separate_colour_description_present_flag
Definition: hevc_sei.h:120
AVCodecContext::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:973
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
HEVCLocalContext::tmp
int16_t tmp[MAX_PB_SIZE *MAX_PB_SIZE]
Definition: hevcdec.h:451
ff_hevc_ps_uninit
void ff_hevc_ps_uninit(HEVCParamSets *ps)
Definition: hevc_ps.c:1744
HEVC_NAL_PPS
@ HEVC_NAL_PPS
Definition: hevc.h:63
LongTermRPS::poc
int poc[32]
Definition: hevcdec.h:230
AVCHROMA_LOC_LEFT
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Definition: pixfmt.h:620
CodingUnit::cu_transquant_bypass_flag
uint8_t cu_transquant_bypass_flag
Definition: hevcdec.h:334
AVCodecContext::internal
struct AVCodecInternal * internal
Private context used for internal data.
Definition: avcodec.h:424
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:78
HEVCSEIFilmGrainCharacteristics::color_primaries
int color_primaries
Definition: hevc_sei.h:124
HEVCLocalContext::first_qp_group
uint8_t first_qp_group
Definition: hevcdec.h:429
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:237
hls_transform_unit
static int hls_transform_unit(HEVCContext *s, int x0, int y0, int xBase, int yBase, int cb_xBase, int cb_yBase, int log2_cb_size, int log2_trafo_size, int blk_idx, int cbf_luma, int *cbf_cb, int *cbf_cr)
Definition: hevcdec.c:1098
AVHWAccel::end_frame
int(* end_frame)(AVCodecContext *avctx)
Called at the end of each frame or field picture.
Definition: avcodec.h:2167
get_bits1
static unsigned int get_bits1(GetBitContext *s)
Definition: get_bits.h:498
ff_dovi_update_cfg
void ff_dovi_update_cfg(DOVIContext *s, const AVDOVIDecoderConfigurationRecord *cfg)
Read the contents of an AVDOVIDecoderConfigurationRecord (usually provided by stream side data) and u...
Definition: dovi_rpu.c:83
profiles.h
av_buffer_pool_uninit
void av_buffer_pool_uninit(AVBufferPool **ppool)
Mark the pool as being available for freeing.
Definition: buffer.c:322
L0
#define L0
Definition: hevcdec.h:57
HEVCFrame::rpl_tab
RefPicListTab ** rpl_tab
Definition: hevcdec.h:400
LongTermRPS::poc_msb_present
uint8_t poc_msb_present[32]
Definition: hevcdec.h:231
HEVC_NAL_SEI_SUFFIX
@ HEVC_NAL_SEI_SUFFIX
Definition: hevc.h:69
HEVCSEIFilmGrainCharacteristics::intensity_interval_lower_bound
uint8_t intensity_interval_lower_bound[3][256]
Definition: hevc_sei.h:132
HEVC_NAL_CRA_NUT
@ HEVC_NAL_CRA_NUT
Definition: hevc.h:50
double
double
Definition: af_crystalizer.c:132
av_frame_new_side_data_from_buf
AVFrameSideData * av_frame_new_side_data_from_buf(AVFrame *frame, enum AVFrameSideDataType type, AVBufferRef *buf)
Add a new side data to a frame from an existing AVBufferRef.
Definition: frame.c:640
ONLY_IF_THREADS_ENABLED
#define ONLY_IF_THREADS_ENABLED(x)
Define a function with only the non-default version specified.
Definition: internal.h:156
hevc_pel_weight
static const uint8_t hevc_pel_weight[65]
Definition: hevcdec.c:55
PART_Nx2N
@ PART_Nx2N
Definition: hevcdec.h:144
RefPicListTab
Definition: hevcdec.h:243
BOUNDARY_UPPER_TILE
#define BOUNDARY_UPPER_TILE
Definition: hevcdec.h:461
vps
static int FUNC() vps(CodedBitstreamContext *ctx, RWContext *rw, H265RawVPS *current)
Definition: cbs_h265_syntax_template.c:423
ff_hevc_decode_extradata
int ff_hevc_decode_extradata(const uint8_t *data, int size, HEVCParamSets *ps, HEVCSEI *sei, int *is_nalff, int *nal_length_size, int err_recognition, int apply_defdispwin, void *logctx)
Definition: hevc_parse.c:80
AV_EF_EXPLODE
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: avcodec.h:1355
SliceHeader::nb_refs
unsigned int nb_refs[2]
Definition: hevcdec.h:277
Mv::x
int16_t x
horizontal component of motion vector
Definition: hevcdec.h:338
ff_slice_thread_init_progress
int av_cold ff_slice_thread_init_progress(AVCodecContext *avctx)
Definition: pthread_slice.c:179
AVCodecContext::level
int level
level
Definition: avcodec.h:1673
AVCOL_RANGE_UNSPECIFIED
@ AVCOL_RANGE_UNSPECIFIED
Definition: pixfmt.h:565
HEVC_NAL_RASL_R
@ HEVC_NAL_RASL_R
Definition: hevc.h:38
PF_BI
@ PF_BI
Definition: hevcdec.h:168
AV_FRAME_DATA_SEI_UNREGISTERED
@ AV_FRAME_DATA_SEI_UNREGISTERED
User data unregistered metadata associated with a video frame.
Definition: frame.h:178
SAMPLE_CTB
#define SAMPLE_CTB(tab, x, y)
Definition: hevcdec.h:73
HEVCWindow
Definition: hevc_ps.h:42
SCAN_HORIZ
@ SCAN_HORIZ
Definition: hevcdec.h:225
hevc_data.h
ff_hevc_frame_rps
int ff_hevc_frame_rps(HEVCContext *s)
Construct the reference picture sets for the current frame.
Definition: hevc_refs.c:451
HEVCLocalContext::edge_emu_buffer
uint8_t edge_emu_buffer[(MAX_PB_SIZE+7) *EDGE_EMU_BUFFER_STRIDE *2]
Definition: hevcdec.h:448
IS_IRAP
#define IS_IRAP(s)
Definition: hevcdec.h:78
HEVCSEIFilmGrainCharacteristics::blending_mode_id
int blending_mode_id
Definition: hevc_sei.h:127
LongTermRPS::used
uint8_t used[32]
Definition: hevcdec.h:232
SliceHeader::colour_plane_id
uint8_t colour_plane_id
colour plane associated with the current slice when separate_colour_plane_flag is set.
Definition: hevcdec.h:262
ff_hevc_mvp_lx_flag_decode
int ff_hevc_mvp_lx_flag_decode(HEVCContext *s)
Definition: hevc_cabac.c:830
PART_nLx2N
@ PART_nLx2N
Definition: hevcdec.h:148
SliceHeader::dependent_slice_segment_flag
uint8_t dependent_slice_segment_flag
Definition: hevcdec.h:260
POS
#define POS(c_idx, x, y)
AVDISCARD_NONKEY
@ AVDISCARD_NONKEY
discard all frames except keyframes
Definition: defs.h:53
SliceHeader::first_slice_in_pic_flag
uint8_t first_slice_in_pic_flag
Definition: hevcdec.h:259
HEVCLocalContext::ctb_left_flag
uint8_t ctb_left_flag
Definition: hevcdec.h:441
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
chroma_mc_uni
static void chroma_mc_uni(HEVCContext *s, uint8_t *dst0, ptrdiff_t dststride, uint8_t *src0, ptrdiff_t srcstride, int reflist, int x_off, int y_off, int block_w, int block_h, struct MvField *current_mv, int chroma_weight, int chroma_offset)
8.5.3.2.2.2 Chroma sample uniprediction interpolation process
Definition: hevcdec.c:1651
ff_dovi_ctx_flush
void ff_dovi_ctx_flush(DOVIContext *s)
Partially reset the internal state.
Definition: dovi_rpu.c:53
ff_hevc_pred_mode_decode
int ff_hevc_pred_mode_decode(HEVCContext *s)
Definition: hevc_cabac.c:688
AVPacket::size
int size
Definition: packet.h:375
BOUNDARY_UPPER_SLICE
#define BOUNDARY_UPPER_SLICE
Definition: hevcdec.h:460
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:117
hevcdec.h
ff_hevc_set_neighbour_available
void ff_hevc_set_neighbour_available(HEVCContext *s, int x0, int y0, int nPbW, int nPbH)
Definition: hevc_mvs.c:43
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:343
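A minimal sketch of sharing frame data by reference (borrow_frame is an illustrative helper): av_frame_ref() creates new references to the source buffers without copying pixels, and av_frame_unref() drops them again. dst must be freshly allocated or already unreferenced.
    #include <libavutil/frame.h>

    static int borrow_frame(AVFrame *dst, const AVFrame *src)
    {
        int ret = av_frame_ref(dst, src);  /* new references, no pixel copy */
        if (ret < 0)
            return ret;
        /* ... read dst ... */
        av_frame_unref(dst);               /* drop the references again */
        return 0;
    }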
decode_nal_units
static int decode_nal_units(HEVCContext *s, const uint8_t *buf, int length)
Definition: hevcdec.c:3288
codec_internal.h
HEVCSEIFilmGrainCharacteristics::intensity_interval_upper_bound
uint8_t intensity_interval_upper_bound[3][256]
Definition: hevc_sei.h:133
AVFilmGrainH274Params::component_model_present
int component_model_present[3]
Indicates if the modelling of film grain for a given component is present.
Definition: film_grain_params.h:170
SAOParams::offset_abs
int offset_abs[3][4]
sao_offset_abs
Definition: hevcdsp.h:35
AV_PIX_FMT_YUV422P10LE
@ AV_PIX_FMT_YUV422P10LE
planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
Definition: pixfmt.h:151
print_md5
static void print_md5(void *log_ctx, int level, uint8_t md5[16])
Definition: hevcdec.c:3377
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:121
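A short sketch of how this macro is typically used (report is an illustrative helper): av_err2str() expands to a temporary string, so it should only appear directly in a function argument.
    #include <libavutil/error.h>
    #include <libavutil/log.h>

    static void report(void *logctx, int ret)
    {
        av_log(logctx, AV_LOG_ERROR, "decoding failed: %s\n", av_err2str(ret));
    }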
INTRA_PLANAR
@ INTRA_PLANAR
Definition: hevcdec.h:172
HEVCFrame::rpl_buf
AVBufferRef * rpl_buf
Definition: hevcdec.h:407
ff_hevc_decode_nal_sps
int ff_hevc_decode_nal_sps(GetBitContext *gb, AVCodecContext *avctx, HEVCParamSets *ps, int apply_defdispwin)
Definition: hevc_ps.c:1247
PART_2NxnD
@ PART_2NxnD
Definition: hevcdec.h:147
ff_hevc_cabac_init
int ff_hevc_cabac_init(HEVCContext *s, int ctb_addr_ts, int thread)
Definition: hevc_cabac.c:511
FF_CODEC_CAP_EXPORTS_CROPPING
#define FF_CODEC_CAP_EXPORTS_CROPPING
The decoder sets the cropping fields in the output frames manually.
Definition: codec_internal.h:57
size
int size
Definition: twinvq_data.h:10344
HEVC_NAL_BLA_W_LP
@ HEVC_NAL_BLA_W_LP
Definition: hevc.h:45
SCAN_VERT
@ SCAN_VERT
Definition: hevcdec.h:226
VUI::transfer_characteristic
uint8_t transfer_characteristic
Definition: hevc_ps.h:60
FF_CODEC_CAP_ALLOCATE_PROGRESS
#define FF_CODEC_CAP_ALLOCATE_PROGRESS
Definition: codec_internal.h:66
ff_hevc_compute_poc
int ff_hevc_compute_poc(const HEVCSPS *sps, int pocTid0, int poc_lsb, int nal_unit_type)
Compute POC of the current frame and return it.
Definition: hevc_ps.c:1760
H2645NAL::gb
GetBitContext gb
Definition: h2645_parse.h:47
intra_prediction_unit_default_value
static void intra_prediction_unit_default_value(HEVCContext *s, int x0, int y0, int log2_cb_size)
Definition: hevcdec.c:2152
SliceHeader::collocated_ref_idx
unsigned int collocated_ref_idx
Definition: hevcdec.h:287
SliceHeader::entry_point_offset
unsigned * entry_point_offset
Definition: hevcdec.h:300
VUI
Definition: hevc_ps.h:49
H2645NAL
Definition: h2645_parse.h:34
hevc_await_progress
static void hevc_await_progress(HEVCContext *s, HEVCFrame *ref, const Mv *mv, int y0, int height)
Definition: hevcdec.c:1806
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:413
ff_hevc_decode_nal_vps
int ff_hevc_decode_nal_vps(GetBitContext *gb, AVCodecContext *avctx, HEVCParamSets *ps)
Definition: hevc_ps.c:455
pic_arrays_free
static void pic_arrays_free(HEVCContext *s)
NOTE: Each function hls_foo corresponds to the function foo in the specification (HLS stands for High ...
Definition: hevcdec.c:67
AVFrameSideData::data
uint8_t * data
Definition: frame.h:233
TransformUnit::chroma_mode_c
int chroma_mode_c
Definition: hevcdec.h:375
FF_THREAD_SLICE
#define FF_THREAD_SLICE
Decode more than one part of a single frame at once.
Definition: avcodec.h:1475
AVFilmGrainParams
This structure describes how to handle film grain synthesis in video for specific codecs.
Definition: film_grain_params.h:216
GetBitContext::index
int index
Definition: get_bits.h:67
SliceHeader::short_term_ref_pic_set_sps_flag
int short_term_ref_pic_set_sps_flag
Definition: hevcdec.h:265
AVCHROMA_LOC_UNSPECIFIED
@ AVCHROMA_LOC_UNSPECIFIED
Definition: pixfmt.h:619
SliceHeader::no_output_of_prior_pics_flag
uint8_t no_output_of_prior_pics_flag
Definition: hevcdec.h:274
SliceHeader::max_num_merge_cand
unsigned int max_num_merge_cand
5 - 5_minus_max_num_merge_cand
Definition: hevcdec.h:298
AVCodecHWConfigInternal
Definition: hwconfig.h:29
MvField
Definition: hevcdec.h:342
QPEL_EXTRA
#define QPEL_EXTRA
Definition: hevcdec.h:65
PF_L1
@ PF_L1
Definition: hevcdec.h:167
ff_hevc_unref_frame
void ff_hevc_unref_frame(HEVCContext *s, HEVCFrame *frame, int flags)
Definition: hevc_refs.c:31
split
static char * split(char *message, char delim)
Definition: af_channelmap.c:81
get_format
static enum AVPixelFormat get_format(HEVCContext *s, const HEVCSPS *sps)
Definition: hevcdec.c:400
ff_h2645_packet_split
int ff_h2645_packet_split(H2645Packet *pkt, const uint8_t *buf, int length, void *logctx, int is_nalff, int nal_length_size, enum AVCodecID codec_id, int small_padding, int use_ref)
Split an input packet into NAL units.
Definition: h2645_parse.c:396
height
#define height
av_reallocp_array
int av_reallocp_array(void *ptr, size_t nmemb, size_t size)
Allocate, reallocate an array through a pointer to a pointer.
Definition: mem.c:233
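A minimal sketch of growing a heap array through a pointer-to-pointer, similar to how the slice header's entry_point_offset array is resized (grow_offsets is an illustrative helper): on failure the array is freed, the pointer is set to NULL and a negative AVERROR is returned.
    #include <stddef.h>
    #include <libavutil/mem.h>

    static int grow_offsets(unsigned **offsets, size_t new_count)
    {
        return av_reallocp_array(offsets, new_count, sizeof(**offsets));
    }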
hevc_frame_end
static int hevc_frame_end(HEVCContext *s)
Definition: hevcdec.c:3075
AV_CODEC_CAP_SLICE_THREADS
#define AV_CODEC_CAP_SLICE_THREADS
Codec supports slice-based (or partition-based) multithreading.
Definition: codec.h:117
HWACCEL_D3D11VA
#define HWACCEL_D3D11VA(codec)
Definition: hwconfig.h:79
ff_hevc_pcm_flag_decode
int ff_hevc_pcm_flag_decode(HEVCContext *s)
Definition: hevc_cabac.c:749
av_content_light_metadata_create_side_data
AVContentLightMetadata * av_content_light_metadata_create_side_data(AVFrame *frame)
Allocate a complete AVContentLightMetadata and add it to the frame.
Definition: mastering_display_metadata.c:55
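A hedged sketch of exporting content light level metadata as frame side data (add_light_level is an illustrative helper; the numbers are placeholders, not values taken from any stream):
    #include <libavutil/error.h>
    #include <libavutil/frame.h>
    #include <libavutil/mastering_display_metadata.h>

    static int add_light_level(AVFrame *frame)
    {
        AVContentLightMetadata *m = av_content_light_metadata_create_side_data(frame);
        if (!m)
            return AVERROR(ENOMEM);
        m->MaxCLL  = 1000;  /* max content light level, cd/m^2 (illustrative) */
        m->MaxFALL = 400;   /* max frame-average light level, cd/m^2 (illustrative) */
        return 0;
    }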
offset
it's the only field you need to keep, assuming you have a context; there is some magic you don't need to care about around this, just let it be.
Definition: writing_filters.txt:86
ff_hevc_cbf_cb_cr_decode
int ff_hevc_cbf_cb_cr_decode(HEVCContext *s, int trafo_depth)
Definition: hevc_cabac.c:878
attributes.h
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:305
av_buffer_alloc
AVBufferRef * av_buffer_alloc(size_t size)
Allocate an AVBuffer of the given size using av_malloc().
Definition: buffer.c:77
HWACCEL_NVDEC
#define HWACCEL_NVDEC(codec)
Definition: hwconfig.h:71
AVFilmGrainParams::h274
AVFilmGrainH274Params h274
Definition: film_grain_params.h:237
hls_slice_data
static int hls_slice_data(HEVCContext *s)
Definition: hevcdec.c:2533
TransformUnit::cu_qp_offset_cb
int8_t cu_qp_offset_cb
Definition: hevcdec.h:378
pic_arrays_init
static int pic_arrays_init(HEVCContext *s, const HEVCSPS *sps)
Definition: hevcdec.c:95
AV_STEREO3D_FLAG_INVERT
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
Definition: stereo3d.h:167
HEVCFrame::rpl_tab_buf
AVBufferRef * rpl_tab_buf
Definition: hevcdec.h:406
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:119
MvField::pred_flag
int8_t pred_flag
Definition: hevcdec.h:345
HEVCLocalContext::ct_depth
int ct_depth
Definition: hevcdec.h:453
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:191
FF_THREAD_FRAME
#define FF_THREAD_FRAME
Decode more than one frame at once.
Definition: avcodec.h:1474
ff_init_cabac_decoder
int ff_init_cabac_decoder(CABACContext *c, const uint8_t *buf, int buf_size)
Definition: cabac.c:162
AV_PIX_FMT_VDPAU
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:187
PART_nRx2N
@ PART_nRx2N
Definition: hevcdec.h:149
EPEL_EXTRA_BEFORE
#define EPEL_EXTRA_BEFORE
Definition: hevcdec.h:60
AV_PIX_FMT_VIDEOTOOLBOX
@ AV_PIX_FMT_VIDEOTOOLBOX
hardware decoding through Videotoolbox
Definition: pixfmt.h:274
SliceHeader::slice_cb_qp_offset
int slice_cb_qp_offset
Definition: hevcdec.h:290
SliceHeader
Definition: hevcdec.h:247
HEVCFrame::frame
AVFrame * frame
Definition: hevcdec.h:394
HEVC_NAL_TRAIL_R
@ HEVC_NAL_TRAIL_R
Definition: hevc.h:30
hls_decode_entry
static int hls_decode_entry(AVCodecContext *avctxt, void *isFilterThread)
Definition: hevcdec.c:2472
ff_hevc_inter_pred_idc_decode
int ff_hevc_inter_pred_idc_decode(HEVCContext *s, int nPbW, int nPbH)
Definition: hevc_cabac.c:804
ff_hevc_cu_qp_delta_sign_flag
int ff_hevc_cu_qp_delta_sign_flag(HEVCContext *s)
Definition: hevc_cabac.c:667
hevc_frame_start
static int hevc_frame_start(HEVCContext *s)
Definition: hevcdec.c:3004
av_md5_init
void av_md5_init(AVMD5 *ctx)
Initialize MD5 hashing.
Definition: md5.c:141
ff_h274_apply_film_grain
int ff_h274_apply_film_grain(AVFrame *out_frame, const AVFrame *in_frame, H274FilmGrainDatabase *database, const AVFilmGrainParams *params)
Definition: h274.c:217
SliceHeader::slice_sample_adaptive_offset_flag
uint8_t slice_sample_adaptive_offset_flag[3]
Definition: hevcdec.h:279
AVDISCARD_NONINTRA
@ AVDISCARD_NONINTRA
discard all non intra frames
Definition: defs.h:52
av_timecode_make_smpte_tc_string2
char * av_timecode_make_smpte_tc_string2(char *buf, AVRational rate, uint32_t tcsmpte, int prevent_df, int skip_field)
Get the timecode string from the SMPTE timecode format.
Definition: timecode.c:138
AVCodecContext::properties
unsigned properties
Properties of the stream that gets decoded.
Definition: avcodec.h:1844
HEVCFrame
Definition: hevcdec.h:393
AVCodecContext::extradata
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
Definition: avcodec.h:490
av_packet_get_side_data
uint8_t * av_packet_get_side_data(const AVPacket *pkt, enum AVPacketSideDataType type, size_t *size)
Get side information from packet.
Definition: avpacket.c:251
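A minimal sketch of reading packet side data, the mechanism the decoder uses to pick up replacement extradata (packet_new_extradata is an illustrative helper): the returned pointer is owned by the packet and NULL when no such side data is present.
    #include <libavcodec/packet.h>

    static const uint8_t *packet_new_extradata(const AVPacket *pkt, size_t *size)
    {
        return av_packet_get_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, size);
    }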
HEVCLocalContext::gb
GetBitContext gb
Definition: hevcdec.h:431
ff_hevc_cbf_luma_decode
int ff_hevc_cbf_luma_decode(HEVCContext *s, int trafo_depth)
Definition: hevc_cabac.c:883
internal.h
EPEL_EXTRA_AFTER
#define EPEL_EXTRA_AFTER
Definition: hevcdec.h:61
AVFilmGrainH274Params::num_intensity_intervals
uint16_t num_intensity_intervals[3]
Specifies the number of intensity intervals for which a specific set of model values has been estimat...
Definition: film_grain_params.h:176
HEVCFrame::ctb_count
int ctb_count
Definition: hevcdec.h:401
src2
const pixel * src2
Definition: h264pred_template.c:422
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
display.h
SliceHeader::offset
int * offset
Definition: hevcdec.h:301
AV_STEREO3D_TOPBOTTOM
@ AV_STEREO3D_TOPBOTTOM
Views are on top of each other.
Definition: stereo3d.h:79
common.h
HEVCFrame::sequence
uint16_t sequence
A sequence counter, so that old frames are output first after a POC reset.
Definition: hevcdec.h:416
SliceHeader::mvd_l1_zero_flag
uint8_t mvd_l1_zero_flag
Definition: hevcdec.h:280
delta
float delta
Definition: vorbis_enc_data.h:430
md5.h
AV_CODEC_ID_HEVC
@ AV_CODEC_ID_HEVC
Definition: codec_id.h:224
ff_hevc_bump_frame
void ff_hevc_bump_frame(HEVCContext *s)
Definition: hevc_refs.c:241
av_always_inline
#define av_always_inline
Definition: attributes.h:49
HEVC_SLICE_P
@ HEVC_SLICE_P
Definition: hevc.h:97
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
av_frame_move_ref
void av_frame_move_ref(AVFrame *dst, AVFrame *src)
Move everything contained in src to dst and reset src.
Definition: frame.c:506
PF_L0
@ PF_L0
Definition: hevcdec.h:166
EDGE_EMU_BUFFER_STRIDE
#define EDGE_EMU_BUFFER_STRIDE
Definition: hevcdec.h:67
FF_CODEC_CAP_INIT_THREADSAFE
#define FF_CODEC_CAP_INIT_THREADSAFE
The codec does not modify any global variables in the init function, allowing to call the init functi...
Definition: codec_internal.h:31
tab_mode_idx
static const uint8_t tab_mode_idx[]
Definition: hevcdec.c:2080
cabac_functions.h
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:477
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:264
HEVCLocalContext::qp_y
int8_t qp_y
Definition: hevcdec.h:434
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:203
av_buffer_replace
int av_buffer_replace(AVBufferRef **pdst, const AVBufferRef *src)
Ensure dst refers to the same data as src.
Definition: buffer.c:233
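A minimal sketch of keeping a persistent reference in sync with a source buffer (keep_ref is an illustrative helper): the previous contents of *dst are unreferenced, and passing src == NULL simply unreferences *dst.
    #include <libavutil/buffer.h>

    static int keep_ref(AVBufferRef **dst, const AVBufferRef *src)
    {
        return av_buffer_replace(dst, src);  /* 0 on success, negative AVERROR on failure */
    }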
AVCodecContext::chroma_sample_location
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
Definition: avcodec.h:980
AVMasteringDisplayMetadata
Mastering display metadata capable of representing the color volume of the display used to master the...
Definition: mastering_display_metadata.h:38
HEVC_NAL_TSA_R
@ HEVC_NAL_TSA_R
Definition: hevc.h:32
AVCOL_SPC_UNSPECIFIED
@ AVCOL_SPC_UNSPECIFIED
Definition: pixfmt.h:528
SliceHeader::list_entry_lx
unsigned int list_entry_lx[2][32]
Definition: hevcdec.h:271
AVFilmGrainH274Params::color_primaries
enum AVColorPrimaries color_primaries
Definition: film_grain_params.h:150
AVCodecContext::height
int height
Definition: avcodec.h:562
HEVCSEIA53Caption::buf_ref
AVBufferRef * buf_ref
Definition: hevc_sei.h:62
hevc_decode_extradata
static int hevc_decode_extradata(HEVCContext *s, uint8_t *buf, int length, int first)
Definition: hevcdec.c:3450
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:599
av_md5_final
void av_md5_final(AVMD5 *ctx, uint8_t *dst)
Finish hashing and output digest value.
Definition: md5.c:186
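A minimal sketch of the MD5 flow (alloc, init, update, final) that underlies the decoded-picture-hash check; md5_of is an illustrative helper and digest must hold 16 bytes.
    #include <libavutil/error.h>
    #include <libavutil/md5.h>
    #include <libavutil/mem.h>

    static int md5_of(const uint8_t *data, size_t len, uint8_t digest[16])
    {
        struct AVMD5 *ctx = av_md5_alloc();
        if (!ctx)
            return AVERROR(ENOMEM);
        av_md5_init(ctx);
        av_md5_update(ctx, data, len);
        av_md5_final(ctx, digest);
        av_free(ctx);
        return 0;
    }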
hevc_decode_init
static av_cold int hevc_decode_init(AVCodecContext *avctx)
Definition: hevcdec.c:3801
HEVCFrame::poc
int poc
Definition: hevcdec.h:402
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:582
AVFilmGrainH274Params::intensity_interval_lower_bound
uint8_t intensity_interval_lower_bound[3][256]
Specifies the lower bounds of each intensity interval for which the set of model values applies for the...
Definition: film_grain_params.h:188
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:272
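A minimal sketch of a zero-initialized, overflow-checked array allocation in the style of pic_arrays_init() (alloc_per_ctb is an illustrative helper): av_calloc() returns NULL on overflow or OOM, and the result is released with av_freep().
    #include <stddef.h>
    #include <libavutil/mem.h>

    static void *alloc_per_ctb(size_t ctb_count, size_t elem_size)
    {
        return av_calloc(ctb_count, elem_size);  /* free later with av_freep(&ptr) */
    }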
HWACCEL_VIDEOTOOLBOX
#define HWACCEL_VIDEOTOOLBOX(codec)
Definition: hwconfig.h:77
hevc.h
ff_hevc_cu_chroma_qp_offset_idx
int ff_hevc_cu_chroma_qp_offset_idx(HEVCContext *s)
Definition: hevc_cabac.c:677
SAOParams
Definition: hevcdsp.h:34
SliceHeader::short_term_rps
const ShortTermRPS * short_term_rps
Definition: hevcdec.h:268
stride
#define stride
Definition: h264pred_template.c:537
ff_dovi_rpu_parse
int ff_dovi_rpu_parse(DOVIContext *s, const uint8_t *rpu, size_t rpu_size)
Parse the contents of a Dovi RPU NAL and update the parsed values in the DOVIContext struct.
Definition: dovi_rpu.c:194
HEVC_NAL_VPS
@ HEVC_NAL_VPS
Definition: hevc.h:61
SliceHeader::cu_chroma_qp_offset_enabled_flag
uint8_t cu_chroma_qp_offset_enabled_flag
Definition: hevcdec.h:293
HEVC_NAL_IDR_W_RADL
@ HEVC_NAL_IDR_W_RADL
Definition: hevc.h:48
ff_hevc_cu_chroma_qp_offset_flag
int ff_hevc_cu_chroma_qp_offset_flag(HEVCContext *s)
Definition: hevc_cabac.c:672
av_buffer_allocz
AVBufferRef * av_buffer_allocz(size_t size)
Same as av_buffer_alloc(), except the returned buffer will be initialized to zero.
Definition: buffer.c:93
AV_PKT_DATA_DOVI_CONF
@ AV_PKT_DATA_DOVI_CONF
DOVI configuration ref: dolby-vision-bitstreams-within-the-iso-base-media-file-format-v2....
Definition: packet.h:284
H2645NAL::raw_data
const uint8_t * raw_data
Definition: h2645_parse.h:45
ff_hevc_reset_sei
void ff_hevc_reset_sei(HEVCSEI *s)
Reset SEI values that are stored on the Context.
Definition: hevc_sei.c:570
VUI::colour_description_present_flag
int colour_description_present_flag
Definition: hevc_ps.h:58
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:71
frame
these buffered frames must be flushed immediately if a new input produces new ones; the filter must not call request_frame to get more, it must just process the frame or queue it. The task of requesting more frames is left to the filter's request_frame method or to the application. If a filter has several inputs, the filter must be ready for frames arriving randomly on any input; any filter with several inputs will most likely require some kind of queuing mechanism. It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced. request_frame: for filters that do not use the activate callback, this method is called when a frame is wanted on an output. For a source, it should directly call filter_frame on the corresponding output. For a filter, if there are queued frames already, one of these frames should be pushed; if not, the filter should request a frame on one of its inputs, repeatedly, until at least one frame has been pushed. Return, or at least make progress towards producing a frame.
Definition: filter_design.txt:264
PRED_L1
@ PRED_L1
Definition: hevcdec.h:160
PredictionUnit::mvd
Mv mvd
Definition: hevcdec.h:361
SliceHeader::disable_deblocking_filter_flag
uint8_t disable_deblocking_filter_flag
slice_header_disable_deblocking_filter_flag
Definition: hevcdec.h:283
ff_hevc_dsp_init
void ff_hevc_dsp_init(HEVCDSPContext *hevcdsp, int bit_depth)
Definition: hevcdsp.c:126
HEVCLocalContext::edge_emu_buffer2
uint8_t edge_emu_buffer2[(MAX_PB_SIZE+7) *EDGE_EMU_BUFFER_STRIDE *2]
Definition: hevcdec.h:450
AV_EF_CRCCHECK
#define AV_EF_CRCCHECK
Verify checksums embedded in the bitstream (could be of either encoded or decoded data,...
Definition: avcodec.h:1352
AVStereo3D::type
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:180
sps
static int FUNC() sps(CodedBitstreamContext *ctx, RWContext *rw, H264RawSPS *current)
Definition: cbs_h264_syntax_template.c:260
hevc_init_context
static av_cold int hevc_init_context(AVCodecContext *avctx)
Definition: hevcdec.c:3639
pos
unsigned int pos
Definition: spdifenc.c:412
hevc_luma_mv_mvp_mode
static void hevc_luma_mv_mvp_mode(HEVCContext *s, int x0, int y0, int nPbW, int nPbH, int log2_cb_size, int part_idx, int merge_idx, MvField *mv)
Definition: hevcdec.c:1816
ff_thread_finish_setup
the pkt_dts and pkt_pts fields in AVFrame will work as usual. Restrictions: codecs whose streams don't reset across frames will not work, because their bitstreams cannot be decoded in parallel. The contents of buffers must not be read before ..., as well as code calling up to before the decode process starts. Call ff_thread_finish_setup() afterwards. If some code can't be moved ...
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:410
HEVC_NAL_EOS_NUT
@ HEVC_NAL_EOS_NUT
Definition: hevc.h:65
ff_hevc_rem_intra_luma_pred_mode_decode
int ff_hevc_rem_intra_luma_pred_mode_decode(HEVCContext *s)
Definition: hevc_cabac.c:767
ff_hevc_frame_nb_refs
int ff_hevc_frame_nb_refs(const HEVCContext *s)
Get the number of candidate references for the current frame.
Definition: hevc_refs.c:511
hls_prediction_unit
static void hls_prediction_unit(HEVCContext *s, int x0, int y0, int nPbW, int nPbH, int log2_cb_size, int partIdx, int idx)
Definition: hevcdec.c:1861
HEVCLocalContext::boundary_flags
int boundary_flags
Definition: hevcdec.h:464
HEVCSEIFilmGrainCharacteristics::comp_model_value
int16_t comp_model_value[3][256][6]
Definition: hevc_sei.h:134
HEVC_NAL_TRAIL_N
@ HEVC_NAL_TRAIL_N
Definition: hevc.h:29
LongTermRPS
Definition: hevcdec.h:229
ff_set_sar
int ff_set_sar(AVCodecContext *avctx, AVRational sar)
Check that the provided sample aspect ratio is valid and set it on the codec context.
Definition: utils.c:105
SliceHeader::slice_type
enum HEVCSliceType slice_type
Definition: hevcdec.h:255
ff_hevc_flush_dpb
void ff_hevc_flush_dpb(HEVCContext *s)
Drop all frames currently in DPB.
Definition: hevc_refs.c:77
HEVC_NAL_AUD
@ HEVC_NAL_AUD
Definition: hevc.h:64
AV_FRAME_DATA_DYNAMIC_HDR_PLUS
@ AV_FRAME_DATA_DYNAMIC_HDR_PLUS
HDR dynamic metadata associated with a video frame.
Definition: frame.h:159
AVCodecContext
main external API structure.
Definition: avcodec.h:389
AVCodecContext::active_thread_type
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:1482
SliceHeader::slice_qp
int8_t slice_qp
Definition: hevcdec.h:305
hls_coding_quadtree
static int hls_coding_quadtree(HEVCContext *s, int x0, int y0, int log2_cb_size, int cb_depth)
Definition: hevcdec.c:2338
AV_FILM_GRAIN_PARAMS_H274
@ AV_FILM_GRAIN_PARAMS_H274
The union is valid when interpreted as AVFilmGrainH274Params (codec.h274)
Definition: film_grain_params.h:35
HEVC_MAX_REFS
@ HEVC_MAX_REFS
Definition: hevc.h:119
av_mastering_display_metadata_create_side_data
AVMasteringDisplayMetadata * av_mastering_display_metadata_create_side_data(AVFrame *frame)
Allocate a complete AVMasteringDisplayMetadata and add it to the frame.
Definition: mastering_display_metadata.c:32
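A hedged sketch of exporting mastering display luminance as frame side data (add_mastering is an illustrative helper; the luminance values are placeholders, not taken from any stream):
    #include <libavutil/error.h>
    #include <libavutil/frame.h>
    #include <libavutil/mastering_display_metadata.h>
    #include <libavutil/rational.h>

    static int add_mastering(AVFrame *frame)
    {
        AVMasteringDisplayMetadata *m = av_mastering_display_metadata_create_side_data(frame);
        if (!m)
            return AVERROR(ENOMEM);
        m->min_luminance = av_make_q(1, 10000);  /* 0.0001 cd/m^2 (illustrative) */
        m->max_luminance = av_make_q(1000, 1);   /* 1000 cd/m^2 (illustrative) */
        m->has_luminance = 1;
        return 0;
    }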
SUBDIVIDE
#define SUBDIVIDE(x, y, idx)
PredictionUnit::merge_flag
uint8_t merge_flag
Definition: hevcdec.h:362
av_md5_alloc
struct AVMD5 * av_md5_alloc(void)
Allocate an AVMD5 context.
Definition: md5.c:48
AV_PKT_DATA_NEW_EXTRADATA
@ AV_PKT_DATA_NEW_EXTRADATA
The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was...
Definition: packet.h:56
AVRational::den
int den
Denominator.
Definition: rational.h:60
pred_weight_table
static int pred_weight_table(HEVCContext *s, GetBitContext *gb)
Definition: hevcdec.c:151
SliceHeader::slice_cr_qp_offset
int slice_cr_qp_offset
Definition: hevcdec.h:291
export_stream_params_from_sei
static int export_stream_params_from_sei(HEVCContext *s)
Definition: hevcdec.c:381
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
HEVCContext
Definition: hevcdec.h:467
AVCodecContext::profile
int profile
profile
Definition: avcodec.h:1547
CodingUnit::pred_mode
enum PredMode pred_mode
PredMode.
Definition: hevcdec.h:328
SliceHeader::pic_order_cnt_lsb
int pic_order_cnt_lsb
Definition: hevcdec.h:257
HEVCSEIFilmGrainCharacteristics::transfer_characteristics
int transfer_characteristics
Definition: hevc_sei.h:125
HEVCLocalContext::qPy_pred
int qPy_pred
Definition: hevcdec.h:437
ff_thread_get_format
FF_DISABLE_DEPRECATION_WARNINGS enum AVPixelFormat ff_thread_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Wrapper around get_format() for frame-multithreaded codecs.
Definition: pthread_frame.c:1005
HEVCFrame::tab_mvf_buf
AVBufferRef * tab_mvf_buf
Definition: hevcdec.h:405
AVFilmGrainH274Params::log2_scale_factor
int log2_scale_factor
Specifies a scale factor used in the film grain characterization equations.
Definition: film_grain_params.h:165
SCAN_DIAG
@ SCAN_DIAG
Definition: hevcdec.h:224
SliceHeader::rpl_modification_flag
uint8_t rpl_modification_flag[2]
Definition: hevcdec.h:273
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:112
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: codec.h:82
AVMasteringDisplayMetadata::min_luminance
AVRational min_luminance
Min luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:52
FF_CODEC_PROPERTY_CLOSED_CAPTIONS
#define FF_CODEC_PROPERTY_CLOSED_CAPTIONS
Definition: avcodec.h:1846
hevc_decode_frame
static int hevc_decode_frame(AVCodecContext *avctx, AVFrame *rframe, int *got_output, AVPacket *avpkt)
Definition: hevcdec.c:3477
av_md5_update
void av_md5_update(AVMD5 *ctx, const uint8_t *src, size_t len)
Update hash value.
Definition: md5.c:151
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
AVFilmGrainH274Params::num_model_values
uint8_t num_model_values[3]
Specifies the number of model values present for each intensity interval in which the film grain has ...
Definition: film_grain_params.h:182
HEVCLocalContext::tu
TransformUnit tu
Definition: hevcdec.h:439
hls_cross_component_pred
static int hls_cross_component_pred(HEVCContext *s, int idx)
Definition: hevcdec.c:1082
hls_slice_header
static int hls_slice_header(HEVCContext *s)
Definition: hevcdec.c:558
CodingUnit::y
int y
Definition: hevcdec.h:326
src0
const pixel *const src0
Definition: h264pred_template.c:420
set_side_data
static int set_side_data(HEVCContext *s)
Definition: hevcdec.c:2729
AVCodecContext::coded_width
int coded_width
Bitstream width / height, may be different from width/height e.g.
Definition: avcodec.h:577
HEVCSEIFilmGrainCharacteristics::num_model_values
uint8_t num_model_values[3]
Definition: hevc_sei.h:131
desc
const char * desc
Definition: libsvtav1.c:83
Mv
Definition: hevcdec.h:337
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
HEVC_NAL_SPS
@ HEVC_NAL_SPS
Definition: hevc.h:62
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
PRED_L0
@ PRED_L0
Definition: hevcdec.h:159
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
get_bitsz
static av_always_inline int get_bitsz(GetBitContext *s, int n)
Read 0-25 bits.
Definition: get_bits.h:415
HEVCVPS
Definition: hevc_ps.h:123
VUI::video_signal_type_present_flag
int video_signal_type_present_flag
Definition: hevc_ps.h:55
mastering_display_metadata.h
get_ue_golomb_long
static unsigned get_ue_golomb_long(GetBitContext *gb)
Read an unsigned Exp-Golomb code in the range 0 to UINT32_MAX-1.
Definition: golomb.h:104
ff_hevc_sao_band_position_decode
int ff_hevc_sao_band_position_decode(HEVCContext *s)
Definition: hevc_cabac.c:583
EPEL_EXTRA
#define EPEL_EXTRA
Definition: hevcdec.h:62
ff_hevc_part_mode_decode
int ff_hevc_part_mode_decode(HEVCContext *s, int log2_cb_size)
Definition: hevc_cabac.c:712
s0
#define s0
Definition: regdef.h:37
av_stereo3d_create_side_data
AVStereo3D * av_stereo3d_create_side_data(AVFrame *frame)
Allocate a complete AVFrameSideData and add it to the frame.
Definition: stereo3d.c:34
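A minimal sketch of attaching stereo 3D packing information to a frame, using the AV_STEREO3D_TOPBOTTOM and AV_STEREO3D_FLAG_INVERT values referenced above (mark_top_bottom is an illustrative helper):
    #include <libavutil/error.h>
    #include <libavutil/frame.h>
    #include <libavutil/stereo3d.h>

    static int mark_top_bottom(AVFrame *frame)
    {
        AVStereo3D *s3d = av_stereo3d_create_side_data(frame);
        if (!s3d)
            return AVERROR(ENOMEM);
        s3d->type  = AV_STEREO3D_TOPBOTTOM;    /* views stacked vertically */
        s3d->flags = AV_STEREO3D_FLAG_INVERT;  /* bottom half carries the left view */
        return 0;
    }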
HEVCSPS
Definition: hevc_ps.h:153
AVFilmGrainH274Params::color_trc
enum AVColorTransferCharacteristic color_trc
Definition: film_grain_params.h:151
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:231
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
HEVCPPS
Definition: hevc_ps.h:249
CodingUnit::part_mode
enum PartMode part_mode
PartMode.
Definition: hevcdec.h:329
AVStereo3D::view
enum AVStereo3DView view
Determines which views are packed.
Definition: stereo3d.h:190
av_free
#define av_free(p)
Definition: tableprint_vlc.h:33
SliceHeader::tc_offset
int tc_offset
tc_offset_div2 * 2
Definition: hevcdec.h:296
LongTermRPS::nb_refs
uint8_t nb_refs
Definition: hevcdec.h:233
AVPacket
This structure stores compressed data.
Definition: packet.h:351
AVContentLightMetadata::MaxFALL
unsigned MaxFALL
Max average light level per frame (cd/m^2).
Definition: mastering_display_metadata.h:107
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:416
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:244
TransformUnit::cross_pf
uint8_t cross_pf
Definition: hevcdec.h:380
SAOParams::offset_val
int16_t offset_val[3][5]
SaoOffsetVal.
Definition: hevcdsp.h:42
HEVCLocalContext::cu
CodingUnit cu
Definition: hevcdec.h:454
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
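A minimal sketch of storing a string entry in an AVDictionary, similar to how a decoder can expose an SEI timecode as frame metadata (tag_timecode is an illustrative helper): with flags == 0 both key and value are copied.
    #include <libavutil/dict.h>

    static int tag_timecode(AVDictionary **metadata, const char *tc)
    {
        return av_dict_set(metadata, "timecode", tc, 0);
    }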
av_fast_malloc
void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size)
Allocate a buffer, reusing the given one if large enough.
Definition: mem.c:565
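A minimal sketch of a reusable scratch buffer that only grows when the required size increases (ensure_scratch is an illustrative helper): on allocation failure *buf is freed and set to NULL and *allocated is reset to 0.
    #include <stdint.h>
    #include <stddef.h>
    #include <libavutil/mem.h>

    static uint8_t *ensure_scratch(uint8_t **buf, unsigned int *allocated, size_t needed)
    {
        av_fast_malloc(buf, allocated, needed);
        return *buf;  /* NULL if (re)allocation failed */
    }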
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
SliceHeader::pps_id
unsigned int pps_id
ID of the PPS referenced in the slice header.
Definition: hevcdec.h:248
ff_hevc_decoder
const FFCodec ff_hevc_decoder
Definition: hevcdec.c:3870
HWACCEL_VAAPI
#define HWACCEL_VAAPI(codec)
Definition: hwconfig.h:73
FFMAX3
#define FFMAX3(a, b, c)
Definition: macros.h:48
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:562
int32_t
int32_t
Definition: audioconvert.c:56
bytestream.h
ff_hevc_decode_short_term_rps
int ff_hevc_decode_short_term_rps(GetBitContext *gb, AVCodecContext *avctx, ShortTermRPS *rps, const HEVCSPS *sps, int is_slice_header)
Definition: hevc_ps.c:119
PredictionUnit::mpm_idx
int mpm_idx
Definition: hevcdec.h:358
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which typically indicates the size in bytes of each pict...
Definition: frame.h:370
VUI::video_full_range_flag
int video_full_range_flag
Definition: hevc_ps.h:57
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
HEVC_NAL_FD_NUT
@ HEVC_NAL_FD_NUT
Definition: hevc.h:67
PredictionUnit::chroma_mode_c
uint8_t chroma_mode_c[4]
Definition: hevcdec.h:364
AVFilmGrainH274Params::bit_depth_chroma
int bit_depth_chroma
Specifies the bit depth used for the chroma components.
Definition: film_grain_params.h:147
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
skip_bytes
static const av_unused uint8_t * skip_bytes(CABACContext *c, int n)
Skip n bytes and reset the decoder.
Definition: cabac_functions.h:203
PredictionUnit::intra_pred_mode
uint8_t intra_pred_mode[4]
Definition: hevcdec.h:360
ff_hevc_decode_nal_pps
int ff_hevc_decode_nal_pps(GetBitContext *gb, AVCodecContext *avctx, HEVCParamSets *ps)
Definition: hevc_ps.c:1496
TransformUnit::is_cu_chroma_qp_offset_coded
uint8_t is_cu_chroma_qp_offset_coded
Definition: hevcdec.h:377
hls_transform_tree
static int hls_transform_tree(HEVCContext *s, int x0, int y0, int xBase, int yBase, int cb_xBase, int cb_yBase, int log2_cb_size, int log2_trafo_size, int trafo_depth, int blk_idx, const int *base_cbf_cb, const int *base_cbf_cr)
Definition: hevcdec.c:1319
h
h
Definition: vp9dsp_template.c:2038
HEVCSEIFilmGrainCharacteristics::bit_depth_luma
int bit_depth_luma
Definition: hevc_sei.h:121
BOUNDARY_LEFT_SLICE
#define BOUNDARY_LEFT_SLICE
Definition: hevcdec.h:458
AVStereo3D
Stereo 3D type: this structure describes how two videos are packed within a single video surface,...
Definition: stereo3d.h:176
SliceHeader::slice_qp_delta
int slice_qp_delta
Definition: hevcdec.h:289
SliceHeader::slice_addr
unsigned int slice_addr
Definition: hevcdec.h:253
ff_hevc_log2_res_scale_abs
int ff_hevc_log2_res_scale_abs(HEVCContext *s, int idx)
Definition: hevc_cabac.c:903
HEVC_NAL_EOB_NUT
@ HEVC_NAL_EOB_NUT
Definition: hevc.h:66
atomic_init
#define atomic_init(obj, value)
Definition: stdatomic.h:33
TransformUnit::intra_pred_mode_c
int intra_pred_mode_c
Definition: hevcdec.h:374
AVDISCARD_NONREF
@ AVDISCARD_NONREF
discard all non reference
Definition: defs.h:50
HEVC_NAL_SEI_PREFIX
@ HEVC_NAL_SEI_PREFIX
Definition: hevc.h:68
HEVCSEIFilmGrainCharacteristics
Definition: hevc_sei.h:117
HEVCLocalContext::end_of_tiles_y
int end_of_tiles_y
Definition: hevcdec.h:446
AVFilmGrainParams::type
enum AVFilmGrainParamsType type
Specifies the codec for which this structure is valid.
Definition: film_grain_params.h:220
CodingUnit::intra_split_flag
uint8_t intra_split_flag
IntraSplitFlag.
Definition: hevcdec.h:332
ff_hevc_end_of_slice_flag_decode
int ff_hevc_end_of_slice_flag_decode(HEVCContext *s)
Definition: hevc_cabac.c:615
intra_prediction_unit
static void intra_prediction_unit(HEVCContext *s, int x0, int y0, int log2_cb_size)
Definition: hevcdec.c:2084
SHIFT_CTB_WPP
#define SHIFT_CTB_WPP
Definition: hevcdec.h:46
av_color_transfer_name
const char * av_color_transfer_name(enum AVColorTransferCharacteristic transfer)
Definition: pixdesc.c:3029
luma_mc_uni
static void luma_mc_uni(HEVCContext *s, uint8_t *dst, ptrdiff_t dststride, AVFrame *ref, const Mv *mv, int x_off, int y_off, int block_w, int block_h, int luma_weight, int luma_offset)
8.5.3.2.2.1 Luma sample unidirectional interpolation process
Definition: hevcdec.c:1499
PART_2NxN
@ PART_2NxN
Definition: hevcdec.h:143
HEVCParamSets::vps_list
AVBufferRef * vps_list[HEVC_MAX_VPS_COUNT]
Definition: hevc_ps.h:328
ff_dovi_attach_side_data
int ff_dovi_attach_side_data(DOVIContext *s, AVFrame *frame)
Attach the decoded AVDOVIMetadata as side data to an AVFrame.
Definition: dovi_rpu.c:91
AV_CODEC_EXPORT_DATA_FILM_GRAIN
#define AV_CODEC_EXPORT_DATA_FILM_GRAIN
Decoding only.
Definition: avcodec.h:362
SliceHeader::long_term_rps
LongTermRPS long_term_rps
Definition: hevcdec.h:270
HEVCSEIFilmGrainCharacteristics::persistence_flag
int persistence_flag
Definition: hevc_sei.h:135
HEVCLocalContext::cc
CABACContext cc
Definition: hevcdec.h:432
TransformUnit::cu_qp_offset_cr
int8_t cu_qp_offset_cr
Definition: hevcdec.h:379
ff_alloc_entries
int ff_alloc_entries(AVCodecContext *avctx, int count)
Definition: pthread_slice.c:240
options
static const AVOption options[]
Definition: hevcdec.c:3855
AVDOVIDecoderConfigurationRecord
Definition: dovi_meta.h:52
HEVC_CONTEXTS
#define HEVC_CONTEXTS
Definition: hevcdec.h:52
HEVCParamSets
Definition: hevc_ps.h:327