vdpau.c
1 /*
2  * Video Decode and Presentation API for UNIX (VDPAU) is used for
3  * HW decode acceleration for MPEG-1/2, MPEG-4 ASP, H.264 and VC-1.
4  *
5  * Copyright (c) 2008 NVIDIA
6  *
7  * This file is part of FFmpeg.
8  *
9  * FFmpeg is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 2.1 of the License, or (at your option) any later version.
13  *
14  * FFmpeg is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with FFmpeg; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  */
23 
24 #include <limits.h>
25 #include "libavutil/avassert.h"
26 #include "avcodec.h"
27 #include "internal.h"
28 #include "h264.h"
29 #include "vc1.h"
30 
31 #undef NDEBUG
32 #include <assert.h>
33 
34 #include "vdpau.h"
35 #include "vdpau_compat.h"
36 #include "vdpau_internal.h"
37 
38 /**
39  * @addtogroup VDPAU_Decoding
40  *
41  * @{
42  */
43 
44 static int vdpau_error(VdpStatus status)
45 {
46  switch (status) {
47  case VDP_STATUS_OK:
48  return 0;
49  case VDP_STATUS_NO_IMPLEMENTATION:
50  return AVERROR(ENOSYS);
51  case VDP_STATUS_DISPLAY_PREEMPTED:
52  return AVERROR(EIO);
53  case VDP_STATUS_INVALID_HANDLE:
54  return AVERROR(EBADF);
55  case VDP_STATUS_INVALID_POINTER:
56  return AVERROR(EFAULT);
57  case VDP_STATUS_RESOURCES:
58  return AVERROR(ENOBUFS);
59  case VDP_STATUS_HANDLE_DEVICE_MISMATCH:
60  return AVERROR(EXDEV);
61  case VDP_STATUS_ERROR:
62  return AVERROR(EIO);
63  default:
64  return AVERROR(EINVAL);
65  }
66 }
67 
68 AVVDPAUContext *av_alloc_vdpaucontext(void)
69 {
70  return av_vdpau_alloc_context();
71 }
72 
73 MAKE_ACCESSORS(AVVDPAUContext, vdpau_hwaccel, AVVDPAU_Render2, render2)
74 
75 int av_vdpau_get_surface_parameters(AVCodecContext *avctx,
76  VdpChromaType *type,
77  uint32_t *width, uint32_t *height)
78 {
79  VdpChromaType t;
80  uint32_t w = avctx->coded_width;
81  uint32_t h = avctx->coded_height;
82 
83  /* See <vdpau/vdpau.h> for per-type alignment constraints. */
84  switch (avctx->sw_pix_fmt) {
85  case AV_PIX_FMT_YUV420P:
86  case AV_PIX_FMT_YUVJ420P:
87  t = VDP_CHROMA_TYPE_420;
88  w = (w + 1) & ~1;
89  h = (h + 3) & ~3;
90  break;
91  case AV_PIX_FMT_YUV422P:
92  case AV_PIX_FMT_YUVJ422P:
93  t = VDP_CHROMA_TYPE_422;
94  w = (w + 1) & ~1;
95  h = (h + 1) & ~1;
96  break;
97  case AV_PIX_FMT_YUV444P:
98  case AV_PIX_FMT_YUVJ444P:
99  t = VDP_CHROMA_TYPE_444;
100  h = (h + 1) & ~1;
101  break;
102  default:
103  return AVERROR(ENOSYS);
104  }
105 
106  if (type)
107  *type = t;
108  if (width)
109  *width = w;
110  if (height)
111  *height = h;
112  return 0;
113 }
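/*
 * Illustrative sketch (not part of the original vdpau.c): how a client that
 * manages its own surface pool might use av_vdpau_get_surface_parameters()
 * so that its VdpVideoSurface matches the chroma type and the rounded-up
 * dimensions computed above. "surface_create" is an assumed
 * VdpVideoSurfaceCreate pointer obtained through VdpGetProcAddress; it is
 * not defined in this file.
 */
static int example_alloc_video_surface(AVCodecContext *avctx, VdpDevice device,
                                       VdpVideoSurfaceCreate *surface_create,
                                       VdpVideoSurface *surface)
{
    VdpChromaType type;
    uint32_t width, height;
    int ret;

    ret = av_vdpau_get_surface_parameters(avctx, &type, &width, &height);
    if (ret < 0)
        return ret;                    /* sw_pix_fmt not usable with VDPAU */

    if (surface_create(device, type, width, height, surface) != VDP_STATUS_OK)
        return AVERROR(EIO);
    return 0;
}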
114 
115 int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
116  int level)
117 {
118  VDPAUHWContext *hwctx = avctx->hwaccel_context;
119  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
120  VdpVideoSurfaceQueryCapabilities *surface_query_caps;
121  VdpDecoderQueryCapabilities *decoder_query_caps;
122  VdpDecoderCreate *create;
123  void *func;
124  VdpStatus status;
125  VdpBool supported;
126  uint32_t max_level, max_mb, max_width, max_height;
127  VdpChromaType type;
128  uint32_t width;
129  uint32_t height;
130 
131  vdctx->width = UINT32_MAX;
132  vdctx->height = UINT32_MAX;
133 
134  if (!hwctx) {
135  vdctx->device = VDP_INVALID_HANDLE;
136  av_log(avctx, AV_LOG_WARNING, "hwaccel_context has not been setup by the user application, cannot initialize\n");
137  return 0;
138  }
139 
140  if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
141  vdctx->decoder = hwctx->context.decoder;
142  vdctx->render = hwctx->context.render;
143  vdctx->device = VDP_INVALID_HANDLE;
144  return 0; /* Decoder created by user */
145  }
146  hwctx->reset = 0;
147 
148  vdctx->device = hwctx->device;
149  vdctx->get_proc_address = hwctx->get_proc_address;
150 
151  if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
152  level = 0;
153  else if (level < 0)
154  return AVERROR(ENOTSUP);
155 
156  if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
157  return AVERROR(ENOSYS);
158 
159  if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
160  type != VDP_CHROMA_TYPE_420)
161  return AVERROR(ENOSYS);
162 
163  status = vdctx->get_proc_address(vdctx->device,
164  VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
165  &func);
166  if (status != VDP_STATUS_OK)
167  return vdpau_error(status);
168  else
169  surface_query_caps = func;
170 
171  status = surface_query_caps(vdctx->device, type, &supported,
172  &max_width, &max_height);
173  if (status != VDP_STATUS_OK)
174  return vdpau_error(status);
175  if (supported != VDP_TRUE ||
176  max_width < width || max_height < height)
177  return AVERROR(ENOTSUP);
178 
179  status = vdctx->get_proc_address(vdctx->device,
180  VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
181  &func);
182  if (status != VDP_STATUS_OK)
183  return vdpau_error(status);
184  else
185  decoder_query_caps = func;
186 
187  status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
188  &max_mb, &max_width, &max_height);
189 #ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
190  if (status != VDP_STATUS_OK && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
191  /* Run-time backward compatibility for libvdpau 0.8 and earlier */
192  profile = VDP_DECODER_PROFILE_H264_MAIN;
193  status = decoder_query_caps(vdctx->device, profile, &supported,
194  &max_level, &max_mb,
195  &max_width, &max_height);
196  }
197 #endif
198  if (status != VDP_STATUS_OK)
199  return vdpau_error(status);
200 
201  if (supported != VDP_TRUE || max_level < level ||
202  max_width < width || max_height < height)
203  return AVERROR(ENOTSUP);
204 
205  status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_CREATE,
206  &func);
207  if (status != VDP_STATUS_OK)
208  return vdpau_error(status);
209  else
210  create = func;
211 
212  status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_RENDER,
213  &func);
214  if (status != VDP_STATUS_OK)
215  return vdpau_error(status);
216  else
217  vdctx->render = func;
218 
219  status = create(vdctx->device, profile, width, height, avctx->refs,
220  &vdctx->decoder);
221  if (status == VDP_STATUS_OK) {
222  vdctx->width = avctx->coded_width;
223  vdctx->height = avctx->coded_height;
224  }
225 
226  return vdpau_error(status);
227 }
228 
229 int ff_vdpau_common_uninit(AVCodecContext *avctx)
230 {
231  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
232  VdpDecoderDestroy *destroy;
233  void *func;
234  VdpStatus status;
235 
236  if (vdctx->device == VDP_INVALID_HANDLE)
237  return 0; /* Decoder created and destroyed by user */
238  if (vdctx->width == UINT32_MAX && vdctx->height == UINT32_MAX)
239  return 0;
240 
241  status = vdctx->get_proc_address(vdctx->device,
242  VDP_FUNC_ID_DECODER_DESTROY, &func);
243  if (status != VDP_STATUS_OK)
244  return vdpau_error(status);
245  else
246  destroy = func;
247 
248  status = destroy(vdctx->decoder);
249  return vdpau_error(status);
250 }
251 
252 static int ff_vdpau_common_reinit(AVCodecContext *avctx)
253 {
254  VDPAUHWContext *hwctx = avctx->hwaccel_context;
255  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
256 
257  if (vdctx->device == VDP_INVALID_HANDLE)
258  return 0; /* Decoder created by user */
259  if (avctx->coded_width == vdctx->width &&
260  avctx->coded_height == vdctx->height && !hwctx->reset)
261  return 0;
262 
263  avctx->hwaccel->uninit(avctx);
264  return avctx->hwaccel->init(avctx);
265 }
266 
267 int ff_vdpau_common_start_frame(struct vdpau_picture_context *pic_ctx,
268  av_unused const uint8_t *buffer,
269  av_unused uint32_t size)
270 {
271  pic_ctx->bitstream_buffers_allocated = 0;
272  pic_ctx->bitstream_buffers_used = 0;
273  pic_ctx->bitstream_buffers = NULL;
274  return 0;
275 }
276 
277 int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
278  struct vdpau_picture_context *pic_ctx)
279 {
280  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
281  AVVDPAUContext *hwctx = avctx->hwaccel_context;
282  VdpVideoSurface surf = ff_vdpau_get_surface_id(frame);
283  VdpStatus status;
284  int val;
285 
286  val = ff_vdpau_common_reinit(avctx);
287  if (val < 0)
288  return val;
289 
290 #if FF_API_BUFS_VDPAU
291 FF_DISABLE_DEPRECATION_WARNINGS
292  av_assert0(sizeof(hwctx->info) <= sizeof(pic_ctx->info));
293  memcpy(&hwctx->info, &pic_ctx->info, sizeof(hwctx->info));
294  hwctx->bitstream_buffers = pic_ctx->bitstream_buffers;
295  hwctx->bitstream_buffers_used = pic_ctx->bitstream_buffers_used;
296  hwctx->bitstream_buffers_allocated = pic_ctx->bitstream_buffers_allocated;
297 FF_ENABLE_DEPRECATION_WARNINGS
298 #endif
299 
300  if (!hwctx->render && hwctx->render2) {
301  status = hwctx->render2(avctx, frame, (void *)&pic_ctx->info,
302  pic_ctx->bitstream_buffers_used, pic_ctx->bitstream_buffers);
303  } else
304  status = vdctx->render(vdctx->decoder, surf, (void *)&pic_ctx->info,
305  pic_ctx->bitstream_buffers_used,
306  pic_ctx->bitstream_buffers);
307 
308  av_freep(&pic_ctx->bitstream_buffers);
309 
310 #if FF_API_BUFS_VDPAU
311 FF_DISABLE_DEPRECATION_WARNINGS
312  hwctx->bitstream_buffers = NULL;
313  hwctx->bitstream_buffers_used = 0;
314  hwctx->bitstream_buffers_allocated = 0;
315 FF_ENABLE_DEPRECATION_WARNINGS
316 #endif
317 
318  return vdpau_error(status);
319 }
320 
321 #if CONFIG_H263_VDPAU_HWACCEL || CONFIG_MPEG1_VDPAU_HWACCEL || \
322  CONFIG_MPEG2_VDPAU_HWACCEL || CONFIG_MPEG4_VDPAU_HWACCEL || \
323  CONFIG_VC1_VDPAU_HWACCEL || CONFIG_WMV3_VDPAU_HWACCEL
324 int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
325 {
326  MpegEncContext *s = avctx->priv_data;
327  Picture *pic = s->current_picture_ptr;
328  struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
329  int val;
330 
331  val = ff_vdpau_common_end_frame(avctx, pic->f, pic_ctx);
332  if (val < 0)
333  return val;
334 
335  ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
336  return 0;
337 }
338 #endif
339 
340 int ff_vdpau_add_buffer(struct vdpau_picture_context *pic_ctx,
341  const uint8_t *buf, uint32_t size)
342 {
343  VdpBitstreamBuffer *buffers = pic_ctx->bitstream_buffers;
344 
345  buffers = av_fast_realloc(buffers, &pic_ctx->bitstream_buffers_allocated,
346  (pic_ctx->bitstream_buffers_used + 1) * sizeof(*buffers));
347  if (!buffers)
348  return AVERROR(ENOMEM);
349 
350  pic_ctx->bitstream_buffers = buffers;
351  buffers += pic_ctx->bitstream_buffers_used++;
352 
353  buffers->struct_version = VDP_BITSTREAM_BUFFER_VERSION;
354  buffers->bitstream = buf;
355  buffers->bitstream_bytes = size;
356  return 0;
357 }
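/*
 * Illustrative sketch (not part of the original vdpau.c): the per-codec
 * VDPAU hwaccels call ff_vdpau_add_buffer() from their decode_slice()
 * callback; the H.264 hwaccel, for instance, prepends an Annex-B start code
 * before every slice. The names "example_start_code" and
 * "example_decode_slice" are illustrative only.
 */
static const uint8_t example_start_code[3] = { 0x00, 0x00, 0x01 };

static int example_decode_slice(AVCodecContext *avctx,
                                const uint8_t *buffer, uint32_t size)
{
    H264Context *h = avctx->priv_data;
    struct vdpau_picture_context *pic_ctx = h->cur_pic_ptr->hwaccel_picture_private;
    int val;

    val = ff_vdpau_add_buffer(pic_ctx, example_start_code,
                              sizeof(example_start_code));
    if (val < 0)
        return val;

    return ff_vdpau_add_buffer(pic_ctx, buffer, size);
}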
358 
359 /* Obsolete non-hwaccel VDPAU support below... */
360 
361 #if FF_API_VDPAU
362 void ff_vdpau_add_data_chunk(uint8_t *data, const uint8_t *buf, int buf_size)
363 {
364  struct vdpau_render_state *render = (struct vdpau_render_state*)data;
365  assert(render);
366 
367  render->bitstream_buffers = av_fast_realloc(
368  render->bitstream_buffers,
369  &render->bitstream_buffers_allocated,
370  sizeof(*render->bitstream_buffers)*(render->bitstream_buffers_used + 1)
371  );
372 
373  render->bitstream_buffers[render->bitstream_buffers_used].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
374  render->bitstream_buffers[render->bitstream_buffers_used].bitstream = buf;
375  render->bitstream_buffers[render->bitstream_buffers_used].bitstream_bytes = buf_size;
376  render->bitstream_buffers_used++;
377 }
378 
379 #if CONFIG_H264_VDPAU_DECODER
380 void ff_vdpau_h264_set_reference_frames(H264Context *h)
381 {
382  struct vdpau_render_state *render, *render_ref;
383  VdpReferenceFrameH264 *rf, *rf2;
384  H264Picture *pic;
385  int i, list, pic_frame_idx;
386 
387  render = (struct vdpau_render_state *)h->cur_pic_ptr->f->data[0];
388  assert(render);
389 
390  rf = &render->info.h264.referenceFrames[0];
391 #define H264_RF_COUNT FF_ARRAY_ELEMS(render->info.h264.referenceFrames)
392 
393  for (list = 0; list < 2; ++list) {
394  H264Picture **lp = list ? h->long_ref : h->short_ref;
395  int ls = list ? 16 : h->short_ref_count;
396 
397  for (i = 0; i < ls; ++i) {
398  pic = lp[i];
399  if (!pic || !pic->reference)
400  continue;
401  pic_frame_idx = pic->long_ref ? pic->pic_id : pic->frame_num;
402 
403  render_ref = (struct vdpau_render_state *)pic->f->data[0];
404  assert(render_ref);
405 
406  rf2 = &render->info.h264.referenceFrames[0];
407  while (rf2 != rf) {
408  if (
409  (rf2->surface == render_ref->surface)
410  && (rf2->is_long_term == pic->long_ref)
411  && (rf2->frame_idx == pic_frame_idx)
412  )
413  break;
414  ++rf2;
415  }
416  if (rf2 != rf) {
417  rf2->top_is_reference |= (pic->reference & PICT_TOP_FIELD) ? VDP_TRUE : VDP_FALSE;
418  rf2->bottom_is_reference |= (pic->reference & PICT_BOTTOM_FIELD) ? VDP_TRUE : VDP_FALSE;
419  continue;
420  }
421 
422  if (rf >= &render->info.h264.referenceFrames[H264_RF_COUNT])
423  continue;
424 
425  rf->surface = render_ref->surface;
426  rf->is_long_term = pic->long_ref;
427  rf->top_is_reference = (pic->reference & PICT_TOP_FIELD) ? VDP_TRUE : VDP_FALSE;
428  rf->bottom_is_reference = (pic->reference & PICT_BOTTOM_FIELD) ? VDP_TRUE : VDP_FALSE;
429  rf->field_order_cnt[0] = pic->field_poc[0];
430  rf->field_order_cnt[1] = pic->field_poc[1];
431  rf->frame_idx = pic_frame_idx;
432 
433  ++rf;
434  }
435  }
436 
437  for (; rf < &render->info.h264.referenceFrames[H264_RF_COUNT]; ++rf) {
438  rf->surface = VDP_INVALID_HANDLE;
439  rf->is_long_term = 0;
440  rf->top_is_reference = 0;
441  rf->bottom_is_reference = 0;
442  rf->field_order_cnt[0] = 0;
443  rf->field_order_cnt[1] = 0;
444  rf->frame_idx = 0;
445  }
446 }
447 
448 void ff_vdpau_h264_picture_start(H264Context *h)
449 {
450  struct vdpau_render_state *render;
451  int i;
452 
453  render = (struct vdpau_render_state *)h->cur_pic_ptr->f->data[0];
454  assert(render);
455 
456  for (i = 0; i < 2; ++i) {
457  int foc = h->cur_pic_ptr->field_poc[i];
458  if (foc == INT_MAX)
459  foc = 0;
460  render->info.h264.field_order_cnt[i] = foc;
461  }
462 
463  render->info.h264.frame_num = h->frame_num;
464 }
465 
466 void ff_vdpau_h264_picture_complete(H264Context *h)
467 {
468  struct vdpau_render_state *render;
469 
470  render = (struct vdpau_render_state *)h->cur_pic_ptr->f->data[0];
471  assert(render);
472 
473  render->info.h264.slice_count = h->current_slice;
474  if (render->info.h264.slice_count < 1)
475  return;
476 
477  render->info.h264.is_reference = (h->cur_pic_ptr->reference & 3) ? VDP_TRUE : VDP_FALSE;
478  render->info.h264.field_pic_flag = h->picture_structure != PICT_FRAME;
479  render->info.h264.bottom_field_flag = h->picture_structure == PICT_BOTTOM_FIELD;
480  render->info.h264.num_ref_frames = h->sps.ref_frame_count;
481  render->info.h264.mb_adaptive_frame_field_flag = h->sps.mb_aff && !render->info.h264.field_pic_flag;
482  render->info.h264.constrained_intra_pred_flag = h->pps.constrained_intra_pred;
483  render->info.h264.weighted_pred_flag = h->pps.weighted_pred;
484  render->info.h264.weighted_bipred_idc = h->pps.weighted_bipred_idc;
485  render->info.h264.frame_mbs_only_flag = h->sps.frame_mbs_only_flag;
486  render->info.h264.transform_8x8_mode_flag = h->pps.transform_8x8_mode;
487  render->info.h264.chroma_qp_index_offset = h->pps.chroma_qp_index_offset[0];
488  render->info.h264.second_chroma_qp_index_offset = h->pps.chroma_qp_index_offset[1];
489  render->info.h264.pic_init_qp_minus26 = h->pps.init_qp - 26;
490  render->info.h264.num_ref_idx_l0_active_minus1 = h->pps.ref_count[0] - 1;
491  render->info.h264.num_ref_idx_l1_active_minus1 = h->pps.ref_count[1] - 1;
492  render->info.h264.log2_max_frame_num_minus4 = h->sps.log2_max_frame_num - 4;
493  render->info.h264.pic_order_cnt_type = h->sps.poc_type;
494  render->info.h264.log2_max_pic_order_cnt_lsb_minus4 = h->sps.poc_type ? 0 : h->sps.log2_max_poc_lsb - 4;
495  render->info.h264.delta_pic_order_always_zero_flag = h->sps.delta_pic_order_always_zero_flag;
496  render->info.h264.direct_8x8_inference_flag = h->sps.direct_8x8_inference_flag;
497  render->info.h264.entropy_coding_mode_flag = h->pps.cabac;
498  render->info.h264.pic_order_present_flag = h->pps.pic_order_present;
499  render->info.h264.deblocking_filter_control_present_flag = h->pps.deblocking_filter_parameters_present;
500  render->info.h264.redundant_pic_cnt_present_flag = h->pps.redundant_pic_cnt_present;
501  memcpy(render->info.h264.scaling_lists_4x4, h->pps.scaling_matrix4, sizeof(render->info.h264.scaling_lists_4x4));
502  memcpy(render->info.h264.scaling_lists_8x8[0], h->pps.scaling_matrix8[0], sizeof(render->info.h264.scaling_lists_8x8[0]));
503  memcpy(render->info.h264.scaling_lists_8x8[1], h->pps.scaling_matrix8[3], sizeof(render->info.h264.scaling_lists_8x8[0]));
504 
505  ff_h264_draw_horiz_band(h, &h->slice_ctx[0], 0, h->avctx->height);
506  render->bitstream_buffers_used = 0;
507 }
508 #endif /* CONFIG_H264_VDPAU_DECODER */
509 
510 #if CONFIG_MPEG_VDPAU_DECODER || CONFIG_MPEG1_VDPAU_DECODER
511 void ff_vdpau_mpeg_picture_complete(MpegEncContext *s, const uint8_t *buf,
512  int buf_size, int slice_count)
513 {
514  struct vdpau_render_state *render, *last, *next;
515  int i;
516 
517  if (!s->current_picture_ptr) return;
518 
519  render = (struct vdpau_render_state *)s->current_picture_ptr->f->data[0];
520  assert(render);
521 
522  /* fill VdpPictureInfoMPEG1Or2 struct */
523  render->info.mpeg.picture_structure = s->picture_structure;
524  render->info.mpeg.picture_coding_type = s->pict_type;
525  render->info.mpeg.intra_dc_precision = s->intra_dc_precision;
526  render->info.mpeg.frame_pred_frame_dct = s->frame_pred_frame_dct;
527  render->info.mpeg.concealment_motion_vectors = s->concealment_motion_vectors;
528  render->info.mpeg.intra_vlc_format = s->intra_vlc_format;
529  render->info.mpeg.alternate_scan = s->alternate_scan;
530  render->info.mpeg.q_scale_type = s->q_scale_type;
531  render->info.mpeg.top_field_first = s->top_field_first;
532  render->info.mpeg.full_pel_forward_vector = s->full_pel[0]; // MPEG-1 only. Set 0 for MPEG-2
533  render->info.mpeg.full_pel_backward_vector = s->full_pel[1]; // MPEG-1 only. Set 0 for MPEG-2
534  render->info.mpeg.f_code[0][0] = s->mpeg_f_code[0][0]; // For MPEG-1 fill both horiz. & vert.
535  render->info.mpeg.f_code[0][1] = s->mpeg_f_code[0][1];
536  render->info.mpeg.f_code[1][0] = s->mpeg_f_code[1][0];
537  render->info.mpeg.f_code[1][1] = s->mpeg_f_code[1][1];
538  for (i = 0; i < 64; ++i) {
539  render->info.mpeg.intra_quantizer_matrix[i] = s->intra_matrix[i];
540  render->info.mpeg.non_intra_quantizer_matrix[i] = s->inter_matrix[i];
541  }
542 
543  render->info.mpeg.forward_reference = VDP_INVALID_HANDLE;
544  render->info.mpeg.backward_reference = VDP_INVALID_HANDLE;
545 
546  switch(s->pict_type){
547  case AV_PICTURE_TYPE_B:
548  next = (struct vdpau_render_state *)s->next_picture.f->data[0];
549  assert(next);
550  render->info.mpeg.backward_reference = next->surface;
551  // no break here, going to set forward prediction
552  case AV_PICTURE_TYPE_P:
553  last = (struct vdpau_render_state *)s->last_picture.f->data[0];
554  if (!last) // FIXME: Does this test make sense?
555  last = render; // predict second field from the first
556  render->info.mpeg.forward_reference = last->surface;
557  }
558 
559  ff_vdpau_add_data_chunk(s->current_picture_ptr->f->data[0], buf, buf_size);
560 
561  render->info.mpeg.slice_count = slice_count;
562 
563  if (slice_count)
564  ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
565  render->bitstream_buffers_used = 0;
566 }
567 #endif /* CONFIG_MPEG_VDPAU_DECODER || CONFIG_MPEG1_VDPAU_DECODER */
568 
569 #if CONFIG_VC1_VDPAU_DECODER
570 void ff_vdpau_vc1_decode_picture(MpegEncContext *s, const uint8_t *buf,
571  int buf_size)
572 {
573  VC1Context *v = s->avctx->priv_data;
574  struct vdpau_render_state *render, *last, *next;
575 
576  render = (struct vdpau_render_state *)s->current_picture.f->data[0];
577  assert(render);
578 
579  /* fill VdpPictureInfoVC1 struct */
580  render->info.vc1.frame_coding_mode = v->fcm ? v->fcm + 1 : 0;
581  render->info.vc1.postprocflag = v->postprocflag;
582  render->info.vc1.pulldown = v->broadcast;
583  render->info.vc1.interlace = v->interlace;
584  render->info.vc1.tfcntrflag = v->tfcntrflag;
585  render->info.vc1.finterpflag = v->finterpflag;
586  render->info.vc1.psf = v->psf;
587  render->info.vc1.dquant = v->dquant;
588  render->info.vc1.panscan_flag = v->panscanflag;
589  render->info.vc1.refdist_flag = v->refdist_flag;
590  render->info.vc1.quantizer = v->quantizer_mode;
591  render->info.vc1.extended_mv = v->extended_mv;
592  render->info.vc1.extended_dmv = v->extended_dmv;
593  render->info.vc1.overlap = v->overlap;
594  render->info.vc1.vstransform = v->vstransform;
595  render->info.vc1.loopfilter = v->s.loop_filter;
596  render->info.vc1.fastuvmc = v->fastuvmc;
597  render->info.vc1.range_mapy_flag = v->range_mapy_flag;
598  render->info.vc1.range_mapy = v->range_mapy;
599  render->info.vc1.range_mapuv_flag = v->range_mapuv_flag;
600  render->info.vc1.range_mapuv = v->range_mapuv;
601  /* Specific to simple/main profile only */
602  render->info.vc1.multires = v->multires;
603  render->info.vc1.syncmarker = v->resync_marker;
604  render->info.vc1.rangered = v->rangered | (v->rangeredfrm << 1);
605  render->info.vc1.maxbframes = v->s.max_b_frames;
606 
607  render->info.vc1.deblockEnable = v->postprocflag & 1;
608  render->info.vc1.pquant = v->pq;
609 
610  render->info.vc1.forward_reference = VDP_INVALID_HANDLE;
611  render->info.vc1.backward_reference = VDP_INVALID_HANDLE;
612 
613  if (v->bi_type)
614  render->info.vc1.picture_type = 4;
615  else
616  render->info.vc1.picture_type = s->pict_type - 1 + s->pict_type / 3;
617 
618  switch(s->pict_type){
619  case AV_PICTURE_TYPE_B:
620  next = (struct vdpau_render_state *)s->next_picture.f->data[0];
621  assert(next);
622  render->info.vc1.backward_reference = next->surface;
623  // no break here, going to set forward prediction
624  case AV_PICTURE_TYPE_P:
625  last = (struct vdpau_render_state *)s->last_picture.f->data[0];
626  if (!last) // FIXME: Does this test make sense?
627  last = render; // predict second field from the first
628  render->info.vc1.forward_reference = last->surface;
629  }
630 
631  ff_vdpau_add_data_chunk(s->current_picture_ptr->f->data[0], buf, buf_size);
632 
633  render->info.vc1.slice_count = 1;
634 
635  ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
636  render->bitstream_buffers_used = 0;
637 }
638 #endif /* CONFIG_VC1_VDPAU_DECODER */
639 
640 #if CONFIG_MPEG4_VDPAU_DECODER
641 void ff_vdpau_mpeg4_decode_picture(Mpeg4DecContext *ctx, const uint8_t *buf,
642  int buf_size)
643 {
644  MpegEncContext *s = &ctx->m;
645  struct vdpau_render_state *render, *last, *next;
646  int i;
647 
648  if (!s->current_picture_ptr) return;
649 
650  render = (struct vdpau_render_state *)s->current_picture_ptr->f->data[0];
651  assert(render);
652 
653  /* fill VdpPictureInfoMPEG4Part2 struct */
654  render->info.mpeg4.trd[0] = s->pp_time;
655  render->info.mpeg4.trb[0] = s->pb_time;
656  render->info.mpeg4.trd[1] = s->pp_field_time >> 1;
657  render->info.mpeg4.trb[1] = s->pb_field_time >> 1;
658  render->info.mpeg4.vop_time_increment_resolution = s->avctx->time_base.den;
659  render->info.mpeg4.vop_coding_type = 0;
660  render->info.mpeg4.vop_fcode_forward = s->f_code;
661  render->info.mpeg4.vop_fcode_backward = s->b_code;
662  render->info.mpeg4.resync_marker_disable = !ctx->resync_marker;
663  render->info.mpeg4.interlaced = !s->progressive_sequence;
664  render->info.mpeg4.quant_type = s->mpeg_quant;
665  render->info.mpeg4.quarter_sample = s->quarter_sample;
666  render->info.mpeg4.short_video_header = s->avctx->codec->id == AV_CODEC_ID_H263;
667  render->info.mpeg4.rounding_control = s->no_rounding;
668  render->info.mpeg4.alternate_vertical_scan_flag = s->alternate_scan;
669  render->info.mpeg4.top_field_first = s->top_field_first;
670  for (i = 0; i < 64; ++i) {
671  render->info.mpeg4.intra_quantizer_matrix[i] = s->intra_matrix[i];
672  render->info.mpeg4.non_intra_quantizer_matrix[i] = s->inter_matrix[i];
673  }
674  render->info.mpeg4.forward_reference = VDP_INVALID_HANDLE;
675  render->info.mpeg4.backward_reference = VDP_INVALID_HANDLE;
676 
677  switch (s->pict_type) {
678  case AV_PICTURE_TYPE_B:
679  next = (struct vdpau_render_state *)s->next_picture.f->data[0];
680  assert(next);
681  render->info.mpeg4.backward_reference = next->surface;
682  render->info.mpeg4.vop_coding_type = 2;
683  // no break here, going to set forward prediction
684  case AV_PICTURE_TYPE_P:
685  last = (struct vdpau_render_state *)s->last_picture.f->data[0];
686  assert(last);
687  render->info.mpeg4.forward_reference = last->surface;
688  }
689 
690  ff_vdpau_add_data_chunk(s->current_picture_ptr->f->data[0], buf, buf_size);
691 
692  ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
693  render->bitstream_buffers_used = 0;
694 }
695 #endif /* CONFIG_MPEG4_VDPAU_DECODER */
696 #endif /* FF_API_VDPAU */
697 
698 int av_vdpau_get_profile(AVCodecContext *avctx, VdpDecoderProfile *profile)
699 {
700 #define PROFILE(prof) \
701 do { \
702  *profile = VDP_DECODER_PROFILE_##prof; \
703  return 0; \
704 } while (0)
705 
706  switch (avctx->codec_id) {
707  case AV_CODEC_ID_MPEG1VIDEO: PROFILE(MPEG1);
708  case AV_CODEC_ID_MPEG2VIDEO:
709  switch (avctx->profile) {
710  case FF_PROFILE_MPEG2_MAIN: PROFILE(MPEG2_MAIN);
711  case FF_PROFILE_MPEG2_SIMPLE: PROFILE(MPEG2_SIMPLE);
712  default: return AVERROR(EINVAL);
713  }
714  case AV_CODEC_ID_H263: PROFILE(MPEG4_PART2_ASP);
715  case AV_CODEC_ID_MPEG4:
716  switch (avctx->profile) {
717  case FF_PROFILE_MPEG4_SIMPLE: PROFILE(MPEG4_PART2_SP);
718  case FF_PROFILE_MPEG4_ADVANCED_SIMPLE: PROFILE(MPEG4_PART2_ASP);
719  default: return AVERROR(EINVAL);
720  }
721  case AV_CODEC_ID_H264:
722  switch (avctx->profile & ~FF_PROFILE_H264_INTRA) {
723  case FF_PROFILE_H264_BASELINE: PROFILE(H264_BASELINE);
724  case FF_PROFILE_H264_CONSTRAINED_BASELINE:
725  case FF_PROFILE_H264_MAIN: PROFILE(H264_MAIN);
726  case FF_PROFILE_H264_HIGH: PROFILE(H264_HIGH);
727 #ifdef VDP_DECODER_PROFILE_H264_EXTENDED
728  case FF_PROFILE_H264_EXTENDED: PROFILE(H264_EXTENDED);
729 #endif
730  default: return AVERROR(EINVAL);
731  }
732  case AV_CODEC_ID_WMV3:
733  case AV_CODEC_ID_VC1:
734  switch (avctx->profile) {
735  case FF_PROFILE_VC1_SIMPLE: PROFILE(VC1_SIMPLE);
736  case FF_PROFILE_VC1_MAIN: PROFILE(VC1_MAIN);
737  case FF_PROFILE_VC1_ADVANCED: PROFILE(VC1_ADVANCED);
738  default: return AVERROR(EINVAL);
739  }
740  }
741  return AVERROR(EINVAL);
742 #undef PROFILE
743 }
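/*
 * Illustrative sketch (not part of the original vdpau.c): before binding a
 * device, an application can combine av_vdpau_get_profile() with the
 * driver's VdpDecoderQueryCapabilities to see whether the stream's profile
 * is actually decodable. "decoder_query_caps" is an assumed function
 * pointer obtained through VdpGetProcAddress.
 */
static int example_profile_supported(AVCodecContext *avctx, VdpDevice device,
                                     VdpDecoderQueryCapabilities *decoder_query_caps)
{
    VdpDecoderProfile profile;
    VdpBool supported;
    uint32_t max_level, max_mb, max_width, max_height;

    if (av_vdpau_get_profile(avctx, &profile) < 0)
        return 0;                       /* no VDPAU profile for this stream */

    if (decoder_query_caps(device, profile, &supported, &max_level,
                           &max_mb, &max_width, &max_height) != VDP_STATUS_OK)
        return 0;

    return supported == VDP_TRUE &&
           max_width  >= (uint32_t)avctx->coded_width &&
           max_height >= (uint32_t)avctx->coded_height;
}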
744 
745 AVVDPAUContext *av_vdpau_alloc_context(void)
746 {
747  return av_mallocz(sizeof(AVVDPAUContext));
748 }
749 
750 int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
751  VdpGetProcAddress *get_proc, unsigned flags)
752 {
753  VDPAUHWContext *hwctx;
754 
755  if (flags & ~(AV_HWACCEL_FLAG_IGNORE_LEVEL|AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH))
756  return AVERROR(EINVAL);
757 
758  if (av_reallocp(&avctx->hwaccel_context, sizeof(*hwctx)))
759  return AVERROR(ENOMEM);
760 
761  hwctx = avctx->hwaccel_context;
762 
763  memset(hwctx, 0, sizeof(*hwctx));
764  hwctx->context.decoder = VDP_INVALID_HANDLE;
765  hwctx->device = device;
766  hwctx->get_proc_address = get_proc;
767  hwctx->flags = flags;
768  hwctx->reset = 1;
769  return 0;
770 }
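/*
 * Illustrative sketch (not part of the original vdpau.c): the intended use
 * of av_vdpau_bind_context() is from the application's get_format()
 * callback, after the application has created a VDPAU device (e.g. with
 * vdp_device_create_x11()). "example_device" and "example_get_proc_address"
 * are placeholders for handles owned by the application.
 */
static VdpDevice          example_device;
static VdpGetProcAddress *example_get_proc_address;

static enum AVPixelFormat example_get_format(AVCodecContext *avctx,
                                             const enum AVPixelFormat *fmts)
{
    const enum AVPixelFormat *p;

    for (p = fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == AV_PIX_FMT_VDPAU &&
            av_vdpau_bind_context(avctx, example_device,
                                  example_get_proc_address, 0) == 0)
            return AV_PIX_FMT_VDPAU;
    }
    /* No usable VDPAU format; a real application would pick a software
     * pixel format from the list here instead of giving up. */
    return AV_PIX_FMT_NONE;
}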
771 
772 /* @}*/