FFmpeg
vdpau.c
Go to the documentation of this file.
1 /*
2  * Video Decode and Presentation API for UNIX (VDPAU) is used for
3  * HW decode acceleration for MPEG-1/2, MPEG-4 ASP, H.264 and VC-1.
4  *
5  * Copyright (c) 2008 NVIDIA
6  *
7  * This file is part of FFmpeg.
8  *
9  * FFmpeg is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 2.1 of the License, or (at your option) any later version.
13  *
14  * FFmpeg is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with FFmpeg; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  */
23 
24 #include <limits.h>
25 
26 #include "avcodec.h"
27 #include "decode.h"
28 #include "internal.h"
29 #include "h264dec.h"
30 #include "vc1.h"
31 #include "vdpau.h"
32 #include "vdpau_internal.h"
33 
34 // XXX: at the time of adding this ifdefery, av_assert* wasn't used outside.
35 // When dropping it, make sure other av_assert* were not added since then.
36 
37 /**
38  * @addtogroup VDPAU_Decoding
39  *
40  * @{
41  */
42 
43 static int vdpau_error(VdpStatus status)
44 {
45  switch (status) {
46  case VDP_STATUS_OK:
47  return 0;
48  case VDP_STATUS_NO_IMPLEMENTATION:
49  return AVERROR(ENOSYS);
50  case VDP_STATUS_DISPLAY_PREEMPTED:
51  return AVERROR(EIO);
52  case VDP_STATUS_INVALID_HANDLE:
53  return AVERROR(EBADF);
54  case VDP_STATUS_INVALID_POINTER:
55  return AVERROR(EFAULT);
56  case VDP_STATUS_RESOURCES:
57  return AVERROR(ENOBUFS);
58  case VDP_STATUS_HANDLE_DEVICE_MISMATCH:
59  return AVERROR(EXDEV);
60  case VDP_STATUS_ERROR:
61  return AVERROR(EIO);
62  default:
63  return AVERROR(EINVAL);
64  }
65 }
66 
68 {
69  return av_vdpau_alloc_context();
70 }
71 
72 MAKE_ACCESSORS(AVVDPAUContext, vdpau_hwaccel, AVVDPAU_Render2, render2)
73 
75  VdpChromaType *type,
76  uint32_t *width, uint32_t *height)
77 {
78  VdpChromaType t;
79  uint32_t w = avctx->coded_width;
80  uint32_t h = avctx->coded_height;
81 
82  /* See <vdpau/vdpau.h> for per-type alignment constraints. */
83  switch (avctx->sw_pix_fmt) {
84  case AV_PIX_FMT_YUV420P:
88  t = VDP_CHROMA_TYPE_420;
89  w = (w + 1) & ~1;
90  h = (h + 3) & ~3;
91  break;
92  case AV_PIX_FMT_YUV422P:
94  t = VDP_CHROMA_TYPE_422;
95  w = (w + 1) & ~1;
96  h = (h + 1) & ~1;
97  break;
98  case AV_PIX_FMT_YUV444P:
102  t = VDP_CHROMA_TYPE_444;
103  h = (h + 1) & ~1;
104  break;
105  default:
106  return AVERROR(ENOSYS);
107  }
108 
109  if (type)
110  *type = t;
111  if (width)
112  *width = w;
113  if (height)
114  *height = h;
115  return 0;
116 }
117 
119  AVBufferRef *hw_frames_ctx)
120 {
121  AVHWFramesContext *hw_frames = (AVHWFramesContext*)hw_frames_ctx->data;
122  VdpChromaType type;
123  uint32_t width;
124  uint32_t height;
125 
127  return AVERROR(EINVAL);
128 
129  hw_frames->format = AV_PIX_FMT_VDPAU;
130  hw_frames->sw_format = avctx->sw_pix_fmt;
131  hw_frames->width = width;
132  hw_frames->height = height;
133 
134  return 0;
135 }
136 
137 int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
138  int level)
139 {
140  VDPAUHWContext *hwctx = avctx->hwaccel_context;
141  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
142  VdpVideoSurfaceQueryCapabilities *surface_query_caps;
143  VdpDecoderQueryCapabilities *decoder_query_caps;
144  VdpDecoderCreate *create;
145  VdpGetInformationString *info;
146  const char *info_string;
147  void *func;
148  VdpStatus status;
149  VdpBool supported;
150  uint32_t max_level, max_mb, max_width, max_height;
151  VdpChromaType type;
152  uint32_t width;
153  uint32_t height;
154  int ret;
155 
156  vdctx->width = UINT32_MAX;
157  vdctx->height = UINT32_MAX;
158 
159  if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
160  return AVERROR(ENOSYS);
161 
162  if (hwctx) {
163  hwctx->reset = 0;
164 
165  if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
166  vdctx->decoder = hwctx->context.decoder;
167  vdctx->render = hwctx->context.render;
168  vdctx->device = VDP_INVALID_HANDLE;
169  return 0; /* Decoder created by user */
170  }
171 
172  vdctx->device = hwctx->device;
173  vdctx->get_proc_address = hwctx->get_proc_address;
174 
175  if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
176  level = 0;
177 
178  if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
179  type != VDP_CHROMA_TYPE_420)
180  return AVERROR(ENOSYS);
181  } else {
182  AVHWFramesContext *frames_ctx;
183  AVVDPAUDeviceContext *dev_ctx;
184 
186  if (ret < 0)
187  return ret;
188 
189  frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
190  dev_ctx = frames_ctx->device_ctx->hwctx;
191 
192  vdctx->device = dev_ctx->device;
193  vdctx->get_proc_address = dev_ctx->get_proc_address;
194 
196  level = 0;
197  }
198 
199  if (level < 0)
200  return AVERROR(ENOTSUP);
201 
202  status = vdctx->get_proc_address(vdctx->device,
203  VDP_FUNC_ID_GET_INFORMATION_STRING,
204  &func);
205  if (status != VDP_STATUS_OK)
206  return vdpau_error(status);
207  else
208  info = func;
209 
210  status = info(&info_string);
211  if (status != VDP_STATUS_OK)
212  return vdpau_error(status);
213  if (avctx->codec_id == AV_CODEC_ID_HEVC && strncmp(info_string, "NVIDIA ", 7) == 0 &&
215  int driver_version = 0;
216  sscanf(info_string, "NVIDIA VDPAU Driver Shared Library %d", &driver_version);
217  if (driver_version < 410) {
218  av_log(avctx, AV_LOG_VERBOSE, "HEVC with NVIDIA VDPAU drivers is buggy, skipping.\n");
219  return AVERROR(ENOTSUP);
220  }
221  }
222 
223  status = vdctx->get_proc_address(vdctx->device,
224  VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
225  &func);
226  if (status != VDP_STATUS_OK)
227  return vdpau_error(status);
228  else
229  surface_query_caps = func;
230 
231  status = surface_query_caps(vdctx->device, type, &supported,
232  &max_width, &max_height);
233  if (status != VDP_STATUS_OK)
234  return vdpau_error(status);
235  if (supported != VDP_TRUE ||
236  max_width < width || max_height < height)
237  return AVERROR(ENOTSUP);
238 
239  status = vdctx->get_proc_address(vdctx->device,
240  VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
241  &func);
242  if (status != VDP_STATUS_OK)
243  return vdpau_error(status);
244  else
245  decoder_query_caps = func;
246 
247  status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
248  &max_mb, &max_width, &max_height);
249 #ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
250  if ((status != VDP_STATUS_OK || supported != VDP_TRUE) && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
251  profile = VDP_DECODER_PROFILE_H264_MAIN;
252  status = decoder_query_caps(vdctx->device, profile, &supported,
253  &max_level, &max_mb,
254  &max_width, &max_height);
255  }
256 #endif
257  if (status != VDP_STATUS_OK)
258  return vdpau_error(status);
259 
260  if (supported != VDP_TRUE || max_level < level ||
261  max_width < width || max_height < height)
262  return AVERROR(ENOTSUP);
263 
264  status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_CREATE,
265  &func);
266  if (status != VDP_STATUS_OK)
267  return vdpau_error(status);
268  else
269  create = func;
270 
271  status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_RENDER,
272  &func);
273  if (status != VDP_STATUS_OK)
274  return vdpau_error(status);
275  else
276  vdctx->render = func;
277 
278  status = create(vdctx->device, profile, width, height, avctx->refs,
279  &vdctx->decoder);
280  if (status == VDP_STATUS_OK) {
281  vdctx->width = avctx->coded_width;
282  vdctx->height = avctx->coded_height;
283  }
284 
285  return vdpau_error(status);
286 }
287 
289 {
290  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
291  VdpDecoderDestroy *destroy;
292  void *func;
293  VdpStatus status;
294 
295  if (vdctx->device == VDP_INVALID_HANDLE)
296  return 0; /* Decoder created and destroyed by user */
297  if (vdctx->width == UINT32_MAX && vdctx->height == UINT32_MAX)
298  return 0;
299 
300  status = vdctx->get_proc_address(vdctx->device,
301  VDP_FUNC_ID_DECODER_DESTROY, &func);
302  if (status != VDP_STATUS_OK)
303  return vdpau_error(status);
304  else
305  destroy = func;
306 
307  status = destroy(vdctx->decoder);
308  return vdpau_error(status);
309 }
310 
312 {
313  VDPAUHWContext *hwctx = avctx->hwaccel_context;
314  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
315 
316  if (vdctx->device == VDP_INVALID_HANDLE)
317  return 0; /* Decoder created by user */
318  if (avctx->coded_width == vdctx->width &&
319  avctx->coded_height == vdctx->height && (!hwctx || !hwctx->reset))
320  return 0;
321 
322  avctx->hwaccel->uninit(avctx);
323  return avctx->hwaccel->init(avctx);
324 }
325 
327  av_unused const uint8_t *buffer,
328  av_unused uint32_t size)
329 {
330  pic_ctx->bitstream_buffers_allocated = 0;
331  pic_ctx->bitstream_buffers_used = 0;
332  pic_ctx->bitstream_buffers = NULL;
333  return 0;
334 }
335 
337  struct vdpau_picture_context *pic_ctx)
338 {
339  VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
340  AVVDPAUContext *hwctx = avctx->hwaccel_context;
341  VdpVideoSurface surf = ff_vdpau_get_surface_id(frame);
342  VdpStatus status;
343  int val;
344 
345  val = ff_vdpau_common_reinit(avctx);
346  if (val < 0)
347  return val;
348 
349  if (hwctx && !hwctx->render && hwctx->render2) {
350  status = hwctx->render2(avctx, frame, (void *)&pic_ctx->info,
351  pic_ctx->bitstream_buffers_used, pic_ctx->bitstream_buffers);
352  } else
353  status = vdctx->render(vdctx->decoder, surf, &pic_ctx->info,
354  pic_ctx->bitstream_buffers_used,
355  pic_ctx->bitstream_buffers);
356 
357  av_freep(&pic_ctx->bitstream_buffers);
358 
359  return vdpau_error(status);
360 }
361 
#if CONFIG_MPEG1_VDPAU_HWACCEL || \
    CONFIG_MPEG2_VDPAU_HWACCEL || CONFIG_MPEG4_VDPAU_HWACCEL || \
    CONFIG_VC1_VDPAU_HWACCEL || CONFIG_WMV3_VDPAU_HWACCEL
/**
 * end_frame callback shared by the MPEG-1/2/4 and VC-1/WMV3 hwaccels:
 * submit the current picture, then emit the draw_horiz_band callback
 * for the whole frame.
 *
 * NOTE(review): the signature and the ff_mpeg_draw_horiz_band() call
 * were dropped by the doc extraction; restored to match upstream.
 */
int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
{
    MpegEncContext *s = avctx->priv_data;
    Picture *pic = s->current_picture_ptr;
    struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
    int val;

    val = ff_vdpau_common_end_frame(avctx, pic->f, pic_ctx);
    if (val < 0)
        return val;

    ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
    return 0;
}
#endif
380 
382  const uint8_t *buf, uint32_t size)
383 {
384  VdpBitstreamBuffer *buffers = pic_ctx->bitstream_buffers;
385 
386  buffers = av_fast_realloc(buffers, &pic_ctx->bitstream_buffers_allocated,
387  (pic_ctx->bitstream_buffers_used + 1) * sizeof(*buffers));
388  if (!buffers)
389  return AVERROR(ENOMEM);
390 
391  pic_ctx->bitstream_buffers = buffers;
392  buffers += pic_ctx->bitstream_buffers_used++;
393 
394  buffers->struct_version = VDP_BITSTREAM_BUFFER_VERSION;
395  buffers->bitstream = buf;
396  buffers->bitstream_bytes = size;
397  return 0;
398 }
399 
#if FF_API_VDPAU_PROFILE
/**
 * Map the codec id / codec profile of @p avctx to a VDPAU decoder
 * profile. Deprecated public API, kept under FF_API_VDPAU_PROFILE.
 *
 * @return 0 and sets *profile on success, AVERROR(EINVAL) if no
 *         matching VDPAU profile exists
 *
 * NOTE(review): the AV_CODEC_ID_MPEG2VIDEO and H.264 constrained
 * baseline case lines were dropped by the doc extraction; restored to
 * match upstream FFmpeg.
 */
int av_vdpau_get_profile(AVCodecContext *avctx, VdpDecoderProfile *profile)
{
#define PROFILE(prof)                      \
do {                                       \
    *profile = VDP_DECODER_PROFILE_##prof; \
    return 0;                              \
} while (0)

    switch (avctx->codec_id) {
    case AV_CODEC_ID_MPEG1VIDEO:               PROFILE(MPEG1);
    case AV_CODEC_ID_MPEG2VIDEO:
        switch (avctx->profile) {
        case FF_PROFILE_MPEG2_MAIN:            PROFILE(MPEG2_MAIN);
        case FF_PROFILE_MPEG2_SIMPLE:          PROFILE(MPEG2_SIMPLE);
        default:                               return AVERROR(EINVAL);
        }
    case AV_CODEC_ID_H263:                     PROFILE(MPEG4_PART2_ASP);
    case AV_CODEC_ID_MPEG4:
        switch (avctx->profile) {
        case FF_PROFILE_MPEG4_SIMPLE:          PROFILE(MPEG4_PART2_SP);
        case FF_PROFILE_MPEG4_ADVANCED_SIMPLE: PROFILE(MPEG4_PART2_ASP);
        default:                               return AVERROR(EINVAL);
        }
    case AV_CODEC_ID_H264:
        /* Intra-only profiles decode with the corresponding full profile. */
        switch (avctx->profile & ~FF_PROFILE_H264_INTRA) {
        case FF_PROFILE_H264_BASELINE:         PROFILE(H264_BASELINE);
        case FF_PROFILE_H264_CONSTRAINED_BASELINE:
                                               PROFILE(H264_CONSTRAINED_BASELINE);
        case FF_PROFILE_H264_MAIN:             PROFILE(H264_MAIN);
        case FF_PROFILE_H264_HIGH:             PROFILE(H264_HIGH);
#ifdef VDP_DECODER_PROFILE_H264_EXTENDED
        case FF_PROFILE_H264_EXTENDED:         PROFILE(H264_EXTENDED);
#endif
        default:                               return AVERROR(EINVAL);
        }
    case AV_CODEC_ID_WMV3:
    case AV_CODEC_ID_VC1:
        switch (avctx->profile) {
        case FF_PROFILE_VC1_SIMPLE:            PROFILE(VC1_SIMPLE);
        case FF_PROFILE_VC1_MAIN:              PROFILE(VC1_MAIN);
        case FF_PROFILE_VC1_ADVANCED:          PROFILE(VC1_ADVANCED);
        default:                               return AVERROR(EINVAL);
        }
    }
    return AVERROR(EINVAL);
#undef PROFILE
}
#endif /* FF_API_VDPAU_PROFILE */
448 
450 {
451  return av_mallocz(sizeof(VDPAUHWContext));
452 }
453 
454 int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
455  VdpGetProcAddress *get_proc, unsigned flags)
456 {
457  VDPAUHWContext *hwctx;
458 
460  return AVERROR(EINVAL);
461 
462  if (av_reallocp(&avctx->hwaccel_context, sizeof(*hwctx)))
463  return AVERROR(ENOMEM);
464 
465  hwctx = avctx->hwaccel_context;
466 
467  memset(hwctx, 0, sizeof(*hwctx));
468  hwctx->context.decoder = VDP_INVALID_HANDLE;
469  hwctx->device = device;
470  hwctx->get_proc_address = get_proc;
471  hwctx->flags = flags;
472  hwctx->reset = 1;
473  return 0;
474 }
475 
476 /* @}*/
#define FF_PROFILE_H264_MAIN
Definition: avcodec.h:1900
#define FF_PROFILE_MPEG4_SIMPLE
Definition: avcodec.h:1919
#define NULL
Definition: coverity.c:32
This struct is allocated as AVHWDeviceContext.hwctx.
#define FF_PROFILE_MPEG2_MAIN
Definition: avcodec.h:1892
This structure describes decoded (raw) audio or video data.
Definition: frame.h:308
VdpDevice device
VDPAU device handle.
int coded_width
Bitstream width / height, may be different from width/height e.g.
Definition: avcodec.h:714
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
VdpGetProcAddress * get_proc_address
VdpDecoder decoder
VDPAU decoder handle.
int ff_vdpau_common_start_frame(struct vdpau_picture_context *pic_ctx, av_unused const uint8_t *buffer, av_unused uint32_t size)
Definition: vdpau.c:326
VdpGetProcAddress * get_proc_address
#define FF_PROFILE_H264_INTRA
Definition: avcodec.h:1896
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
int bitstream_buffers_used
Useful bitstream buffers in the bitstream buffers table.
GLint GLenum type
Definition: opengl_enc.c:104
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:209
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:403
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:237
Public libavcodec VDPAU header.
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:1690
int profile
profile
Definition: avcodec.h:1859
AVVDPAUContext * av_vdpau_alloc_context(void)
Allocate an AVVDPAUContext.
Definition: vdpau.c:449
AVVDPAUContext * av_alloc_vdpaucontext(void)
allocation function for AVVDPAUContext
Definition: vdpau.c:67
int(* uninit)(AVCodecContext *avctx)
Uninitialize the hwaccel private data.
Definition: avcodec.h:2548
int av_vdpau_get_surface_parameters(AVCodecContext *avctx, VdpChromaType *type, uint32_t *width, uint32_t *height)
Gets the parameters to create an adequate VDPAU video surface for the codec context using VDPAU hardw...
Definition: vdpau.c:74
void ff_mpeg_draw_horiz_band(MpegEncContext *s, int y, int h)
Definition: mpegvideo.c:2247
VdpBitstreamBuffer * bitstream_buffers
Table of bitstream buffers.
#define FF_PROFILE_H264_BASELINE
Definition: avcodec.h:1898
int ff_vdpau_common_uninit(AVCodecContext *avctx)
Definition: vdpau.c:288
uint8_t
void * hwaccel_context
Hardware accelerator context.
Definition: avcodec.h:1702
VdpGetProcAddress * get_proc_address
VDPAU device driver.
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
#define height
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:79
VdpDecoder decoder
VDPAU decoder handle.
Definition: vdpau.h:87
#define FF_PROFILE_H264_EXTENDED
Definition: avcodec.h:1901
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:210
int(* init)(AVCodecContext *avctx)
Initialize the hwaccel private data.
Definition: avcodec.h:2540
ptrdiff_t size
Definition: opengl_enc.c:100
AVVDPAUContext context
#define av_log(a,...)
int av_vdpau_get_profile(AVCodecContext *avctx, VdpDecoderProfile *profile)
Get a decoder profile that should be used for initializing a VDPAU decoder.
Definition: vdpau.c:401
VdpDevice device
int(* AVVDPAU_Render2)(struct AVCodecContext *, struct AVFrame *, const VdpPictureInfo *, uint32_t, const VdpBitstreamBuffer *)
Definition: vdpau.h:63
#define AV_HWACCEL_FLAG_IGNORE_LEVEL
Hardware acceleration should be used for decoding even if the codec level used is unknown or higher t...
Definition: avcodec.h:2586
#define AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH
Hardware acceleration can output YUV pixel formats with a different chroma sampling than 4:2:0 and/or...
Definition: avcodec.h:2592
#define PROFILE(prof)
int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile, int level)
Definition: vdpau.c:137
This structure is used to share data between the libavcodec library and the client video application...
Definition: vdpau.h:81
#define MAKE_ACCESSORS(str, name, type, field)
Definition: internal.h:91
#define FF_PROFILE_H264_HIGH
Definition: avcodec.h:1902
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:402
int bitstream_buffers_allocated
Allocated size of the bitstream_buffers table.
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
int refs
number of reference frames
Definition: avcodec.h:1114
MIPS optimizations info
Definition: mips.txt:2
uint32_t width
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:149
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:78
#define width
uint8_t w
Definition: llviddspenc.c:38
#define FF_PROFILE_VC1_MAIN
Definition: avcodec.h:1915
AVBufferRef * hw_frames_ctx
A reference to the AVHWFramesContext describing the input (for encoding) or output (decoding) frames...
Definition: avcodec.h:2226
Picture * current_picture_ptr
pointer to the current picture
Definition: mpegvideo.h:184
Picture.
Definition: mpegpicture.h:45
void * hwaccel_picture_private
Hardware accelerator private data.
Definition: mpegpicture.h:77
static int vdpau_error(VdpStatus status)
Definition: vdpau.c:43
static struct ResampleContext * create(struct ResampleContext *c, int out_rate, int in_rate, int filter_size, int phase_shift, int linear, double cutoff, enum AVSampleFormat format, enum SwrFilterType filter_type, double kaiser_beta, double precision, int cheby, int exact_rational)
Definition: soxr_resample.c:32
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
#define s(width, name)
Definition: cbs_vp9.c:257
H.264 / AVC / MPEG-4 part10 codec.
void * av_fast_realloc(void *ptr, unsigned int *size, size_t min_size)
Reallocate the given buffer if it is not large enough, otherwise do nothing.
Definition: mem.c:478
int ff_vdpau_common_frame_params(AVCodecContext *avctx, AVBufferRef *hw_frames_ctx)
Definition: vdpau.c:118
#define FF_PROFILE_VC1_SIMPLE
Definition: avcodec.h:1914
preferred ID for MPEG-1/2 video decoding
Definition: codec_id.h:51
if(ret)
static void destroy(struct ResampleContext **c)
Definition: soxr_resample.c:64
int av_reallocp(void *ptr, size_t size)
Allocate, reallocate, or free a block of memory through a pointer to a pointer.
Definition: mem.c:161
Libavcodec external API header.
enum AVCodecID codec_id
Definition: avcodec.h:536
int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
main external API structure.
Definition: avcodec.h:526
#define FF_PROFILE_MPEG4_ADVANCED_SIMPLE
Definition: avcodec.h:1934
uint8_t * data
The data buffer.
Definition: buffer.h:89
VdpDecoderRender * render
VDPAU decoder render callback.
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:399
int coded_height
Definition: avcodec.h:714
int(* func)(AVBPrint *dst, const char *in, const char *arg)
Definition: jacosubdec.c:67
struct AVFrame * f
Definition: mpegpicture.h:46
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:197
they must not be accessed directly The fifo field contains the frames that are queued in the input for processing by the filter The status_in and status_out fields contains the queued status(EOF or error) of the link
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
VdpDecoderRender * render
VDPAU decoder render callback.
Definition: vdpau.h:94
mfxU16 profile
Definition: qsvenc.c:45
#define flags(name, subs,...)
Definition: cbs_av1.c:560
int ff_decode_get_hw_frames_ctx(AVCodecContext *avctx, enum AVHWDeviceType dev_type)
Make sure avctx.hw_frames_ctx is set.
Definition: decode.c:1143
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:406
int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device, VdpGetProcAddress *get_proc, unsigned flags)
Associate a VDPAU device with a codec context for hardware acceleration.
Definition: vdpau.c:454
uint8_t level
Definition: svq3.c:205
MpegEncContext.
Definition: mpegvideo.h:81
struct AVCodecContext * avctx
Definition: mpegvideo.h:98
A reference to a data buffer.
Definition: buffer.h:81
int
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
AVVDPAU_Render2 render2
Definition: vdpau.h:96
common internal api header.
GLuint * buffers
Definition: opengl_enc.c:98
int ff_vdpau_add_buffer(struct vdpau_picture_context *pic_ctx, const uint8_t *buf, uint32_t size)
Definition: vdpau.c:381
void * hwaccel_priv_data
hwaccel-specific private data
Definition: internal.h:169
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:80
unsigned char flags
int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame, struct vdpau_picture_context *pic_ctx)
Definition: vdpau.c:336
void * priv_data
Definition: avcodec.h:553
#define FF_PROFILE_VC1_ADVANCED
Definition: avcodec.h:1917
struct AVCodecInternal * internal
Private context used for internal data.
Definition: avcodec.h:561
union VDPAUPictureInfo info
VDPAU picture information.
#define FF_PROFILE_MPEG2_SIMPLE
Definition: avcodec.h:1893
#define av_freep(p)
#define AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH
Hardware acceleration should still be attempted for decoding when the codec profile does not match th...
Definition: avcodec.h:2606
int hwaccel_flags
Bit set of AV_HWACCEL_FLAG_* flags, which affect hardware accelerated decoding (if active)...
Definition: avcodec.h:2287
#define FF_PROFILE_H264_CONSTRAINED_BASELINE
Definition: avcodec.h:1899
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Frame references ownership and permissions
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
static double val(void *priv, double ch)
Definition: aeval.c:76
uint32_t height
static int ff_vdpau_common_reinit(AVCodecContext *avctx)
Definition: vdpau.c:311
enum AVPixelFormat sw_pix_fmt
Nominal unaccelerated pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:2076
GLuint buffer
Definition: opengl_enc.c:101
#define av_unused
Definition: attributes.h:131
static uintptr_t ff_vdpau_get_surface_id(AVFrame *pic)
Extract VdpVideoSurface from an AVFrame.