FFmpeg
vdpau.c
/*
 * Video Decode and Presentation API for UNIX (VDPAU) is used for
 * HW decode acceleration for MPEG-1/2, MPEG-4 ASP, H.264 and VC-1.
 *
 * Copyright (c) 2008 NVIDIA
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config_components.h"

#include <limits.h>

#include "avcodec.h"
#include "decode.h"
#include "hwaccel_internal.h"
#include "internal.h"
#include "mpegvideodec.h"
#include "vc1.h"
#include "vdpau.h"
#include "vdpau_internal.h"

// XXX: at the time of adding this ifdefery, av_assert* wasn't used outside.
// When dropping it, make sure other av_assert* were not added since then.

/**
 * @addtogroup VDPAU_Decoding
 *
 * @{
 */

static int vdpau_error(VdpStatus status)
{
    switch (status) {
    case VDP_STATUS_OK:
        return 0;
    case VDP_STATUS_NO_IMPLEMENTATION:
        return AVERROR(ENOSYS);
    case VDP_STATUS_DISPLAY_PREEMPTED:
        return AVERROR(EIO);
    case VDP_STATUS_INVALID_HANDLE:
        return AVERROR(EBADF);
    case VDP_STATUS_INVALID_POINTER:
        return AVERROR(EFAULT);
    case VDP_STATUS_RESOURCES:
        return AVERROR(ENOBUFS);
    case VDP_STATUS_HANDLE_DEVICE_MISMATCH:
        return AVERROR(EXDEV);
    case VDP_STATUS_ERROR:
        return AVERROR(EIO);
    default:
        return AVERROR(EINVAL);
    }
}

AVVDPAUContext *av_alloc_vdpaucontext(void)
{
    return av_vdpau_alloc_context();
}

#define MAKE_ACCESSORS(str, name, type, field) \
    type av_##name##_get_##field(const str *s) { return s->field; } \
    void av_##name##_set_##field(str *s, type v) { s->field = v; }
MAKE_ACCESSORS(AVVDPAUContext, vdpau_hwaccel, AVVDPAU_Render2, render2)

int av_vdpau_get_surface_parameters(AVCodecContext *avctx,
                                    VdpChromaType *type,
                                    uint32_t *width, uint32_t *height)
{
    VdpChromaType t;
    uint32_t w = avctx->coded_width;
    uint32_t h = avctx->coded_height;

    /* See <vdpau/vdpau.h> for per-type alignment constraints. */
    switch (avctx->sw_pix_fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV420P12:
        t = VDP_CHROMA_TYPE_420;
        w = (w + 1) & ~1;
        h = (h + 3) & ~3;
        break;
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUVJ422P:
        t = VDP_CHROMA_TYPE_422;
        w = (w + 1) & ~1;
        h = (h + 1) & ~1;
        break;
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVJ444P:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV444P12:
        t = VDP_CHROMA_TYPE_444;
        h = (h + 1) & ~1;
        break;
    default:
        return AVERROR(ENOSYS);
    }

    if (type)
        *type = t;
    if (width)
        *width = w;
    if (height)
        *height = h;
    return 0;
}
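
/*
 * Worked example of the rounding above, with illustrative values: a 4:2:0
 * stream with coded_width = 853 and coded_height = 482 maps to an 854x484
 * surface ((853 + 1) & ~1 = 854, (482 + 3) & ~3 = 484), so the
 * VdpVideoSurface may be slightly larger than the coded dimensions reported
 * by the codec context.
 */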

int ff_vdpau_common_frame_params(AVCodecContext *avctx,
                                 AVBufferRef *hw_frames_ctx)
{
    AVHWFramesContext *hw_frames = (AVHWFramesContext*)hw_frames_ctx->data;
    VdpChromaType type;
    uint32_t width;
    uint32_t height;

    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
        return AVERROR(EINVAL);

    hw_frames->format    = AV_PIX_FMT_VDPAU;
    hw_frames->sw_format = avctx->sw_pix_fmt;
    hw_frames->width     = width;
    hw_frames->height    = height;

    return 0;
}
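
/*
 * Usage sketch (illustrative): the per-codec VDPAU hwaccels typically expose
 * this helper as their frame_params callback so generic code can size
 * avctx->hw_frames_ctx before decoding starts, e.g.:
 *
 *     .frame_params = ff_vdpau_common_frame_params,
 */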

int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
                         int level)
{
    VDPAUHWContext *hwctx = avctx->hwaccel_context;
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    VdpVideoSurfaceQueryCapabilities *surface_query_caps;
    VdpDecoderQueryCapabilities *decoder_query_caps;
    VdpDecoderCreate *create;
    VdpGetInformationString *info;
    const char *info_string;
    void *func;
    VdpStatus status;
    VdpBool supported;
    uint32_t max_level, max_mb, max_width, max_height;
    VdpChromaType type;
    uint32_t width;
    uint32_t height;
    int ret;

    vdctx->width  = UINT32_MAX;
    vdctx->height = UINT32_MAX;

    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
        return AVERROR(ENOSYS);

    if (hwctx) {
        hwctx->reset = 0;

        if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
            vdctx->decoder = hwctx->context.decoder;
            vdctx->render  = hwctx->context.render;
            vdctx->device  = VDP_INVALID_HANDLE;
            return 0; /* Decoder created by user */
        }

        vdctx->device           = hwctx->device;
        vdctx->get_proc_address = hwctx->get_proc_address;

        if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;

        if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
            type != VDP_CHROMA_TYPE_420)
            return AVERROR(ENOSYS);
    } else {
        AVHWFramesContext *frames_ctx;
        AVVDPAUDeviceContext *dev_ctx;

        ret = ff_decode_get_hw_frames_ctx(avctx, AV_HWDEVICE_TYPE_VDPAU);
        if (ret < 0)
            return ret;

        frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        dev_ctx = frames_ctx->device_ctx->hwctx;

        vdctx->device           = dev_ctx->device;
        vdctx->get_proc_address = dev_ctx->get_proc_address;

        if (avctx->hwaccel_flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;
    }

    if (level < 0)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_GET_INFORMATION_STRING,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        info = func;

    status = info(&info_string);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    if (avctx->codec_id == AV_CODEC_ID_HEVC && strncmp(info_string, "NVIDIA ", 7) == 0 &&
        !(avctx->hwaccel_flags & AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH)) {
        int driver_version = 0;
        sscanf(info_string, "NVIDIA VDPAU Driver Shared Library %d", &driver_version);
        if (driver_version < 410) {
            av_log(avctx, AV_LOG_VERBOSE, "HEVC with NVIDIA VDPAU drivers is buggy, skipping.\n");
            return AVERROR(ENOTSUP);
        }
    }

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        surface_query_caps = func;

    status = surface_query_caps(vdctx->device, type, &supported,
                                &max_width, &max_height);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    if (supported != VDP_TRUE ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        decoder_query_caps = func;

    status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
                                &max_mb, &max_width, &max_height);
#ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
    if ((status != VDP_STATUS_OK || supported != VDP_TRUE) && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
        profile = VDP_DECODER_PROFILE_H264_MAIN;
        status  = decoder_query_caps(vdctx->device, profile, &supported,
                                     &max_level, &max_mb,
                                     &max_width, &max_height);
    }
#endif
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);

    if (supported != VDP_TRUE || max_level < level ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_CREATE,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        create = func;

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_RENDER,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        vdctx->render = func;

    status = create(vdctx->device, profile, width, height, avctx->refs,
                    &vdctx->decoder);
    if (status == VDP_STATUS_OK) {
        vdctx->width  = avctx->coded_width;
        vdctx->height = avctx->coded_height;
    }

    return vdpau_error(status);
}
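
/*
 * The repeated get_proc_address() calls above follow the standard VDPAU
 * idiom: every entry point is looked up by VdpFuncId and returned through a
 * void pointer. A minimal sketch of the same pattern (variable names
 * illustrative):
 *
 *     VdpDecoderCreate *decoder_create;
 *     void *fn;
 *     if (get_proc_address(device, VDP_FUNC_ID_DECODER_CREATE, &fn) ==
 *         VDP_STATUS_OK)
 *         decoder_create = fn;
 */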

int ff_vdpau_common_uninit(AVCodecContext *avctx)
{
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    VdpDecoderDestroy *destroy;
    void *func;
    VdpStatus status;

    if (vdctx->device == VDP_INVALID_HANDLE)
        return 0; /* Decoder created and destroyed by user */
    if (vdctx->width == UINT32_MAX && vdctx->height == UINT32_MAX)
        return 0;

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_DECODER_DESTROY, &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        destroy = func;

    status = destroy(vdctx->decoder);
    return vdpau_error(status);
}

static int ff_vdpau_common_reinit(AVCodecContext *avctx)
{
    VDPAUHWContext *hwctx = avctx->hwaccel_context;
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;

    if (vdctx->device == VDP_INVALID_HANDLE)
        return 0; /* Decoder created by user */
    if (avctx->coded_width == vdctx->width &&
        avctx->coded_height == vdctx->height && (!hwctx || !hwctx->reset))
        return 0;

    FF_HW_SIMPLE_CALL(avctx, uninit);
    return FF_HW_SIMPLE_CALL(avctx, init);
}
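
/*
 * The reinit path above is called from ff_vdpau_common_end_frame() before
 * each render: it destroys and re-creates the decoder when the coded
 * dimensions have changed or when the user requested a reset through
 * av_vdpau_bind_context(); otherwise it returns immediately.
 */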

int ff_vdpau_common_start_frame(struct vdpau_picture_context *pic_ctx,
                                av_unused const uint8_t *buffer,
                                av_unused uint32_t size)
{
    pic_ctx->bitstream_buffers_allocated = 0;
    pic_ctx->bitstream_buffers_used      = 0;
    pic_ctx->bitstream_buffers           = NULL;
    return 0;
}

int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
                              struct vdpau_picture_context *pic_ctx)
{
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    AVVDPAUContext *hwctx = avctx->hwaccel_context;
    VdpVideoSurface surf = ff_vdpau_get_surface_id(frame);
    VdpStatus status;
    int val;

    val = ff_vdpau_common_reinit(avctx);
    if (val < 0)
        return val;

    if (hwctx && !hwctx->render && hwctx->render2) {
        status = hwctx->render2(avctx, frame, (void *)&pic_ctx->info,
                                pic_ctx->bitstream_buffers_used, pic_ctx->bitstream_buffers);
    } else
        status = vdctx->render(vdctx->decoder, surf, &pic_ctx->info,
                               pic_ctx->bitstream_buffers_used,
                               pic_ctx->bitstream_buffers);

    av_freep(&pic_ctx->bitstream_buffers);

    return vdpau_error(status);
}
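
/*
 * Per-frame flow as driven by the per-codec hwaccels (sketch, with
 * illustrative variable names): start_frame() resets the bitstream table,
 * ff_vdpau_add_buffer() appends one VdpBitstreamBuffer per slice, and
 * end_frame() submits the whole table in a single render call before
 * freeing it:
 *
 *     ff_vdpau_common_start_frame(pic_ctx, buf, size);
 *     ff_vdpau_add_buffer(pic_ctx, slice_buf, slice_size);  // once per slice
 *     ff_vdpau_common_end_frame(avctx, frame, pic_ctx);
 */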

#if CONFIG_MPEG1_VDPAU_HWACCEL || \
    CONFIG_MPEG2_VDPAU_HWACCEL || CONFIG_MPEG4_VDPAU_HWACCEL || \
    CONFIG_VC1_VDPAU_HWACCEL || CONFIG_WMV3_VDPAU_HWACCEL
int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
{
    MpegEncContext *s = avctx->priv_data;
    Picture *pic = s->current_picture_ptr;
    struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
    int val;

    val = ff_vdpau_common_end_frame(avctx, pic->f, pic_ctx);
    if (val < 0)
        return val;

    ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
    return 0;
}
#endif

int ff_vdpau_add_buffer(struct vdpau_picture_context *pic_ctx,
                        const uint8_t *buf, uint32_t size)
{
    VdpBitstreamBuffer *buffers = pic_ctx->bitstream_buffers;

    buffers = av_fast_realloc(buffers, &pic_ctx->bitstream_buffers_allocated,
                              (pic_ctx->bitstream_buffers_used + 1) * sizeof(*buffers));
    if (!buffers)
        return AVERROR(ENOMEM);

    pic_ctx->bitstream_buffers = buffers;
    buffers                   += pic_ctx->bitstream_buffers_used++;

    buffers->struct_version  = VDP_BITSTREAM_BUFFER_VERSION;
    buffers->bitstream       = buf;
    buffers->bitstream_bytes = size;
    return 0;
}
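
/*
 * av_fast_realloc() tracks the allocated size in bitstream_buffers_allocated
 * and over-allocates on growth, so appending one entry per slice stays
 * amortized O(1) even for frames with many slices. Only the table grows; the
 * slice data itself is referenced by pointer, not copied.
 */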

AVVDPAUContext *av_vdpau_alloc_context(void)
{
    return av_mallocz(sizeof(VDPAUHWContext));
}

int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
                          VdpGetProcAddress *get_proc, unsigned flags)
{
    VDPAUHWContext *hwctx;

    if (flags & ~(AV_HWACCEL_FLAG_IGNORE_LEVEL|AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH))
        return AVERROR(EINVAL);

    if (av_reallocp(&avctx->hwaccel_context, sizeof(*hwctx)))
        return AVERROR(ENOMEM);

    hwctx = avctx->hwaccel_context;

    memset(hwctx, 0, sizeof(*hwctx));
    hwctx->context.decoder  = VDP_INVALID_HANDLE;
    hwctx->device           = device;
    hwctx->get_proc_address = get_proc;
    hwctx->flags            = flags;
    hwctx->reset            = 1;
    return 0;
}
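
/*
 * Minimal client-side sketch (illustrative only; assumes an X11 VdpDevice and
 * omits error handling). The device is bound before the codec is opened, or
 * from the get_format() callback:
 *
 *     VdpDevice device;
 *     VdpGetProcAddress *get_proc;
 *     vdp_device_create_x11(display, screen, &device, &get_proc);
 *     av_vdpau_bind_context(avctx, device, get_proc,
 *                           AV_HWACCEL_FLAG_IGNORE_LEVEL);
 */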

/* @}*/
MpegEncContext.
Definition: mpegvideo.h:67