FFmpeg
hwcontext_vdpau.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include <stdint.h>
22 #include <string.h>
23 
24 #include <vdpau/vdpau.h>
25 
26 #include "buffer.h"
27 #include "common.h"
28 #include "hwcontext.h"
29 #include "hwcontext_internal.h"
30 #include "hwcontext_vdpau.h"
31 #include "mem.h"
32 #include "pixfmt.h"
33 #include "pixdesc.h"
34 
35 typedef struct VDPAUPixFmtMap {
36  VdpYCbCrFormat vdpau_fmt;
39 
40 static const VDPAUPixFmtMap pix_fmts_420[] = {
41  { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12 },
42  { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
43 #ifdef VDP_YCBCR_FORMAT_P016
44  { VDP_YCBCR_FORMAT_P016, AV_PIX_FMT_P016 },
45  { VDP_YCBCR_FORMAT_P010, AV_PIX_FMT_P010 },
46 #endif
47  { 0, AV_PIX_FMT_NONE, },
48 };
49 
50 static const VDPAUPixFmtMap pix_fmts_422[] = {
51  { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16 },
52  { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
53  { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
54  { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
55  { 0, AV_PIX_FMT_NONE, },
56 };
57 
58 static const VDPAUPixFmtMap pix_fmts_444[] = {
59 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
60  { VDP_YCBCR_FORMAT_Y_U_V_444, AV_PIX_FMT_YUV444P },
61 #endif
62 #ifdef VDP_YCBCR_FORMAT_P016
63  {VDP_YCBCR_FORMAT_Y_U_V_444_16, AV_PIX_FMT_YUV444P16},
64 #endif
65  { 0, AV_PIX_FMT_NONE, },
66 };
67 
68 static const struct {
69  VdpChromaType chroma_type;
72 } vdpau_pix_fmts[] = {
73  { VDP_CHROMA_TYPE_420, AV_PIX_FMT_YUV420P, pix_fmts_420 },
74  { VDP_CHROMA_TYPE_422, AV_PIX_FMT_YUV422P, pix_fmts_422 },
75  { VDP_CHROMA_TYPE_444, AV_PIX_FMT_YUV444P, pix_fmts_444 },
76 #ifdef VDP_YCBCR_FORMAT_P016
77  { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P10, pix_fmts_420 },
78  { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P12, pix_fmts_420 },
79  { VDP_CHROMA_TYPE_422_16, AV_PIX_FMT_YUV422P10, pix_fmts_422 },
80  { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P10, pix_fmts_444 },
81  { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P12, pix_fmts_444 },
82 #endif
83 };
84 
85 typedef struct VDPAUDeviceContext {
86  VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
87  VdpVideoSurfaceGetBitsYCbCr *get_data;
88  VdpVideoSurfacePutBitsYCbCr *put_data;
89  VdpVideoSurfaceCreate *surf_create;
90  VdpVideoSurfaceDestroy *surf_destroy;
91 
93  int nb_pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)];
95 
96 typedef struct VDPAUFramesContext {
97  VdpVideoSurfaceGetBitsYCbCr *get_data;
98  VdpVideoSurfacePutBitsYCbCr *put_data;
99  VdpChromaType chroma_type;
101 
102  const enum AVPixelFormat *pix_fmts;
105 
106 static int count_pixfmts(const VDPAUPixFmtMap *map)
107 {
108  int count = 0;
109  while (map->pix_fmt != AV_PIX_FMT_NONE) {
110  map++;
111  count++;
112  }
113  return count;
114 }
115 
117 {
118  AVVDPAUDeviceContext *hwctx = ctx->hwctx;
119  VDPAUDeviceContext *priv = ctx->internal->priv;
120  int i;
121 
122  for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
123  const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
124  int nb_pix_fmts;
125 
126  nb_pix_fmts = count_pixfmts(map);
127  priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
128  if (!priv->pix_fmts[i])
129  return AVERROR(ENOMEM);
130 
131  nb_pix_fmts = 0;
132  while (map->pix_fmt != AV_PIX_FMT_NONE) {
133  VdpBool supported;
134  VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
135  map->vdpau_fmt, &supported);
136  if (err == VDP_STATUS_OK && supported)
137  priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
138  map++;
139  }
140  priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
141  priv->nb_pix_fmts[i] = nb_pix_fmts;
142  }
143 
144  return 0;
145 }
146 
/**
 * Resolve one VDPAU entry point through hwctx->get_proc_address and store it
 * in "result".  On failure it logs and executes "return AVERROR_UNKNOWN", so
 * it may only be used inside int-returning functions that have "ctx",
 * "hwctx" and a VdpStatus "err" in scope.
 */
#define GET_CALLBACK(id, result)                                            \
do {                                                                        \
    void *tmp;                                                              \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                 \
    if (err != VDP_STATUS_OK) {                                             \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n"); \
        return AVERROR_UNKNOWN;                                             \
    }                                                                       \
    result = tmp;                                                           \
} while (0)
157 
159 {
160  AVVDPAUDeviceContext *hwctx = ctx->hwctx;
161  VDPAUDeviceContext *priv = ctx->internal->priv;
162  VdpStatus err;
163  int ret;
164 
165  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
166  priv->get_transfer_caps);
167  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
168  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
169  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE, priv->surf_create);
170  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY, priv->surf_destroy);
171 
172  ret = vdpau_init_pixmfts(ctx);
173  if (ret < 0) {
174  av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
175  return ret;
176  }
177 
178  return 0;
179 }
180 
182 {
183  VDPAUDeviceContext *priv = ctx->internal->priv;
184  int i;
185 
186  for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
187  av_freep(&priv->pix_fmts[i]);
188 }
189 
191  const void *hwconfig,
192  AVHWFramesConstraints *constraints)
193 {
194  VDPAUDeviceContext *priv = ctx->internal->priv;
195  int nb_sw_formats = 0;
196  int i;
197 
199  sizeof(*constraints->valid_sw_formats));
200  if (!constraints->valid_sw_formats)
201  return AVERROR(ENOMEM);
202 
203  for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
204  if (priv->nb_pix_fmts[i] > 1)
205  constraints->valid_sw_formats[nb_sw_formats++] = vdpau_pix_fmts[i].frames_sw_format;
206  }
207  constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;
208 
209  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
210  if (!constraints->valid_hw_formats)
211  return AVERROR(ENOMEM);
212 
213  constraints->valid_hw_formats[0] = AV_PIX_FMT_VDPAU;
214  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
215 
216  return 0;
217 }
218 
219 static void vdpau_buffer_free(void *opaque, uint8_t *data)
220 {
221  AVHWFramesContext *ctx = opaque;
222  VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
223  VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)data;
224 
225  device_priv->surf_destroy(surf);
226 }
227 
228 static AVBufferRef *vdpau_pool_alloc(void *opaque, int size)
229 {
230  AVHWFramesContext *ctx = opaque;
231  VDPAUFramesContext *priv = ctx->internal->priv;
232  AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
233  VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
234 
235  AVBufferRef *ret;
236  VdpVideoSurface surf;
237  VdpStatus err;
238 
239  err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
240  ctx->width, ctx->height, &surf);
241  if (err != VDP_STATUS_OK) {
242  av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
243  return NULL;
244  }
245 
246  ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
248  if (!ret) {
249  device_priv->surf_destroy(surf);
250  return NULL;
251  }
252 
253  return ret;
254 }
255 
257 {
258  VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
259  VDPAUFramesContext *priv = ctx->internal->priv;
260 
261  int i;
262 
263  for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
264  if (vdpau_pix_fmts[i].frames_sw_format == ctx->sw_format) {
265  priv->chroma_type = vdpau_pix_fmts[i].chroma_type;
266  priv->chroma_idx = i;
267  priv->pix_fmts = device_priv->pix_fmts[i];
268  priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
269  break;
270  }
271  }
272  if (priv->nb_pix_fmts < 2) {
273  av_log(ctx, AV_LOG_ERROR, "Unsupported sw format: %s\n",
275  return AVERROR(ENOSYS);
276  }
277 
278  if (!ctx->pool) {
279  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
281  if (!ctx->internal->pool_internal)
282  return AVERROR(ENOMEM);
283  }
284 
285  priv->get_data = device_priv->get_data;
286  priv->put_data = device_priv->put_data;
287 
288  return 0;
289 }
290 
292 {
293  frame->buf[0] = av_buffer_pool_get(ctx->pool);
294  if (!frame->buf[0])
295  return AVERROR(ENOMEM);
296 
297  frame->data[3] = frame->buf[0]->data;
298  frame->format = AV_PIX_FMT_VDPAU;
299  frame->width = ctx->width;
300  frame->height = ctx->height;
301 
302  return 0;
303 }
304 
307  enum AVPixelFormat **formats)
308 {
309  VDPAUFramesContext *priv = ctx->internal->priv;
310 
311  enum AVPixelFormat *fmts;
312 
313  if (priv->nb_pix_fmts == 1) {
314  av_log(ctx, AV_LOG_ERROR,
315  "No target formats are supported for this chroma type\n");
316  return AVERROR(ENOSYS);
317  }
318 
319  fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
320  if (!fmts)
321  return AVERROR(ENOMEM);
322 
323  memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
324  *formats = fmts;
325 
326  return 0;
327 }
328 
330  const AVFrame *src)
331 {
332  VDPAUFramesContext *priv = ctx->internal->priv;
333  VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)src->data[3];
334 
335  void *data[3];
336  uint32_t linesize[3];
337 
338  const VDPAUPixFmtMap *map;
339  VdpYCbCrFormat vdpau_format;
340  VdpStatus err;
341  int i;
342 
343  for (i = 0; i< FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
344  data[i] = dst->data[i];
345  if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
346  av_log(ctx, AV_LOG_ERROR,
347  "The linesize %d cannot be represented as uint32\n",
348  dst->linesize[i]);
349  return AVERROR(ERANGE);
350  }
351  linesize[i] = dst->linesize[i];
352  }
353 
354  map = vdpau_pix_fmts[priv->chroma_idx].map;
355  for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
356  if (map[i].pix_fmt == dst->format) {
357  vdpau_format = map[i].vdpau_fmt;
358  break;
359  }
360  }
361  if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
362  av_log(ctx, AV_LOG_ERROR,
363  "Unsupported target pixel format: %s\n",
365  return AVERROR(EINVAL);
366  }
367 
368  if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
369 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
370  || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
371 #endif
372 #ifdef VDP_YCBCR_FORMAT_P016
373  || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444_16)
374 #endif
375  )
376  FFSWAP(void*, data[1], data[2]);
377 
378  err = priv->get_data(surf, vdpau_format, data, linesize);
379  if (err != VDP_STATUS_OK) {
380  av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
381  return AVERROR_UNKNOWN;
382  }
383 
384  return 0;
385 }
386 
388  const AVFrame *src)
389 {
390  VDPAUFramesContext *priv = ctx->internal->priv;
391  VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)dst->data[3];
392 
393  const void *data[3];
394  uint32_t linesize[3];
395 
396  const VDPAUPixFmtMap *map;
397  VdpYCbCrFormat vdpau_format;
398  VdpStatus err;
399  int i;
400 
401  for (i = 0; i< FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
402  data[i] = src->data[i];
403  if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
404  av_log(ctx, AV_LOG_ERROR,
405  "The linesize %d cannot be represented as uint32\n",
406  src->linesize[i]);
407  return AVERROR(ERANGE);
408  }
409  linesize[i] = src->linesize[i];
410  }
411 
412  map = vdpau_pix_fmts[priv->chroma_idx].map;
413  for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
414  if (map[i].pix_fmt == src->format) {
415  vdpau_format = map[i].vdpau_fmt;
416  break;
417  }
418  }
419  if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
420  av_log(ctx, AV_LOG_ERROR,
421  "Unsupported source pixel format: %s\n",
423  return AVERROR(EINVAL);
424  }
425 
426  if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
427 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
428  || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
429 #endif
430  )
431  FFSWAP(const void*, data[1], data[2]);
432 
433  err = priv->put_data(surf, vdpau_format, data, linesize);
434  if (err != VDP_STATUS_OK) {
435  av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
436  return AVERROR_UNKNOWN;
437  }
438 
439  return 0;
440 }
441 
442 #if HAVE_VDPAU_X11
443 #include <vdpau/vdpau_x11.h>
444 #include <X11/Xlib.h>
445 
446 typedef struct VDPAUDevicePriv {
447  VdpDeviceDestroy *device_destroy;
448  Display *dpy;
449 } VDPAUDevicePriv;
450 
451 static void vdpau_device_free(AVHWDeviceContext *ctx)
452 {
453  AVVDPAUDeviceContext *hwctx = ctx->hwctx;
454  VDPAUDevicePriv *priv = ctx->user_opaque;
455 
456  if (priv->device_destroy)
457  priv->device_destroy(hwctx->device);
458  if (priv->dpy)
459  XCloseDisplay(priv->dpy);
460  av_freep(&priv);
461 }
462 
463 static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
464  AVDictionary *opts, int flags)
465 {
466  AVVDPAUDeviceContext *hwctx = ctx->hwctx;
467 
468  VDPAUDevicePriv *priv;
469  VdpStatus err;
470  VdpGetInformationString *get_information_string;
471  const char *display, *vendor;
472 
473  priv = av_mallocz(sizeof(*priv));
474  if (!priv)
475  return AVERROR(ENOMEM);
476 
477  ctx->user_opaque = priv;
478  ctx->free = vdpau_device_free;
479 
480  priv->dpy = XOpenDisplay(device);
481  if (!priv->dpy) {
482  av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
483  XDisplayName(device));
484  return AVERROR_UNKNOWN;
485  }
486  display = XDisplayString(priv->dpy);
487 
488  err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
489  &hwctx->device, &hwctx->get_proc_address);
490  if (err != VDP_STATUS_OK) {
491  av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
492  display);
493  return AVERROR_UNKNOWN;
494  }
495 
496  GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
497  GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, priv->device_destroy);
498 
499  get_information_string(&vendor);
500  av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
501  "X11 display %s\n", vendor, display);
502 
503  return 0;
504 }
505 #endif
506 
509  .name = "VDPAU",
510 
511  .device_hwctx_size = sizeof(AVVDPAUDeviceContext),
512  .device_priv_size = sizeof(VDPAUDeviceContext),
513  .frames_priv_size = sizeof(VDPAUFramesContext),
514 
515 #if HAVE_VDPAU_X11
516  .device_create = vdpau_device_create,
517 #endif
519  .device_uninit = vdpau_device_uninit,
520  .frames_get_constraints = vdpau_frames_get_constraints,
521  .frames_init = vdpau_frames_init,
522  .frames_get_buffer = vdpau_get_buffer,
523  .transfer_get_formats = vdpau_transfer_get_formats,
524  .transfer_data_to = vdpau_transfer_data_to,
525  .transfer_data_from = vdpau_transfer_data_from,
526 
528 };
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:81
#define NULL
Definition: coverity.c:32
This struct is allocated as AVHWDeviceContext.hwctx.
This structure describes decoded (raw) audio or video data.
Definition: frame.h:308
ptrdiff_t const GLvoid * data
Definition: opengl_enc.c:100
enum AVPixelFormat frames_sw_format
#define GET_CALLBACK(id, result)
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
static int device_init(AVFormatContext *ctx, int *width, int *height, uint32_t pixelformat)
Definition: v4l2.c:192
Memory handling functions.
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:499
VdpGetProcAddress * get_proc_address
static int vdpau_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
VdpVideoSurfacePutBitsYCbCr * put_data
VdpVideoSurfacePutBitsYCbCr * put_data
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:403
VdpChromaType chroma_type
static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
const HWContextType ff_hwcontext_type_vdpau
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:237
VdpVideoSurfaceGetBitsYCbCr * get_data
static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
An API-specific header for AV_HWDEVICE_TYPE_VDPAU.
#define AV_PIX_FMT_P016
Definition: pixfmt.h:449
enum AVPixelFormat * pix_fmts
#define AV_PIX_FMT_P010
Definition: pixfmt.h:448
AVBufferPool * pool_internal
enum AVHWDeviceType type
uint8_t
VdpChromaType chroma_type
static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
enum AVPixelFormat * pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)]
static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:210
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:412
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:113
ptrdiff_t size
Definition: opengl_enc.c:100
#define av_log(a,...)
#define src
Definition: vp8dsp.c:254
static const VDPAUPixFmtMap pix_fmts_422[]
int width
Definition: frame.h:366
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
static void vdpau_buffer_free(void *opaque, uint8_t *data)
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:402
AVBufferRef * av_buffer_create(uint8_t *data, int size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:29
GLsizei count
Definition: opengl_enc.c:108
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
AVDictionary * opts
Definition: movenc.c:50
static int vdpau_device_init(AVHWDeviceContext *ctx)
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:149
static const struct @309 vdpau_pix_fmts[]
static const VDPAUPixFmtMap pix_fmts_444[]
AVFormatContext * ctx
Definition: movenc.c:48
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
VdpYCbCrFormat vdpau_fmt
static int vdpau_frames_init(AVHWFramesContext *ctx)
AVBufferPool * av_buffer_pool_init2(int size, void *opaque, AVBufferRef *(*alloc)(void *opaque, int size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:219
#define FF_ARRAY_ELEMS(a)
VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities * get_transfer_caps
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:381
VdpVideoSurfaceCreate * surf_create
static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:453
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:339
uint8_t * data
The data buffer.
Definition: buffer.h:89
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:399
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:197
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
refcounted data buffer API
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:458
const VDPAUPixFmtMap * map
AVHWFramesInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:134
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:300
#define flags(name, subs,...)
Definition: cbs_av1.c:560
int nb_pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)]
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:400
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:406
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:322
void * user_opaque
Arbitrary user data, to be used e.g.
Definition: hwcontext.h:109
A reference to a data buffer.
Definition: buffer.h:81
static void vdpau_device_uninit(AVHWDeviceContext *ctx)
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
common internal and external API header
static int count_pixfmts(const VDPAUPixFmtMap *map)
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:71
interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:201
enum AVPixelFormat pix_fmt
VdpVideoSurfaceDestroy * surf_destroy
static const VDPAUPixFmtMap pix_fmts_420[]
AVHWFrameTransferDirection
Definition: hwcontext.h:415
pixel format definitions
AVBufferPool * pool
A pool from which the frames are allocated by av_hwframe_get_buffer().
Definition: hwcontext.h:190
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:465
void(* free)(struct AVHWDeviceContext *ctx)
This field may be set by the caller before calling av_hwdevice_ctx_init().
Definition: hwcontext.h:104
int height
Definition: frame.h:366
static AVBufferRef * vdpau_pool_alloc(void *opaque, int size)
#define av_freep(p)
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:338
#define av_malloc_array(a, b)
formats
Definition: signature.h:48
#define FFSWAP(type, a, b)
Definition: common.h:99
AVHWDeviceInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:71
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2489
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Frame references ownership and permissions
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
VdpVideoSurfaceGetBitsYCbCr * get_data
int i
Definition: input.c:407