hwcontext_vdpau.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <stdint.h>
#include <string.h>

#include <vdpau/vdpau.h>

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vdpau.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"

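/* Per-device state: VDPAU surface/transfer entry points resolved through
 * get_proc_address at device init, plus the AV_PIX_FMT_NONE-terminated list
 * of pixel formats usable for transfers, one list per supported chroma type
 * (4:2:0, 4:2:2, 4:4:4). */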
typedef struct VDPAUDeviceContext {
    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
    VdpVideoSurfaceGetBitsYCbCr                     *get_data;
    VdpVideoSurfacePutBitsYCbCr                     *put_data;
    VdpVideoSurfaceCreate                           *surf_create;
    VdpVideoSurfaceDestroy                          *surf_destroy;

    enum AVPixelFormat *pix_fmts[3];
    int                 nb_pix_fmts[3];
} VDPAUDeviceContext;

typedef struct VDPAUFramesContext {
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpChromaType                chroma_type;
    int                          chroma_idx;

    const enum AVPixelFormat    *pix_fmts;
    int                          nb_pix_fmts;
} VDPAUFramesContext;

typedef struct VDPAUPixFmtMap {
    VdpYCbCrFormat     vdpau_fmt;
    enum AVPixelFormat pix_fmt;
} VDPAUPixFmtMap;

static const VDPAUPixFmtMap pix_fmts_420[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_422[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_444[] = {
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV444P },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const struct {
    VdpChromaType chroma_type;
    const VDPAUPixFmtMap *map;
} vdpau_pix_fmts[] = {
    { VDP_CHROMA_TYPE_420, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444, pix_fmts_444 },
};

static int count_pixfmts(const VDPAUPixFmtMap *map)
{
    int count = 0;
    while (map->pix_fmt != AV_PIX_FMT_NONE) {
        map++;
        count++;
    }
    return count;
}

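/* Probe, for every chroma type, which VdpYCbCrFormats the device can handle
 * in Get/PutBitsYCbCr and record the matching AVPixelFormats. */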
static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext   *priv  = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
        int nb_pix_fmts;

        nb_pix_fmts = count_pixfmts(map);
        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
        if (!priv->pix_fmts[i])
            return AVERROR(ENOMEM);

        nb_pix_fmts = 0;
        while (map->pix_fmt != AV_PIX_FMT_NONE) {
            VdpBool supported;
            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
                                                    map->vdpau_fmt, &supported);
            if (err == VDP_STATUS_OK && supported)
                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
            map++;
        }
        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
        priv->nb_pix_fmts[i]             = nb_pix_fmts;
    }

    return 0;
}

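/* Resolve one VDPAU entry point through get_proc_address; on failure, log the
 * callback name and make the enclosing function return AVERROR_UNKNOWN. */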
#define GET_CALLBACK(id, result)                                                \
do {                                                                            \
    void *tmp;                                                                  \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                     \
    if (err != VDP_STATUS_OK) {                                                 \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n");     \
        return AVERROR_UNKNOWN;                                                 \
    }                                                                           \
    result = tmp;                                                               \
} while (0)

static int vdpau_device_init(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext   *priv  = ctx->internal->priv;
    VdpStatus             err;
    int                   ret;

    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                 priv->get_transfer_caps);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE,           priv->surf_create);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,          priv->surf_destroy);

    ret = vdpau_init_pixmfts(ctx);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
        return ret;
    }

    return 0;
}

static void vdpau_device_uninit(AVHWDeviceContext *ctx)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
        av_freep(&priv->pix_fmts[i]);
}

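/* Pool callbacks: each pooled buffer wraps a VdpVideoSurface handle, stored
 * directly in the AVBufferRef data pointer via an uintptr_t cast. */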
static void vdpau_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext  *ctx         = opaque;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VdpVideoSurface     surf        = (VdpVideoSurface)(uintptr_t)data;

    device_priv->surf_destroy(surf);
}

static AVBufferRef *vdpau_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext    *ctx          = opaque;
    VDPAUFramesContext   *priv         = ctx->internal->priv;
    AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    VDPAUDeviceContext   *device_priv  = ctx->device_ctx->internal->priv;

    AVBufferRef *ret;
    VdpVideoSurface surf;
    VdpStatus err;

    err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
                                   ctx->width, ctx->height, &surf);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
        return NULL;
    }

    ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
                           vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
    if (!ret) {
        device_priv->surf_destroy(surf);
        return NULL;
    }

    return ret;
}

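/* Map sw_format to a VDPAU chroma type, pick the matching transfer-format
 * list probed at device init, and create an internal surface pool if the
 * caller did not supply one. */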
static int vdpau_frames_init(AVHWFramesContext *ctx)
{
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VDPAUFramesContext *priv        = ctx->internal->priv;

    int i;

    switch (ctx->sw_format) {
    case AV_PIX_FMT_YUV420P: priv->chroma_type = VDP_CHROMA_TYPE_420; break;
    case AV_PIX_FMT_YUV422P: priv->chroma_type = VDP_CHROMA_TYPE_422; break;
    case AV_PIX_FMT_YUV444P: priv->chroma_type = VDP_CHROMA_TYPE_444; break;
    default:
        av_log(ctx, AV_LOG_ERROR, "Unsupported data layout: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(ENOSYS);
    }

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (vdpau_pix_fmts[i].chroma_type == priv->chroma_type) {
            priv->chroma_idx  = i;
            priv->pix_fmts    = device_priv->pix_fmts[i];
            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
            break;
        }
    }
    if (!priv->pix_fmts) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported chroma type: %d\n", priv->chroma_type);
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
                                                            vdpau_pool_alloc, NULL);
        if (!ctx->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    priv->get_data = device_priv->get_data;
    priv->put_data = device_priv->put_data;

    return 0;
}

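/* Hand out a pooled surface; data[3] carries the VdpVideoSurface handle, as
 * required by AV_PIX_FMT_VDPAU. */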
static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VDPAU;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

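/* Report the pixel formats usable for transfers with this chroma type (the
 * list holds only the AV_PIX_FMT_NONE terminator when nothing is supported). */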
static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    VDPAUFramesContext *priv = ctx->internal->priv;

    enum AVPixelFormat *fmts;

    if (priv->nb_pix_fmts == 1) {
        av_log(ctx, AV_LOG_ERROR,
               "No target formats are supported for this chroma type\n");
        return AVERROR(ENOSYS);
    }

    fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * priv->nb_pix_fmts);
    *formats = fmts;

    return 0;
}

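/* Download from a surface to system memory. VDPAU's YV12 layout stores the
 * V plane before U, so the chroma plane pointers are swapped for that format. */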
static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                    const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)src->data[3];

    void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
        data[i] = dst->data[i];
        if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   dst->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = dst->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == dst->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported target pixel format: %s\n",
               av_get_pix_fmt_name(dst->format));
        return AVERROR(EINVAL);
    }

    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
        FFSWAP(void*, data[1], data[2]);

    err = priv->get_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)dst->data[3];

    const void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
        data[i] = src->data[i];
        if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   src->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = src->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == src->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported source pixel format: %s\n",
               av_get_pix_fmt_name(src->format));
        return AVERROR(EINVAL);
    }

    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
        FFSWAP(const void*, data[1], data[2]);

    err = priv->put_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

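/* X11-specific device creation: open a Display, create a VdpDevice on it and
 * remember how to tear both down again. */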
#if HAVE_VDPAU_X11
#include <vdpau/vdpau_x11.h>
#include <X11/Xlib.h>

typedef struct VDPAUDevicePriv {
    VdpDeviceDestroy *device_destroy;
    Display          *dpy;
} VDPAUDevicePriv;

static void vdpau_device_free(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv      *priv  = ctx->user_opaque;

    if (priv->device_destroy)
        priv->device_destroy(hwctx->device);
    if (priv->dpy)
        XCloseDisplay(priv->dpy);
    av_freep(&priv);
}

static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;

    VDPAUDevicePriv *priv;
    VdpStatus err;
    VdpGetInformationString *get_information_string;
    const char *display, *vendor;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free        = vdpau_device_free;

    priv->dpy = XOpenDisplay(device);
    if (!priv->dpy) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
               XDisplayName(device));
        return AVERROR_UNKNOWN;
    }
    display = XDisplayString(priv->dpy);

    err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
                                &hwctx->device, &hwctx->get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
               display);
        return AVERROR_UNKNOWN;
    }

    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, priv->device_destroy);

    get_information_string(&vendor);
    av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
           "X11 display %s\n", vendor, display);

    return 0;
}
#endif

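/* Backend descriptor wiring the callbacks above into the hwcontext framework. */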
const HWContextType ff_hwcontext_type_vdpau = {
    .type                 = AV_HWDEVICE_TYPE_VDPAU,
    .name                 = "VDPAU",

    .device_hwctx_size    = sizeof(AVVDPAUDeviceContext),
    .device_priv_size     = sizeof(VDPAUDeviceContext),
    .frames_priv_size     = sizeof(VDPAUFramesContext),

#if HAVE_VDPAU_X11
    .device_create        = vdpau_device_create,
#endif
    .device_init          = vdpau_device_init,
    .device_uninit        = vdpau_device_uninit,
    .frames_init          = vdpau_frames_init,
    .frames_get_buffer    = vdpau_get_buffer,
    .transfer_get_formats = vdpau_transfer_get_formats,
    .transfer_data_to     = vdpau_transfer_data_to,
    .transfer_data_from   = vdpau_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
};