FFmpeg
hwcontext_qsv.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdatomic.h>
20 #include <stdint.h>
21 #include <string.h>
22 
23 #include <mfxvideo.h>
24 
25 #include "config.h"
26 
27 #if HAVE_PTHREADS
28 #include <pthread.h>
29 #endif
30 
31 #define COBJMACROS
32 #if CONFIG_VAAPI
33 #include "hwcontext_vaapi.h"
34 #endif
35 #if CONFIG_D3D11VA
36 #include "hwcontext_d3d11va.h"
37 #endif
38 #if CONFIG_DXVA2
39 #include "hwcontext_dxva2.h"
40 #endif
41 
42 #include "buffer.h"
43 #include "common.h"
44 #include "hwcontext.h"
45 #include "hwcontext_internal.h"
46 #include "hwcontext_qsv.h"
47 #include "mem.h"
48 #include "pixfmt.h"
49 #include "pixdesc.h"
50 #include "time.h"
51 #include "imgutils.h"
52 #include "avassert.h"
53 
/* Compile-time check that the mfx API headers are at least MAJOR.MINOR.
 * Fully parenthesized: the original relied on '&&' binding tighter than
 * '||', which is correct but triggers -Wparentheses and is easy to misread. */
#define QSV_VERSION_ATLEAST(MAJOR, MINOR)   \
    (MFX_VERSION_MAJOR > (MAJOR) ||         \
     (MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR)))
57 
58 #define QSV_RUNTIME_VERSION_ATLEAST(MFX_VERSION, MAJOR, MINOR) \
59  ((MFX_VERSION.Major > (MAJOR)) || \
60  (MFX_VERSION.Major == (MAJOR) && MFX_VERSION.Minor >= (MINOR)))
61 
62 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
63 #define QSV_ONEVPL QSV_VERSION_ATLEAST(2, 0)
64 #define QSV_HAVE_OPAQUE !QSV_ONEVPL
65 
66 #if QSV_ONEVPL
67 #include <mfxdispatcher.h>
68 #else
69 #define MFXUnload(a) do { } while(0)
70 #endif
71 
72 typedef struct QSVDevicePriv {
75 
76 typedef struct QSVDeviceContext {
77  /**
78  * The public AVQSVDeviceContext. See hwcontext_qsv.h for it.
79  */
81 
82  mfxHDL handle;
83  mfxHandleType handle_type;
84  mfxVersion ver;
85  mfxIMPL impl;
86 
90 
91 typedef struct QSVFramesContext {
92  /**
93  * The public AVQSVFramesContext. See hwcontext_qsv.h for it.
94  */
96 
97  mfxSession session_download;
99  mfxSession session_upload;
101 #if HAVE_PTHREADS
102  pthread_mutex_t session_lock;
103 #endif
104 
106  mfxFrameSurface1 *surfaces_internal;
109 
110  // used in the frame allocator for non-opaque surfaces
111  mfxMemId *mem_ids;
112 #if QSV_HAVE_OPAQUE
113  // used in the opaque alloc request for opaque surfaces
114  mfxFrameSurface1 **surface_ptrs;
115 
116  mfxExtOpaqueSurfaceAlloc opaque_alloc;
117  mfxExtBuffer *ext_buffers[1];
118 #endif
122 
123 static const struct {
125  uint32_t fourcc;
126  uint16_t mfx_shift;
128  { AV_PIX_FMT_NV12, MFX_FOURCC_NV12, 0 },
129  { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4, 0 },
130  { AV_PIX_FMT_P010, MFX_FOURCC_P010, 1 },
131  { AV_PIX_FMT_PAL8, MFX_FOURCC_P8, 0 },
133  MFX_FOURCC_YUY2, 0 },
134 #if CONFIG_VAAPI
136  MFX_FOURCC_UYVY, 0 },
137 #endif
138  { AV_PIX_FMT_Y210,
139  MFX_FOURCC_Y210, 1 },
140  // VUYX is used for VAAPI child device,
141  // the SDK only delares support for AYUV
142  { AV_PIX_FMT_VUYX,
143  MFX_FOURCC_AYUV, 0 },
144  // XV30 is used for VAAPI child device,
145  // the SDK only delares support for Y410
146  { AV_PIX_FMT_XV30,
147  MFX_FOURCC_Y410, 0 },
148 #if QSV_VERSION_ATLEAST(1, 31)
149  // P012 is used for VAAPI child device,
150  // the SDK only delares support for P016
151  { AV_PIX_FMT_P012,
152  MFX_FOURCC_P016, 1 },
153  // Y212 is used for VAAPI child device,
154  // the SDK only delares support for Y216
155  { AV_PIX_FMT_Y212,
156  MFX_FOURCC_Y216, 1 },
157  // XV36 is used for VAAPI child device,
158  // the SDK only delares support for Y416
159  { AV_PIX_FMT_XV36,
160  MFX_FOURCC_Y416, 1 },
161 #endif
162 };
163 
164 extern int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
165  enum AVHWDeviceType base_dev_type,
166  void **base_handle);
167 
168 /**
169  * Caller needs to allocate enough space for base_handle pointer.
170  **/
171 int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
172  enum AVHWDeviceType base_dev_type,
173  void **base_handle)
174 {
175  mfxHDLPair *handle_pair;
176  handle_pair = surf->Data.MemId;
177  switch (base_dev_type) {
178 #if CONFIG_VAAPI
180  base_handle[0] = handle_pair->first;
181  return 0;
182 #endif
183 #if CONFIG_D3D11VA
185  base_handle[0] = handle_pair->first;
186  base_handle[1] = handle_pair->second;
187  return 0;
188 #endif
189 #if CONFIG_DXVA2
191  base_handle[0] = handle_pair->first;
192  return 0;
193 #endif
194  }
195  return AVERROR(EINVAL);
196 }
197 
199 {
200  int i;
201  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
203  return supported_pixel_formats[i].fourcc;
204  }
205  return 0;
206 }
207 
209 {
210  for (int i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
212  return supported_pixel_formats[i].mfx_shift;
213  }
214 
215  return 0;
216 }
217 
218 #if CONFIG_D3D11VA
219 static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
220 {
221  uint32_t bind_flags = 0;
222 
223  if ((mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) && (mem_type & MFX_MEMTYPE_INTERNAL_FRAME))
224  bind_flags = D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER;
225  else
226  bind_flags = D3D11_BIND_DECODER;
227 
228  if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
229  bind_flags = D3D11_BIND_RENDER_TARGET;
230 
231  return bind_flags;
232 }
233 #endif
234 
235 static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
236 {
237  const AVPixFmtDescriptor *desc;
238  int i, planes_nb = 0;
239  if (dst->format != src->format)
240  return AVERROR(EINVAL);
241 
243 
244  for (i = 0; i < desc->nb_components; i++)
245  planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);
246 
247  for (i = 0; i < planes_nb; i++) {
248  int sheight, dheight, y;
249  ptrdiff_t swidth = av_image_get_linesize(src->format,
250  src->width,
251  i);
252  ptrdiff_t dwidth = av_image_get_linesize(dst->format,
253  dst->width,
254  i);
255  const AVComponentDescriptor comp = desc->comp[i];
256  if (swidth < 0 || dwidth < 0) {
257  av_log(NULL, AV_LOG_ERROR, "av_image_get_linesize failed\n");
258  return AVERROR(EINVAL);
259  }
260  sheight = src->height;
261  dheight = dst->height;
262  if (i) {
263  sheight = AV_CEIL_RSHIFT(src->height, desc->log2_chroma_h);
264  dheight = AV_CEIL_RSHIFT(dst->height, desc->log2_chroma_h);
265  }
266  //fill right padding
267  for (y = 0; y < sheight; y++) {
268  void *line_ptr = dst->data[i] + y*dst->linesize[i] + swidth;
269  av_memcpy_backptr(line_ptr,
270  comp.depth > 8 ? 2 : 1,
271  dwidth - swidth);
272  }
273  //fill bottom padding
274  for (y = sheight; y < dheight; y++) {
275  memcpy(dst->data[i]+y*dst->linesize[i],
276  dst->data[i]+(sheight-1)*dst->linesize[i],
277  dwidth);
278  }
279  }
280  return 0;
281 }
282 
284 {
285  QSVDeviceContext *s = ctx->hwctx;
286  AVQSVDeviceContext *hwctx = &s->p;
287  int hw_handle_supported = 0;
288  mfxHandleType handle_type;
289  enum AVHWDeviceType device_type;
290  enum AVPixelFormat pix_fmt;
291  mfxStatus err;
292 
293  err = MFXQueryIMPL(hwctx->session, &s->impl);
294  if (err == MFX_ERR_NONE)
295  err = MFXQueryVersion(hwctx->session, &s->ver);
296  if (err != MFX_ERR_NONE) {
297  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
298  return AVERROR_UNKNOWN;
299  }
300 
301  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
302 #if CONFIG_VAAPI
303  handle_type = MFX_HANDLE_VA_DISPLAY;
304  device_type = AV_HWDEVICE_TYPE_VAAPI;
306  hw_handle_supported = 1;
307 #endif
308  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
309 #if CONFIG_D3D11VA
310  handle_type = MFX_HANDLE_D3D11_DEVICE;
311  device_type = AV_HWDEVICE_TYPE_D3D11VA;
313  hw_handle_supported = 1;
314 #endif
315  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
316 #if CONFIG_DXVA2
317  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
318  device_type = AV_HWDEVICE_TYPE_DXVA2;
320  hw_handle_supported = 1;
321 #endif
322  }
323 
324  if (hw_handle_supported) {
325  err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
326  if (err == MFX_ERR_NONE) {
327  s->handle_type = handle_type;
328  s->child_device_type = device_type;
329  s->child_pix_fmt = pix_fmt;
330  }
331  }
332  if (!s->handle) {
333  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
334  "from the session\n");
335  }
336  return 0;
337 }
338 
340 {
341  QSVFramesContext *s = ctx->hwctx;
342 
343  if (s->session_download) {
344  MFXVideoVPP_Close(s->session_download);
345  MFXClose(s->session_download);
346  }
347  s->session_download = NULL;
348  s->session_download_init = 0;
349 
350  if (s->session_upload) {
351  MFXVideoVPP_Close(s->session_upload);
352  MFXClose(s->session_upload);
353  }
354  s->session_upload = NULL;
355  s->session_upload_init = 0;
356 
357 #if HAVE_PTHREADS
358  pthread_mutex_destroy(&s->session_lock);
359 #endif
360 
361  av_freep(&s->mem_ids);
362 #if QSV_HAVE_OPAQUE
363  av_freep(&s->surface_ptrs);
364 #endif
365  av_freep(&s->surfaces_internal);
366  av_freep(&s->handle_pairs_internal);
367  av_frame_unref(&s->realigned_upload_frame);
368  av_frame_unref(&s->realigned_download_frame);
369  av_buffer_unref(&s->child_frames_ref);
370 }
371 
/* Buffers handed out by qsv_pool_alloc() wrap surfaces owned by the frames
 * context itself, so there is nothing to release per buffer. */
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}
375 
376 static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
377 {
379  QSVFramesContext *s = ctx->hwctx;
380  AVQSVFramesContext *hwctx = &s->p;
381 
382  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
383  s->nb_surfaces_used++;
384  return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
385  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
386  }
387 
388  return NULL;
389 }
390 
392 {
393  QSVDeviceContext *device_priv = ctx->device_ctx->hwctx;
394  QSVFramesContext *s = ctx->hwctx;
395  AVQSVFramesContext *hwctx = &s->p;
396 
397  AVBufferRef *child_device_ref = NULL;
398  AVBufferRef *child_frames_ref = NULL;
399 
400  AVHWDeviceContext *child_device_ctx;
401  AVHWFramesContext *child_frames_ctx;
402 
403  int i, ret = 0;
404 
405  if (!device_priv->handle) {
407  "Cannot create a non-opaque internal surface pool without "
408  "a hardware handle\n");
409  return AVERROR(EINVAL);
410  }
411 
412  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
413  if (!child_device_ref)
414  return AVERROR(ENOMEM);
415  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
416 
417 #if CONFIG_VAAPI
418  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
419  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
420  child_device_hwctx->display = (VADisplay)device_priv->handle;
421  }
422 #endif
423 #if CONFIG_D3D11VA
424  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
425  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
426  ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
427  child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
428  }
429 #endif
430 #if CONFIG_DXVA2
431  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
432  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
433  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
434  }
435 #endif
436 
437  ret = av_hwdevice_ctx_init(child_device_ref);
438  if (ret < 0) {
439  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
440  goto fail;
441  }
442 
443  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
444  if (!child_frames_ref) {
445  ret = AVERROR(ENOMEM);
446  goto fail;
447  }
448  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
449 
450  child_frames_ctx->format = device_priv->child_pix_fmt;
451  child_frames_ctx->sw_format = ctx->sw_format;
452  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
453  child_frames_ctx->width = FFALIGN(ctx->width, 16);
454  child_frames_ctx->height = FFALIGN(ctx->height, 16);
455 
456 #if CONFIG_D3D11VA
457  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
458  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
459  if (hwctx->frame_type == 0)
460  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
461  if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
462  child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
463  child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
464  }
465 #endif
466 #if CONFIG_DXVA2
467  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
468  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
469  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
470  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
471  else
472  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
473  }
474 #endif
475 
476  ret = av_hwframe_ctx_init(child_frames_ref);
477  if (ret < 0) {
478  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
479  goto fail;
480  }
481 
482 #if CONFIG_VAAPI
483  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
484  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
485  for (i = 0; i < ctx->initial_pool_size; i++) {
486  s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
487  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
488  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
489  }
490  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
491  }
492 #endif
493 #if CONFIG_D3D11VA
494  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
495  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
496  for (i = 0; i < ctx->initial_pool_size; i++) {
497  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
498  if(child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
499  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
500  } else {
501  s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
502  }
503  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
504  }
505  if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
506  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
507  } else {
508  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
509  }
510  }
511 #endif
512 #if CONFIG_DXVA2
513  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
514  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
515  for (i = 0; i < ctx->initial_pool_size; i++) {
516  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
517  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
518  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
519  }
520  if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
521  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
522  else
523  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
524  }
525 #endif
526 
527  s->child_frames_ref = child_frames_ref;
528  child_frames_ref = NULL;
529 
530 fail:
531  av_buffer_unref(&child_device_ref);
532  av_buffer_unref(&child_frames_ref);
533  return ret;
534 }
535 
536 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
537 {
538  const AVPixFmtDescriptor *desc;
539  uint32_t fourcc;
540 
541  desc = av_pix_fmt_desc_get(ctx->sw_format);
542  if (!desc)
543  return AVERROR(EINVAL);
544 
545  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
546  if (!fourcc)
547  return AVERROR(EINVAL);
548 
549  surf->Info.BitDepthLuma = desc->comp[0].depth;
550  surf->Info.BitDepthChroma = desc->comp[0].depth;
551  surf->Info.Shift = qsv_shift_from_pix_fmt(ctx->sw_format);
552 
553  if (desc->log2_chroma_w && desc->log2_chroma_h)
554  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
555  else if (desc->log2_chroma_w)
556  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
557  else
558  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
559 
560  surf->Info.FourCC = fourcc;
561  surf->Info.Width = FFALIGN(ctx->width, 16);
562  surf->Info.CropW = ctx->width;
563  surf->Info.Height = FFALIGN(ctx->height, 16);
564  surf->Info.CropH = ctx->height;
565  surf->Info.FrameRateExtN = 25;
566  surf->Info.FrameRateExtD = 1;
567  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
568 
569  return 0;
570 }
571 
573 {
574  QSVFramesContext *s = ctx->hwctx;
575  AVQSVFramesContext *frames_hwctx = &s->p;
576 
577  int i, ret = 0;
578 
579  if (ctx->initial_pool_size <= 0) {
580  av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
581  return AVERROR(EINVAL);
582  }
583 
584  s->handle_pairs_internal = av_calloc(ctx->initial_pool_size,
585  sizeof(*s->handle_pairs_internal));
586  if (!s->handle_pairs_internal)
587  return AVERROR(ENOMEM);
588 
589  s->surfaces_internal = av_calloc(ctx->initial_pool_size,
590  sizeof(*s->surfaces_internal));
591  if (!s->surfaces_internal)
592  return AVERROR(ENOMEM);
593 
594  for (i = 0; i < ctx->initial_pool_size; i++) {
595  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
596  if (ret < 0)
597  return ret;
598  }
599 
600 #if QSV_HAVE_OPAQUE
601  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
603  if (ret < 0)
604  return ret;
605  }
606 #else
608  if (ret < 0)
609  return ret;
610 #endif
611 
612  ffhwframesctx(ctx)->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
614  if (!ffhwframesctx(ctx)->pool_internal)
615  return AVERROR(ENOMEM);
616 
617  frames_hwctx->surfaces = s->surfaces_internal;
618  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
619 
620  return 0;
621 }
622 
623 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
624  mfxFrameAllocResponse *resp)
625 {
626  AVHWFramesContext *ctx = pthis;
627  QSVFramesContext *s = ctx->hwctx;
628  AVQSVFramesContext *hwctx = &s->p;
629  mfxFrameInfo *i = &req->Info;
630  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
631 
632  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
633  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
634  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
635  return MFX_ERR_UNSUPPORTED;
636  if (i->Width > i1->Width || i->Height > i1->Height ||
637  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
638  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
639  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
640  i->Width, i->Height, i->FourCC, i->ChromaFormat,
641  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
642  return MFX_ERR_UNSUPPORTED;
643  }
644 
645  resp->mids = s->mem_ids;
646  resp->NumFrameActual = hwctx->nb_surfaces;
647 
648  return MFX_ERR_NONE;
649 }
650 
/* mfxFrameAllocator.Free callback: the surfaces belong to the frames
 * context, so there is nothing to free here. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}
655 
/* mfxFrameAllocator.Lock callback: CPU mapping of these video-memory
 * surfaces is not supported through this allocator. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
660 
/* mfxFrameAllocator.Unlock callback: see frame_lock() — unsupported. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
665 
666 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
667 {
668  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
669  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
670 
671  pair_dst->first = pair_src->first;
672 
673  if (pair_src->second != (mfxMemId)MFX_INFINITE)
674  pair_dst->second = pair_src->second;
675  return MFX_ERR_NONE;
676 }
677 
678 #if QSV_ONEVPL
679 
/**
 * Add D3D11-device identification (DeviceID, adapter LUID, LUID node mask)
 * to a oneVPL dispatcher config so that session creation picks the
 * implementation running on this exact adapter.
 *
 * Compiled to a stub returning AVERROR_UNKNOWN without D3D11VA support.
 *
 * @return 0 on success, AVERROR_UNKNOWN on any COM or dispatcher failure.
 */
static int qsv_d3d11_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
{
    int ret = AVERROR_UNKNOWN;
#if CONFIG_D3D11VA
    mfxStatus sts;
    IDXGIAdapter *pDXGIAdapter;
    DXGI_ADAPTER_DESC adapterDesc;
    IDXGIDevice *pDXGIDevice = NULL;
    HRESULT hr;
    ID3D11Device *device = handle;
    mfxVariant impl_value;

    /* Get the DXGI adapter backing the D3D11 device to read its IDs. */
    hr = ID3D11Device_QueryInterface(device, &IID_IDXGIDevice, (void**)&pDXGIDevice);
    if (SUCCEEDED(hr)) {
        hr = IDXGIDevice_GetAdapter(pDXGIDevice, &pDXGIAdapter);
        if (FAILED(hr)) {
            av_log(ctx, AV_LOG_ERROR, "Error IDXGIDevice_GetAdapter %d\n", hr);
            IDXGIDevice_Release(pDXGIDevice);
            return ret;
        }

        hr = IDXGIAdapter_GetDesc(pDXGIAdapter, &adapterDesc);
        if (FAILED(hr)) {
            av_log(ctx, AV_LOG_ERROR, "Error IDXGIAdapter_GetDesc %d\n", hr);
            goto fail;
        }
    } else {
        av_log(ctx, AV_LOG_ERROR, "Error ID3D11Device_QueryInterface %d\n", hr);
        return ret;
    }

    impl_value.Type = MFX_VARIANT_TYPE_U16;
    impl_value.Data.U16 = adapterDesc.DeviceId;
    sts = MFXSetConfigFilterProperty(cfg,
                                     (const mfxU8 *)"mfxExtendedDeviceId.DeviceID", impl_value);
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
               "DeviceID property: %d.\n", sts);
        goto fail;
    }

    impl_value.Type = MFX_VARIANT_TYPE_PTR;
    impl_value.Data.Ptr = &adapterDesc.AdapterLuid;
    sts = MFXSetConfigFilterProperty(cfg,
                                     (const mfxU8 *)"mfxExtendedDeviceId.DeviceLUID", impl_value);
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
               "DeviceLUID property: %d.\n", sts);
        goto fail;
    }

    impl_value.Type = MFX_VARIANT_TYPE_U32;
    impl_value.Data.U32 = 0x0001;
    sts = MFXSetConfigFilterProperty(cfg,
                                     (const mfxU8 *)"mfxExtendedDeviceId.LUIDDeviceNodeMask", impl_value);
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
               "LUIDDeviceNodeMask property: %d.\n", sts);
        goto fail;
    }

    ret = 0;

fail:
    /* pDXGIAdapter/pDXGIDevice are valid on every path reaching here. */
    IDXGIAdapter_Release(pDXGIAdapter);
    IDXGIDevice_Release(pDXGIDevice);
#endif
    return ret;
}
749 
/**
 * Add the adapter LUID of a D3D9 device manager to a oneVPL dispatcher
 * config so that session creation picks the matching implementation.
 *
 * The device must be locked through the manager to query it; the
 * release/unlock/close labels undo each acquisition in reverse order.
 * Compiled to a stub returning AVERROR_UNKNOWN without DXVA2 support.
 *
 * @return 0 on success, AVERROR_UNKNOWN on any COM or dispatcher failure.
 */
static int qsv_d3d9_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
{
    int ret = AVERROR_UNKNOWN;
#if CONFIG_DXVA2
    mfxStatus sts;
    IDirect3DDeviceManager9* devmgr = handle;
    IDirect3DDevice9Ex *device = NULL;
    HANDLE device_handle = 0;
    IDirect3D9Ex *d3d9ex = NULL;
    LUID luid;
    D3DDEVICE_CREATION_PARAMETERS params;
    HRESULT hr;
    mfxVariant impl_value;

    hr = IDirect3DDeviceManager9_OpenDeviceHandle(devmgr, &device_handle);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Error OpenDeviceHandle %d\n", hr);
        goto fail;
    }

    hr = IDirect3DDeviceManager9_LockDevice(devmgr, device_handle, &device, TRUE);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Error LockDevice %d\n", hr);
        IDirect3DDeviceManager9_CloseDeviceHandle(devmgr, device_handle);
        goto fail;
    }

    /* The adapter ordinal is needed to resolve the LUID via IDirect3D9Ex. */
    hr = IDirect3DDevice9Ex_GetCreationParameters(device, &params);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9_GetCreationParameters %d\n", hr);
        IDirect3DDevice9Ex_Release(device);
        goto unlock;
    }

    hr = IDirect3DDevice9Ex_GetDirect3D(device, &d3d9ex);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9Ex_GetAdapterLUID %d\n", hr);
        IDirect3DDevice9Ex_Release(device);
        goto unlock;
    }

    hr = IDirect3D9Ex_GetAdapterLUID(d3d9ex, params.AdapterOrdinal, &luid);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9Ex_GetAdapterLUID %d\n", hr);
        goto release;
    }

    impl_value.Type = MFX_VARIANT_TYPE_PTR;
    impl_value.Data.Ptr = &luid;
    sts = MFXSetConfigFilterProperty(cfg,
                                     (const mfxU8 *)"mfxExtendedDeviceId.DeviceLUID", impl_value);
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
               "DeviceLUID property: %d.\n", sts);
        goto release;
    }

    ret = 0;

release:
    IDirect3D9Ex_Release(d3d9ex);
    IDirect3DDevice9Ex_Release(device);

unlock:
    IDirect3DDeviceManager9_UnlockDevice(devmgr, device_handle, FALSE);
    IDirect3DDeviceManager9_CloseDeviceHandle(devmgr, device_handle);
fail:
#endif
    return ret;
}
820 
/**
 * Add the PCI device ID of a VADisplay to a oneVPL dispatcher config so
 * that session creation picks the implementation for this GPU.
 *
 * Requires libva >= 1.15.0 (VADisplayPCIID attribute); otherwise, or when
 * VAAPI is disabled, only logs (if applicable) and reports failure.
 *
 * @return 0 on success, AVERROR_UNKNOWN otherwise.
 */
static int qsv_va_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
{
#if CONFIG_VAAPI
#if VA_CHECK_VERSION(1, 15, 0)
    mfxStatus sts;
    VADisplay dpy = handle;
    VAStatus vas;
    VADisplayAttribute attr = {
        .type = VADisplayPCIID,
    };
    mfxVariant impl_value;

    vas = vaGetDisplayAttributes(dpy, &attr, 1);
    if (vas == VA_STATUS_SUCCESS && attr.flags != VA_DISPLAY_ATTRIB_NOT_SUPPORTED) {
        /* attr.value packs vendor ID in the high 16 bits, device ID low. */
        impl_value.Type = MFX_VARIANT_TYPE_U16;
        impl_value.Data.U16 = (attr.value & 0xFFFF);
        sts = MFXSetConfigFilterProperty(cfg,
                                         (const mfxU8 *)"mfxImplDescription.mfxDeviceDescription.DeviceID", impl_value);
        if (sts != MFX_ERR_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
                   "DeviceID property: %d.\n", sts);
            goto fail;
        }
    } else {
        av_log(ctx, AV_LOG_ERROR, "libva: Failed to get device id from the driver. Please "
               "consider to upgrade the driver to support VA-API 1.15.0\n");
        goto fail;
    }

    return 0;

fail:
#else
    av_log(ctx, AV_LOG_ERROR, "libva: This version of libva doesn't support retrieving "
           "the device information from the driver. Please consider to upgrade libva to "
           "support VA-API 1.15.0\n");
#endif
#endif
    return AVERROR_UNKNOWN;
}
861 
/**
 * Create a oneVPL loader filtered down to the wanted implementation:
 * HW/SW type, minimum API version, Intel vendor ID, acceleration mode,
 * and (when a device handle is given) the exact device.
 *
 * @param ctx            logging context
 * @param handle         child device handle (may be NULL)
 * @param handle_type    type of handle (VA display / D3D9 manager / D3D11)
 * @param implementation requested mfx implementation (SW or HW)
 * @param pver           minimum required API version
 * @param ploader        receives the new mfxLoader on success
 * @return 0 on success, AVERROR_UNKNOWN on failure.
 */
static int qsv_new_mfx_loader(void *ctx,
                              mfxHDL handle,
                              mfxHandleType handle_type,
                              mfxIMPL implementation,
                              mfxVersion *pver,
                              void **ploader)
{
    mfxStatus sts;
    mfxLoader loader = NULL;
    mfxConfig cfg;
    mfxVariant impl_value;

    *ploader = NULL;
    loader = MFXLoad();
    if (!loader) {
        av_log(ctx, AV_LOG_ERROR, "Error creating a MFX loader\n");
        goto fail;
    }

    /* Create configurations for implementation */
    cfg = MFXCreateConfig(loader);
    if (!cfg) {
        av_log(ctx, AV_LOG_ERROR, "Error creating a MFX configuration\n");
        goto fail;
    }

    impl_value.Type = MFX_VARIANT_TYPE_U32;
    impl_value.Data.U32 = (implementation == MFX_IMPL_SOFTWARE) ?
        MFX_IMPL_TYPE_SOFTWARE : MFX_IMPL_TYPE_HARDWARE;
    sts = MFXSetConfigFilterProperty(cfg,
                                     (const mfxU8 *)"mfxImplDescription.Impl", impl_value);
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
               "property: %d.\n", sts);
        goto fail;
    }

    impl_value.Type = MFX_VARIANT_TYPE_U32;
    impl_value.Data.U32 = pver->Version;
    sts = MFXSetConfigFilterProperty(cfg,
                                     (const mfxU8 *)"mfxImplDescription.ApiVersion.Version",
                                     impl_value);
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
               "property: %d.\n", sts);
        goto fail;
    }

    impl_value.Type = MFX_VARIANT_TYPE_U32;
    impl_value.Data.U32 = 0x8086; // Intel device only
    sts = MFXSetConfigFilterProperty(cfg,
                                     (const mfxU8 *)"mfxImplDescription.VendorID", impl_value);
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
               "VendorID property: %d.\n", sts);
        goto fail;
    }

    /* Pin the loader to the device behind the handle, and pick the
     * matching acceleration mode. */
    if (MFX_HANDLE_VA_DISPLAY == handle_type) {
        if (handle && qsv_va_update_config(ctx, handle, cfg))
            goto fail;

        impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_VAAPI;
    } else if (MFX_HANDLE_D3D9_DEVICE_MANAGER == handle_type) {
        if (handle && qsv_d3d9_update_config(ctx, handle, cfg))
            goto fail;

        impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_D3D9;
    } else {
        if (handle && qsv_d3d11_update_config(ctx, handle, cfg))
            goto fail;

        impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_D3D11;
    }

    impl_value.Type = MFX_VARIANT_TYPE_U32;
    sts = MFXSetConfigFilterProperty(cfg,
                                     (const mfxU8 *)"mfxImplDescription.AccelerationMode", impl_value);
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
               "AccelerationMode property: %d.\n", sts);
        goto fail;
    }

    *ploader = loader;

    return 0;

fail:
    if (loader)
        MFXUnload(loader);

    return AVERROR_UNKNOWN;
}
956 
/**
 * Create a session from the first implementation enumerated by the given
 * loader that MFXCreateSession accepts.
 *
 * @return 0 on success (session returned via *psession),
 *         AVERROR_UNKNOWN when no implementation works.
 */
static int qsv_create_mfx_session_from_loader(void *ctx, mfxLoader loader, mfxSession *psession)
{
    mfxStatus sts;
    mfxSession session = NULL;
    uint32_t impl_idx = 0;
    mfxVersion ver;

    while (1) {
        /* Enumerate all implementations */
        mfxImplDescription *impl_desc;

        sts = MFXEnumImplementations(loader, impl_idx,
                                     MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
                                     (mfxHDL *)&impl_desc);
        /* Failed to find an available implementation */
        if (sts == MFX_ERR_NOT_FOUND)
            break;
        else if (sts != MFX_ERR_NONE) {
            impl_idx++;
            continue;
        }

        sts = MFXCreateSession(loader, impl_idx, &session);
        MFXDispReleaseImplDescription(loader, impl_desc);
        if (sts == MFX_ERR_NONE)
            break;

        impl_idx++;
    }

    /* sts is MFX_ERR_NONE only if a session was successfully created. */
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error creating a MFX session: %d.\n", sts);
        goto fail;
    }

    sts = MFXQueryVersion(session, &ver);
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying a MFX session: %d.\n", sts);
        goto fail;
    }

    av_log(ctx, AV_LOG_VERBOSE, "Initialize MFX session: implementation "
           "version is %d.%d\n", ver.Major, ver.Minor);

    *psession = session;

    return 0;

fail:
    if (session)
        MFXClose(session);

    return AVERROR_UNKNOWN;
}
1011 
1012 static int qsv_create_mfx_session(void *ctx,
1013  mfxHDL handle,
1014  mfxHandleType handle_type,
1015  mfxIMPL implementation,
1016  mfxVersion *pver,
1017  mfxSession *psession,
1018  void **ploader)
1019 {
1020  mfxLoader loader = NULL;
1021 
1023  "Use Intel(R) oneVPL to create MFX session, API version is "
1024  "%d.%d, the required implementation version is %d.%d\n",
1025  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
1026 
1027  if (handle_type != MFX_HANDLE_VA_DISPLAY &&
1028  handle_type != MFX_HANDLE_D3D9_DEVICE_MANAGER &&
1029  handle_type != MFX_HANDLE_D3D11_DEVICE) {
1031  "Invalid MFX device handle type\n");
1032  return AVERROR(EXDEV);
1033  }
1034 
1035  *psession = NULL;
1036 
1037  if (!*ploader) {
1038  if (qsv_new_mfx_loader(ctx, handle, handle_type, implementation, pver, (void **)&loader))
1039  goto fail;
1040 
1041  av_assert0(loader);
1042  } else
1043  loader = *ploader; // Use the input mfxLoader to create mfx session
1044 
1045  if (qsv_create_mfx_session_from_loader(ctx, loader, psession))
1046  goto fail;
1047 
1048  if (!*ploader)
1049  *ploader = loader;
1050 
1051  return 0;
1052 
1053 fail:
1054  if (!*ploader && loader)
1055  MFXUnload(loader);
1056 
1057  return AVERROR_UNKNOWN;
1058 }
1059 
1060 #else
1061 
1062 static int qsv_create_mfx_session(void *ctx,
1063  mfxHDL handle,
1064  mfxHandleType handle_type,
1065  mfxIMPL implementation,
1066  mfxVersion *pver,
1067  mfxSession *psession,
1068  void **ploader)
1069 {
1070  mfxVersion ver;
1071  mfxStatus sts;
1072  mfxSession session = NULL;
1073 
1075  "Use Intel(R) Media SDK to create MFX session, API version is "
1076  "%d.%d, the required implementation version is %d.%d\n",
1077  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
1078 
1079  *ploader = NULL;
1080  *psession = NULL;
1081  ver = *pver;
1082  sts = MFXInit(implementation, &ver, &session);
1083  if (sts != MFX_ERR_NONE) {
1084  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1085  "%d.\n", sts);
1086  goto fail;
1087  }
1088 
1089  sts = MFXQueryVersion(session, &ver);
1090  if (sts != MFX_ERR_NONE) {
1091  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: "
1092  "%d.\n", sts);
1093  goto fail;
1094  }
1095 
1096  av_log(ctx, AV_LOG_VERBOSE, "Initialize MFX session: implementation "
1097  "version is %d.%d\n", ver.Major, ver.Minor);
1098 
1099  MFXClose(session);
1100 
1101  sts = MFXInit(implementation, &ver, &session);
1102  if (sts != MFX_ERR_NONE) {
1103  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1104  "%d.\n", sts);
1105  goto fail;
1106  }
1107 
1108  *psession = session;
1109 
1110  return 0;
1111 
1112 fail:
1113  if (session)
1114  MFXClose(session);
1115 
1116  return AVERROR_UNKNOWN;
1117 }
1118 
1119 #endif
1120 
1122  mfxSession *session, int upload)
1123 {
1124  QSVFramesContext *s = ctx->hwctx;
1125  AVQSVFramesContext *frames_hwctx = &s->p;
1126  QSVDeviceContext *device_priv = ctx->device_ctx->hwctx;
1127  AVQSVDeviceContext *hwctx = &device_priv->p;
1128  int opaque = 0;
1129 
1130  mfxFrameAllocator frame_allocator = {
1131  .pthis = ctx,
1132  .Alloc = frame_alloc,
1133  .Lock = frame_lock,
1134  .Unlock = frame_unlock,
1135  .GetHDL = frame_get_hdl,
1136  .Free = frame_free,
1137  };
1138 
1139  mfxVideoParam par;
1140  mfxStatus err;
1141  int ret = AVERROR_UNKNOWN;
1142  /* hwctx->loader is non-NULL for oneVPL user and NULL for non-oneVPL user */
1143  void **loader = &hwctx->loader;
1144  mfxSession parent_session = hwctx->session;
1145  mfxIMPL impl;
1146  mfxVersion ver;
1147 
1148  err = MFXQueryIMPL(parent_session, &impl);
1149  if (err == MFX_ERR_NONE)
1150  err = MFXQueryVersion(parent_session, &ver);
1151  if (err != MFX_ERR_NONE) {
1152  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes.\n");
1153  return AVERROR_UNKNOWN;
1154  }
1155 
1156 #if QSV_HAVE_OPAQUE
1157  opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
1158 #endif
1159 
1160  ret = qsv_create_mfx_session(ctx, device_priv->handle, device_priv->handle_type,
1161  device_priv->impl, &device_priv->ver, session, loader);
1162  if (ret)
1163  goto fail;
1164 
1165  if (device_priv->handle) {
1166  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
1167  device_priv->handle);
1168  if (err != MFX_ERR_NONE) {
1169  ret = AVERROR_UNKNOWN;
1170  goto fail;
1171  }
1172  }
1173 
1174  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
1175  err = MFXJoinSession(parent_session, *session);
1176  if (err != MFX_ERR_NONE) {
1177  av_log(ctx, AV_LOG_ERROR, "Error joining session.\n");
1178  ret = AVERROR_UNKNOWN;
1179  goto fail;
1180  }
1181  }
1182 
1183  if (!opaque) {
1184  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
1185  if (err != MFX_ERR_NONE) {
1186  ret = AVERROR_UNKNOWN;
1187  goto fail;
1188  }
1189  }
1190 
1191  memset(&par, 0, sizeof(par));
1192 
1193  if (!opaque) {
1194  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
1195  MFX_IOPATTERN_IN_VIDEO_MEMORY;
1196  }
1197 #if QSV_HAVE_OPAQUE
1198  else {
1199  par.ExtParam = s->ext_buffers;
1200  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
1201  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
1202  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
1203  }
1204 #endif
1205 
1206  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
1207  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
1208  par.AsyncDepth = 1;
1209 
1210  par.vpp.In = frames_hwctx->surfaces[0].Info;
1211 
1212  /* Apparently VPP requires the frame rate to be set to some value, otherwise
1213  * init will fail (probably for the framerate conversion filter). Since we
1214  * are only doing data upload/download here, we just invent an arbitrary
1215  * value */
1216  par.vpp.In.FrameRateExtN = 25;
1217  par.vpp.In.FrameRateExtD = 1;
1218  par.vpp.Out = par.vpp.In;
1219 
1220  err = MFXVideoVPP_Init(*session, &par);
1221  if (err != MFX_ERR_NONE) {
1222  av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
1223  "Surface upload/download will not be possible\n");
1224 
1225  ret = AVERROR_UNKNOWN;
1226  goto fail;
1227  }
1228 
1229  return 0;
1230 
1231 fail:
1232  if (*session)
1233  MFXClose(*session);
1234 
1235  *session = NULL;
1236 
1237  return ret;
1238 }
1239 
1241 {
1242  QSVFramesContext *s = ctx->hwctx;
1243  AVQSVFramesContext *frames_hwctx = &s->p;
1244 
1245  int opaque = 0;
1246 
1247  uint32_t fourcc;
1248  int i, ret;
1249 
1250 #if QSV_HAVE_OPAQUE
1251  opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
1252 #endif
1253 
1254  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
1255  if (!fourcc) {
1256  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
1257  return AVERROR(ENOSYS);
1258  }
1259 
1260  if (!ctx->pool) {
1262  if (ret < 0) {
1263  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
1264  return ret;
1265  }
1266  }
1267 
1268  if (!opaque) {
1269  s->mem_ids = av_calloc(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
1270  if (!s->mem_ids)
1271  return AVERROR(ENOMEM);
1272 
1273  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
1274  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
1275  }
1276 #if QSV_HAVE_OPAQUE
1277  else {
1278  s->surface_ptrs = av_calloc(frames_hwctx->nb_surfaces,
1279  sizeof(*s->surface_ptrs));
1280  if (!s->surface_ptrs)
1281  return AVERROR(ENOMEM);
1282 
1283  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
1284  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
1285 
1286  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
1287  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
1288  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
1289 
1290  s->opaque_alloc.Out = s->opaque_alloc.In;
1291 
1292  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
1293  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
1294 
1295  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
1296  }
1297 #endif
1298 
1299  s->session_download = NULL;
1300  s->session_upload = NULL;
1301 
1302  s->session_download_init = 0;
1303  s->session_upload_init = 0;
1304 
1305 #if HAVE_PTHREADS
1306  pthread_mutex_init(&s->session_lock, NULL);
1307 #endif
1308 
1309  return 0;
1310 }
1311 
1313 {
1314  frame->buf[0] = av_buffer_pool_get(ctx->pool);
1315  if (!frame->buf[0])
1316  return AVERROR(ENOMEM);
1317 
1318  frame->data[3] = frame->buf[0]->data;
1319  frame->format = AV_PIX_FMT_QSV;
1320  frame->width = ctx->width;
1321  frame->height = ctx->height;
1322 
1323  return 0;
1324 }
1325 
1327  enum AVHWFrameTransferDirection dir,
1328  enum AVPixelFormat **formats)
1329 {
1330  enum AVPixelFormat *fmts;
1331 
1332  fmts = av_malloc_array(2, sizeof(*fmts));
1333  if (!fmts)
1334  return AVERROR(ENOMEM);
1335 
1336  fmts[0] = ctx->sw_format;
1337  fmts[1] = AV_PIX_FMT_NONE;
1338 
1339  *formats = fmts;
1340 
1341  return 0;
1342 }
1343 
1345  AVHWFramesContext *src_ctx, int flags)
1346 {
1347  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
1348  int i;
1349 
1350  switch (dst_ctx->device_ctx->type) {
1351 #if CONFIG_VAAPI
1353  {
1354  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
1355  dst_hwctx->surface_ids = av_calloc(src_hwctx->nb_surfaces,
1356  sizeof(*dst_hwctx->surface_ids));
1357  if (!dst_hwctx->surface_ids)
1358  return AVERROR(ENOMEM);
1359  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1360  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1361  dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
1362  }
1363  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1364  }
1365  break;
1366 #endif
1367 #if CONFIG_D3D11VA
1369  {
1370  D3D11_TEXTURE2D_DESC texDesc;
1371  dst_ctx->initial_pool_size = src_ctx->initial_pool_size;
1372  AVD3D11VAFramesContext *dst_hwctx = dst_ctx->hwctx;
1373  dst_hwctx->texture_infos = av_calloc(src_hwctx->nb_surfaces,
1374  sizeof(*dst_hwctx->texture_infos));
1375  if (!dst_hwctx->texture_infos)
1376  return AVERROR(ENOMEM);
1377  if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
1378  dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
1379  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1380  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1381  dst_hwctx->texture_infos[i].texture = (ID3D11Texture2D*)pair->first;
1382  dst_hwctx->texture_infos[i].index = pair->second == (mfxMemId)MFX_INFINITE ? (intptr_t)0 : (intptr_t)pair->second;
1383  }
1384  ID3D11Texture2D_GetDesc(dst_hwctx->texture_infos[0].texture, &texDesc);
1385  dst_hwctx->BindFlags = texDesc.BindFlags;
1386  }
1387  break;
1388 #endif
1389 #if CONFIG_DXVA2
1391  {
1392  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
1393  dst_hwctx->surfaces = av_calloc(src_hwctx->nb_surfaces,
1394  sizeof(*dst_hwctx->surfaces));
1395  if (!dst_hwctx->surfaces)
1396  return AVERROR(ENOMEM);
1397  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1398  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1399  dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
1400  }
1401  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1402  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
1403  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
1404  else
1405  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
1406  }
1407  break;
1408 #endif
1409  default:
1410  return AVERROR(ENOSYS);
1411  }
1412 
1413  return 0;
1414 }
1415 
1417  AVFrame *dst, const AVFrame *src, int flags)
1418 {
1419  QSVFramesContext *s = ctx->hwctx;
1420  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
1421  AVHWFramesContext *child_frames_ctx;
1422  const AVPixFmtDescriptor *desc;
1423  uint8_t *child_data;
1424  AVFrame *dummy;
1425  int ret = 0;
1426 
1427  if (!s->child_frames_ref)
1428  return AVERROR(ENOSYS);
1429  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
1430 
1431  switch (child_frames_ctx->device_ctx->type) {
1432 #if CONFIG_VAAPI
1434  {
1435  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1436  /* pair->first is *VASurfaceID while data[3] in vaapi frame is VASurfaceID, so
1437  * we need this casting for vaapi.
1438  * Add intptr_t to force cast from VASurfaceID(uint) type to pointer(long) type
1439  * to avoid compile warning */
1440  child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)pair->first;
1441  break;
1442  }
1443 #endif
1444 #if CONFIG_D3D11VA
1446  {
1447  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1448  child_data = pair->first;
1449  break;
1450  }
1451 #endif
1452 #if CONFIG_DXVA2
1454  {
1455  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1456  child_data = pair->first;
1457  break;
1458  }
1459 #endif
1460  default:
1461  return AVERROR(ENOSYS);
1462  }
1463 
1464  if (dst->format == child_frames_ctx->format) {
1465  ret = ff_hwframe_map_create(s->child_frames_ref,
1466  dst, src, NULL, NULL);
1467  if (ret < 0)
1468  return ret;
1469 
1470  dst->width = src->width;
1471  dst->height = src->height;
1472 
1473  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
1474  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1475  dst->data[0] = pair->first;
1476  dst->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
1477  } else {
1478  dst->data[3] = child_data;
1479  }
1480 
1481  return 0;
1482  }
1483 
1485  if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
1486  // This only supports mapping to software.
1487  return AVERROR(ENOSYS);
1488  }
1489 
1490  dummy = av_frame_alloc();
1491  if (!dummy)
1492  return AVERROR(ENOMEM);
1493 
1494  dummy->buf[0] = av_buffer_ref(src->buf[0]);
1495  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
1496  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
1497  goto fail;
1498 
1499  dummy->format = child_frames_ctx->format;
1500  dummy->width = src->width;
1501  dummy->height = src->height;
1502 
1503  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
1504  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1505  dummy->data[0] = pair->first;
1506  dummy->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
1507  } else {
1508  dummy->data[3] = child_data;
1509  }
1510 
1511  ret = av_hwframe_map(dst, dummy, flags);
1512 
1513 fail:
1514  av_frame_free(&dummy);
1515 
1516  return ret;
1517 }
1518 
1520  const AVFrame *src)
1521 {
1522  QSVFramesContext *s = ctx->hwctx;
1523  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
1524  int download = !!src->hw_frames_ctx;
1525  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
1526 
1527  AVFrame *dummy;
1528  int ret;
1529 
1530  dummy = av_frame_alloc();
1531  if (!dummy)
1532  return AVERROR(ENOMEM);
1533 
1534  dummy->format = child_frames_ctx->format;
1535  dummy->width = src->width;
1536  dummy->height = src->height;
1537  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
1538  dummy->data[3] = surf->Data.MemId;
1539  dummy->hw_frames_ctx = s->child_frames_ref;
1540 
1541  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
1543 
1544  dummy->buf[0] = NULL;
1545  dummy->data[3] = NULL;
1546  dummy->hw_frames_ctx = NULL;
1547 
1548  av_frame_free(&dummy);
1549 
1550  return ret;
1551 }
1552 
1553 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
1554 {
1555  switch (frame->format) {
1556  case AV_PIX_FMT_NV12:
1557  case AV_PIX_FMT_P010:
1558  case AV_PIX_FMT_P012:
1559  surface->Data.Y = frame->data[0];
1560  surface->Data.UV = frame->data[1];
1561  break;
1562 
1563  case AV_PIX_FMT_YUV420P:
1564  surface->Data.Y = frame->data[0];
1565  surface->Data.U = frame->data[1];
1566  surface->Data.V = frame->data[2];
1567  break;
1568 
1569  case AV_PIX_FMT_BGRA:
1570  surface->Data.B = frame->data[0];
1571  surface->Data.G = frame->data[0] + 1;
1572  surface->Data.R = frame->data[0] + 2;
1573  surface->Data.A = frame->data[0] + 3;
1574  break;
1575  case AV_PIX_FMT_YUYV422:
1576  surface->Data.Y = frame->data[0];
1577  surface->Data.U = frame->data[0] + 1;
1578  surface->Data.V = frame->data[0] + 3;
1579  break;
1580 
1581  case AV_PIX_FMT_Y210:
1582  case AV_PIX_FMT_Y212:
1583  surface->Data.Y16 = (mfxU16 *)frame->data[0];
1584  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
1585  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
1586  break;
1587  case AV_PIX_FMT_VUYX:
1588  surface->Data.V = frame->data[0];
1589  surface->Data.U = frame->data[0] + 1;
1590  surface->Data.Y = frame->data[0] + 2;
1591  // Only set Data.A to a valid address, the SDK doesn't
1592  // use the value from the frame.
1593  surface->Data.A = frame->data[0] + 3;
1594  break;
1595  case AV_PIX_FMT_XV30:
1596  surface->Data.U = frame->data[0];
1597  break;
1598  case AV_PIX_FMT_XV36:
1599  surface->Data.U = frame->data[0];
1600  surface->Data.Y = frame->data[0] + 2;
1601  surface->Data.V = frame->data[0] + 4;
1602  // Only set Data.A to a valid address, the SDK doesn't
1603  // use the value from the frame.
1604  surface->Data.A = frame->data[0] + 6;
1605  break;
1606 #if CONFIG_VAAPI
1607  case AV_PIX_FMT_UYVY422:
1608  surface->Data.Y = frame->data[0] + 1;
1609  surface->Data.U = frame->data[0];
1610  surface->Data.V = frame->data[0] + 2;
1611  break;
1612 #endif
1613  default:
1614  return MFX_ERR_UNSUPPORTED;
1615  }
1616  surface->Data.Pitch = frame->linesize[0];
1617  surface->Data.TimeStamp = frame->pts;
1618 
1619  return 0;
1620 }
1621 
1623 {
1624  QSVFramesContext *s = ctx->hwctx;
1625  atomic_int *inited = upload ? &s->session_upload_init : &s->session_download_init;
1626  mfxSession *session = upload ? &s->session_upload : &s->session_download;
1627  int ret = 0;
1628 
1629  if (atomic_load(inited))
1630  return 0;
1631 
1632 #if HAVE_PTHREADS
1633  pthread_mutex_lock(&s->session_lock);
1634 #endif
1635 
1636  if (!atomic_load(inited)) {
1637  ret = qsv_init_internal_session(ctx, session, upload);
1638  atomic_store(inited, 1);
1639  }
1640 
1641 #if HAVE_PTHREADS
1642  pthread_mutex_unlock(&s->session_lock);
1643 #endif
1644 
1645  return ret;
1646 }
1647 
1649  const AVFrame *src)
1650 {
1651  QSVFramesContext *s = ctx->hwctx;
1652  mfxFrameSurface1 out = {{ 0 }};
1653  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
1654 
1655  mfxSyncPoint sync = NULL;
1656  mfxStatus err;
1657  int ret = 0;
1658  /* download to temp frame if the output is not padded as libmfx requires */
1659  AVFrame *tmp_frame = &s->realigned_download_frame;
1660  AVFrame *dst_frame;
1661  int realigned = 0;
1662 
1664  if (ret < 0)
1665  return ret;
1666 
1667  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1668  * Height must be a multiple of 16 for progressive frame sequence and a
1669  * multiple of 32 otherwise.", so allign all frames to 16 before downloading. */
1670  if (dst->height & 15 || dst->linesize[0] & 15) {
1671  realigned = 1;
1672  if (tmp_frame->format != dst->format ||
1673  tmp_frame->width != FFALIGN(dst->linesize[0], 16) ||
1674  tmp_frame->height != FFALIGN(dst->height, 16)) {
1675  av_frame_unref(tmp_frame);
1676 
1677  tmp_frame->format = dst->format;
1678  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1679  tmp_frame->height = FFALIGN(dst->height, 16);
1680  ret = av_frame_get_buffer(tmp_frame, 0);
1681  if (ret < 0)
1682  return ret;
1683  }
1684  }
1685 
1686  dst_frame = realigned ? tmp_frame : dst;
1687 
1688  if (!s->session_download) {
1689  if (s->child_frames_ref)
1690  return qsv_transfer_data_child(ctx, dst_frame, src);
1691 
1692  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
1693  return AVERROR(ENOSYS);
1694  }
1695 
1696  out.Info = in->Info;
1697  map_frame_to_surface(dst_frame, &out);
1698 
1699  do {
1700  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
1701  if (err == MFX_WRN_DEVICE_BUSY)
1702  av_usleep(1);
1703  } while (err == MFX_WRN_DEVICE_BUSY);
1704 
1705  if (err < 0 || !sync) {
1706  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
1707  return AVERROR_UNKNOWN;
1708  }
1709 
1710  do {
1711  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
1712  } while (err == MFX_WRN_IN_EXECUTION);
1713  if (err < 0) {
1714  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
1715  return AVERROR_UNKNOWN;
1716  }
1717 
1718  if (realigned) {
1719  tmp_frame->width = dst->width;
1720  tmp_frame->height = dst->height;
1721  ret = av_frame_copy(dst, tmp_frame);
1722  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1723  tmp_frame->height = FFALIGN(dst->height, 16);
1724  if (ret < 0)
1725  return ret;
1726  }
1727 
1728  return 0;
1729 }
1730 
1732  const AVFrame *src)
1733 {
1734  QSVFramesContext *s = ctx->hwctx;
1735  mfxFrameSurface1 in = {{ 0 }};
1736  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
1737  mfxFrameInfo tmp_info;
1738 
1739  mfxSyncPoint sync = NULL;
1740  mfxStatus err;
1741  int ret = 0;
1742  /* make a copy if the input is not padded as libmfx requires */
1743  AVFrame *tmp_frame = &s->realigned_upload_frame;
1744  const AVFrame *src_frame;
1745  int realigned = 0;
1746 
1748  if (ret < 0)
1749  return ret;
1750 
1751  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1752  * Height must be a multiple of 16 for progressive frame sequence and a
1753  * multiple of 32 otherwise.", so allign all frames to 16 before uploading. */
1754  if (src->height & 15 || src->linesize[0] & 15) {
1755  realigned = 1;
1756  if (tmp_frame->format != src->format ||
1757  tmp_frame->width != FFALIGN(src->width, 16) ||
1758  tmp_frame->height != FFALIGN(src->height, 16)) {
1759  av_frame_unref(tmp_frame);
1760 
1761  tmp_frame->format = src->format;
1762  tmp_frame->width = FFALIGN(src->width, 16);
1763  tmp_frame->height = FFALIGN(src->height, 16);
1764  ret = av_frame_get_buffer(tmp_frame, 0);
1765  if (ret < 0)
1766  return ret;
1767  }
1768  ret = av_frame_copy(tmp_frame, src);
1769  if (ret < 0) {
1770  av_frame_unref(tmp_frame);
1771  return ret;
1772  }
1773  ret = qsv_fill_border(tmp_frame, src);
1774  if (ret < 0) {
1775  av_frame_unref(tmp_frame);
1776  return ret;
1777  }
1778 
1779  tmp_info = out->Info;
1780  out->Info.CropW = FFMIN(out->Info.Width, tmp_frame->width);
1781  out->Info.CropH = FFMIN(out->Info.Height, tmp_frame->height);
1782  }
1783 
1784  src_frame = realigned ? tmp_frame : src;
1785 
1786  if (!s->session_upload) {
1787  if (s->child_frames_ref)
1788  return qsv_transfer_data_child(ctx, dst, src_frame);
1789 
1790  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
1791  return AVERROR(ENOSYS);
1792  }
1793 
1794  in.Info = out->Info;
1795  map_frame_to_surface(src_frame, &in);
1796 
1797  do {
1798  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
1799  if (err == MFX_WRN_DEVICE_BUSY)
1800  av_usleep(1);
1801  } while (err == MFX_WRN_DEVICE_BUSY);
1802 
1803  if (err < 0 || !sync) {
1804  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
1805  return AVERROR_UNKNOWN;
1806  }
1807 
1808  do {
1809  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
1810  } while (err == MFX_WRN_IN_EXECUTION);
1811  if (err < 0) {
1812  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
1813  return AVERROR_UNKNOWN;
1814  }
1815 
1816  if (realigned) {
1817  out->Info.CropW = tmp_info.CropW;
1818  out->Info.CropH = tmp_info.CropH;
1819  }
1820 
1821  return 0;
1822 }
1823 
1825  AVHWFramesContext *src_ctx, int flags)
1826 {
1827  QSVFramesContext *s = dst_ctx->hwctx;
1828  AVQSVFramesContext *dst_hwctx = &s->p;
1829  int i;
1830 
1831  if (src_ctx->initial_pool_size == 0) {
1832  av_log(dst_ctx, AV_LOG_ERROR, "Only fixed-size pools can be "
1833  "mapped to QSV frames.\n");
1834  return AVERROR(EINVAL);
1835  }
1836 
1837  switch (src_ctx->device_ctx->type) {
1838 #if CONFIG_VAAPI
1840  {
1841  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
1842  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1843  sizeof(*s->handle_pairs_internal));
1844  if (!s->handle_pairs_internal)
1845  return AVERROR(ENOMEM);
1846  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1847  sizeof(*s->surfaces_internal));
1848  if (!s->surfaces_internal)
1849  return AVERROR(ENOMEM);
1850  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1851  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1852  s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
1853  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1854  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1855  }
1856  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1857  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1858  }
1859  break;
1860 #endif
1861 #if CONFIG_D3D11VA
1863  {
1864  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
1865  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1866  sizeof(*s->handle_pairs_internal));
1867  if (!s->handle_pairs_internal)
1868  return AVERROR(ENOMEM);
1869  s->surfaces_internal = av_calloc(src_ctx->initial_pool_size,
1870  sizeof(*s->surfaces_internal));
1871  if (!s->surfaces_internal)
1872  return AVERROR(ENOMEM);
1873  for (i = 0; i < src_ctx->initial_pool_size; i++) {
1874  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1875  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
1876  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1877  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1878  } else {
1879  s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
1880  }
1881  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1882  }
1883  dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
1884  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1885  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1886  } else {
1887  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1888  }
1889  }
1890  break;
1891 #endif
1892 #if CONFIG_DXVA2
1894  {
1895  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1896  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1897  sizeof(*s->handle_pairs_internal));
1898  if (!s->handle_pairs_internal)
1899  return AVERROR(ENOMEM);
1900  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1901  sizeof(*s->surfaces_internal));
1902  if (!s->surfaces_internal)
1903  return AVERROR(ENOMEM);
1904  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1905  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1906  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
1907  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1908  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1909  }
1910  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1911  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1912  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1913  else
1914  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1915  }
1916  break;
1917 #endif
1918  default:
1919  return AVERROR(ENOSYS);
1920  }
1921 
1922  dst_hwctx->surfaces = s->surfaces_internal;
1923 
1924  return 0;
1925 }
1926 
1927 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1928  AVFrame *dst, const AVFrame *src, int flags)
1929 {
1930  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1931  int i, err, index = -1;
1932 
1933  for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
1934  switch(src->format) {
1935 #if CONFIG_VAAPI
1936  case AV_PIX_FMT_VAAPI:
1937  {
1938  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1939  if (*(VASurfaceID*)pair->first == (VASurfaceID)(uintptr_t)src->data[3]) {
1940  index = i;
1941  break;
1942  }
1943  }
1944 #endif
1945 #if CONFIG_D3D11VA
1946  case AV_PIX_FMT_D3D11:
1947  {
1948  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1949  if (pair->first == src->data[0]
1950  && (pair->second == src->data[1]
1951  || (pair->second == (mfxMemId)MFX_INFINITE && src->data[1] == (uint8_t *)0))) {
1952  index = i;
1953  break;
1954  }
1955  }
1956 #endif
1957 #if CONFIG_DXVA2
1958  case AV_PIX_FMT_DXVA2_VLD:
1959  {
1960  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1961  if (pair->first == src->data[3]) {
1962  index = i;
1963  break;
1964  }
1965  }
1966 #endif
1967  }
1968  }
1969  if (index < 0) {
1970  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1971  "is not in the mapped frames context.\n");
1972  return AVERROR(EINVAL);
1973  }
1974 
1976  dst, src, NULL, NULL);
1977  if (err)
1978  return err;
1979 
1980  dst->width = src->width;
1981  dst->height = src->height;
1982  dst->data[3] = (uint8_t*)&hwctx->surfaces[index];
1983 
1984  return 0;
1985 }
1986 
1988  const void *hwconfig,
1989  AVHWFramesConstraints *constraints)
1990 {
1991  int i;
1992 
1994  sizeof(*constraints->valid_sw_formats));
1995  if (!constraints->valid_sw_formats)
1996  return AVERROR(ENOMEM);
1997 
1998  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1999  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
2001 
2002  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
2003  if (!constraints->valid_hw_formats)
2004  return AVERROR(ENOMEM);
2005 
2006  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
2007  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
2008 
2009  return 0;
2010 }
2011 
2013 {
2014  AVQSVDeviceContext *hwctx = ctx->hwctx;
2015  QSVDevicePriv *priv = ctx->user_opaque;
2016 
2017  if (hwctx->session)
2018  MFXClose(hwctx->session);
2019 
2020  if (hwctx->loader)
2021  MFXUnload(hwctx->loader);
2023  av_freep(&priv);
2024 }
2025 
2026 static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
2027 {
2028  static const struct {
2029  const char *name;
2030  mfxIMPL impl;
2031  } impl_map[] = {
2032  { "auto", MFX_IMPL_AUTO },
2033  { "sw", MFX_IMPL_SOFTWARE },
2034  { "hw", MFX_IMPL_HARDWARE },
2035  { "auto_any", MFX_IMPL_AUTO_ANY },
2036  { "hw_any", MFX_IMPL_HARDWARE_ANY },
2037  { "hw2", MFX_IMPL_HARDWARE2 },
2038  { "hw3", MFX_IMPL_HARDWARE3 },
2039  { "hw4", MFX_IMPL_HARDWARE4 },
2040  };
2041 
2042  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
2043  int i;
2044 
2045  if (device) {
2046  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
2047  if (!strcmp(device, impl_map[i].name)) {
2048  impl = impl_map[i].impl;
2049  break;
2050  }
2051  if (i == FF_ARRAY_ELEMS(impl_map))
2052  impl = strtol(device, NULL, 0);
2053  }
2054 
2055  if (impl != MFX_IMPL_SOFTWARE) {
2056  if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
2057  impl |= MFX_IMPL_VIA_D3D11;
2058  else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
2059  impl |= MFX_IMPL_VIA_D3D9;
2060  }
2061 
2062  return impl;
2063 }
2064 
2066  mfxIMPL implementation,
2067  AVHWDeviceContext *child_device_ctx,
2068  int flags)
2069 {
2070  AVQSVDeviceContext *hwctx = ctx->hwctx;
2071 
2072  mfxVersion ver = { { 3, 1 } };
2073  mfxHDL handle;
2074  mfxHandleType handle_type;
2075  mfxStatus err;
2076  int ret;
2077 
2078  switch (child_device_ctx->type) {
2079 #if CONFIG_VAAPI
2081  {
2082  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2083  handle_type = MFX_HANDLE_VA_DISPLAY;
2084  handle = (mfxHDL)child_device_hwctx->display;
2085  }
2086  break;
2087 #endif
2088 #if CONFIG_D3D11VA
2090  {
2091  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2092  handle_type = MFX_HANDLE_D3D11_DEVICE;
2093  handle = (mfxHDL)child_device_hwctx->device;
2094  }
2095  break;
2096 #endif
2097 #if CONFIG_DXVA2
2099  {
2100  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2101  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
2102  handle = (mfxHDL)child_device_hwctx->devmgr;
2103  }
2104  break;
2105 #endif
2106  default:
2107  ret = AVERROR(ENOSYS);
2108  goto fail;
2109  }
2110 
2111  ret = qsv_create_mfx_session(ctx, handle, handle_type, implementation, &ver,
2112  &hwctx->session, &hwctx->loader);
2113  if (ret)
2114  goto fail;
2115 
2116  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
2117  if (err != MFX_ERR_NONE) {
2118  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
2119  "%d\n", err);
2120  ret = AVERROR_UNKNOWN;
2121  goto fail;
2122  }
2123 
2124  return 0;
2125 
2126 fail:
2127  if (hwctx->session)
2128  MFXClose(hwctx->session);
2129 
2130  if (hwctx->loader)
2131  MFXUnload(hwctx->loader);
2132 
2133  hwctx->session = NULL;
2134  hwctx->loader = NULL;
2135  return ret;
2136 }
2137 
2139  AVHWDeviceContext *child_device_ctx,
2140  AVDictionary *opts, int flags)
2141 {
2142  mfxIMPL impl;
2143  QSVDevicePriv *priv;
2144 
2145  priv = av_mallocz(sizeof(*priv));
2146  if (!priv)
2147  return AVERROR(ENOMEM);
2148 
2149  ctx->user_opaque = priv;
2150  ctx->free = qsv_device_free;
2151 
2152  impl = choose_implementation("hw_any", child_device_ctx->type);
2153  return qsv_device_derive_from_child(ctx, impl,
2154  child_device_ctx, flags);
2155 }
2156 
2157 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
2158  AVDictionary *opts, int flags)
2159 {
2160  QSVDevicePriv *priv;
2161  enum AVHWDeviceType child_device_type;
2162  AVHWDeviceContext *child_device;
2163  AVDictionary *child_device_opts;
2164  AVDictionaryEntry *e;
2165 
2166  mfxIMPL impl;
2167  int ret;
2168 
2169  priv = av_mallocz(sizeof(*priv));
2170  if (!priv)
2171  return AVERROR(ENOMEM);
2172 
2173  ctx->user_opaque = priv;
2174  ctx->free = qsv_device_free;
2175 
2176  e = av_dict_get(opts, "child_device_type", NULL, 0);
2177  if (e) {
2178  child_device_type = av_hwdevice_find_type_by_name(e->value);
2179  if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
2180  av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
2181  "\"%s\".\n", e->value);
2182  return AVERROR(EINVAL);
2183  }
2184 #if QSV_ONEVPL
2185  } else if (CONFIG_D3D11VA) { // Use D3D11 by default if d3d11va is enabled
2187  "Defaulting child_device_type to AV_HWDEVICE_TYPE_D3D11VA for oneVPL."
2188  "Please explicitly set child device type via \"-init_hw_device\" "
2189  "option if needed.\n");
2190  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
2191  } else if (CONFIG_DXVA2) {
2192  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
2193 #else
2194  } else if (CONFIG_DXVA2) {
2196  "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
2197  "with old commandlines. This behaviour will be removed "
2198  "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
2199  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
2200  } else if (CONFIG_D3D11VA) {
2201  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
2202 #endif
2203  } else if (CONFIG_VAAPI) {
2204  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
2205  } else {
2206  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
2207  return AVERROR(ENOSYS);
2208  }
2209 
2210 #if CONFIG_VAAPI && defined(_WIN32)
2211  /* AV_HWDEVICE_TYPE_VAAPI on Windows/Libva-win32 not supported */
2212  /* Reject user specified child_device_type or CONFIG_VAAPI on Windows */
2213  if (child_device_type == AV_HWDEVICE_TYPE_VAAPI) {
2214  av_log(ctx, AV_LOG_ERROR, "VAAPI child device type not supported for oneVPL on Windows"
2215  "\"%s\".\n", e->value);
2216  return AVERROR(EINVAL);
2217  }
2218 #endif
2219 
2220  child_device_opts = NULL;
2221  switch (child_device_type) {
2222 #if CONFIG_VAAPI
2224  {
2225  // libmfx does not actually implement VAAPI properly, rather it
2226  // depends on the specific behaviour of a matching iHD driver when
2227  // used on recent Intel hardware. Set options to the VAAPI device
2228  // creation so that we should pick a usable setup by default if
2229  // possible, even when multiple devices and drivers are available.
2230  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
2231  av_dict_set(&child_device_opts, "driver", "iHD", 0);
2232  }
2233  break;
2234 #endif
2235 #if CONFIG_D3D11VA
2237  {
2238  // Make sure the hardware vendor is Intel when multiple devices are
2239  // available, it will be ignored if user specifies the child device
2240  // explicitly
2241  av_dict_set(&child_device_opts, "vendor_id", "0x8086", 0);
2242  }
2243  break;
2244 #endif
2245 #if CONFIG_DXVA2
2247 #if QSV_ONEVPL
2248  {
2250  "d3d11va is not available or child device type is set to dxva2 "
2251  "explicitly for oneVPL.\n");
2252  }
2253 #endif
2254  break;
2255 #endif
2256  default:
2257  {
2258  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
2259  return AVERROR(ENOSYS);
2260  }
2261  break;
2262  }
2263 
2264  e = av_dict_get(opts, "child_device", NULL, 0);
2265  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
2266  e ? e->value : NULL, child_device_opts, 0);
2267 
2268  av_dict_free(&child_device_opts);
2269  if (ret < 0)
2270  return ret;
2271 
2272  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
2273 
2274  impl = choose_implementation(device, child_device_type);
2275 
2276  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
2277 }
2278 
2281  .name = "QSV",
2282 
2283  .device_hwctx_size = sizeof(QSVDeviceContext),
2284  .frames_hwctx_size = sizeof(QSVFramesContext),
2285 
2286  .device_create = qsv_device_create,
2287  .device_derive = qsv_device_derive,
2288  .device_init = qsv_device_init,
2289  .frames_get_constraints = qsv_frames_get_constraints,
2290  .frames_init = qsv_frames_init,
2291  .frames_uninit = qsv_frames_uninit,
2292  .frames_get_buffer = qsv_get_buffer,
2293  .transfer_get_formats = qsv_transfer_get_formats,
2294  .transfer_data_to = qsv_transfer_data_to,
2295  .transfer_data_from = qsv_transfer_data_from,
2296  .map_to = qsv_map_to,
2297  .map_from = qsv_map_from,
2298  .frames_derive_to = qsv_frames_derive_to,
2299  .frames_derive_from = qsv_frames_derive_from,
2300 
2301  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
2302 };
formats
formats
Definition: signature.h:48
pthread_mutex_t
_fmutex pthread_mutex_t
Definition: os2threads.h:53
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:85
FFHWFramesContext::pool_internal
AVBufferPool * pool_internal
Definition: hwcontext_internal.h:101
qsv_transfer_data_child
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1519
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:60
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
atomic_store
#define atomic_store(object, desired)
Definition: stdatomic.h:85
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
out
FILE * out
Definition: movenc.c:55
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:288
comp
static void comp(unsigned char *dst, ptrdiff_t dst_stride, unsigned char *src, ptrdiff_t src_stride, int add)
Definition: eamad.c:81
QSVFramesContext::child_frames_ref
AVBufferRef * child_frames_ref
Definition: hwcontext_qsv.c:105
qsv_transfer_data_to
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1731
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2965
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
pthread_mutex_init
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:104
qsv_map_from
static int qsv_map_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:1416
qsv_fourcc_from_pix_fmt
static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
Definition: hwcontext_qsv.c:198
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:197
QSVDeviceContext::p
AVQSVDeviceContext p
The public AVQSVDeviceContext.
Definition: hwcontext_qsv.c:80
qsv_fill_border
static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:235
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:160
QSVDeviceContext::ver
mfxVersion ver
Definition: hwcontext_qsv.c:84
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:322
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:374
pixdesc.h
AVFrame::width
int width
Definition: frame.h:446
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
AVD3D11VAFramesContext::MiscFlags
UINT MiscFlags
D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
Definition: hwcontext_d3d11va.h:166
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:248
qsv_device_derive
static int qsv_device_derive(AVHWDeviceContext *ctx, AVHWDeviceContext *child_device_ctx, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:2138
AVDXVA2FramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_dxva2.h:46
qsv_frames_derive_from
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:1344
AV_HWDEVICE_TYPE_NONE
@ AV_HWDEVICE_TYPE_NONE
Definition: hwcontext.h:28
av_hwframe_map
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:778
qsv_init_surface
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
Definition: hwcontext_qsv.c:536
data
const char data[16]
Definition: mxf.c:148
atomic_int
intptr_t atomic_int
Definition: stdatomic.h:55
choose_implementation
static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
Definition: hwcontext_qsv.c:2026
QSVDeviceContext
Definition: hwcontext_qsv.c:76
av_hwdevice_find_type_by_name
enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
Look up an AVHWDeviceType by name.
Definition: hwcontext.c:102
AV_PIX_FMT_XV30
#define AV_PIX_FMT_XV30
Definition: pixfmt.h:534
AVDXVA2DeviceContext::devmgr
IDirect3DDeviceManager9 * devmgr
Definition: hwcontext_dxva2.h:40
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVVAAPIDeviceContext::display
VADisplay display
The VADisplay handle, to be filled by the user.
Definition: hwcontext_vaapi.h:72
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:102
AVDictionary
Definition: dict.c:34
ff_hwframe_map_create
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
Definition: hwcontext.c:726
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:446
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:217
fourcc
uint32_t fourcc
Definition: hwcontext_qsv.c:125
av_hwdevice_ctx_init
int av_hwdevice_ctx_init(AVBufferRef *ref)
Finalize the device context before use.
Definition: hwcontext.c:208
AVFrame::buf
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:587
QSVDeviceContext::handle_type
mfxHandleType handle_type
Definition: hwcontext_qsv.c:83
qsv_transfer_data_from
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1648
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:395
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:441
QSVDevicePriv
Definition: hwcontext_qsv.c:72
AVD3D11VAFramesContext::BindFlags
UINT BindFlags
D3D11_TEXTURE2D_DESC.BindFlags used for texture creation.
Definition: hwcontext_d3d11va.h:160
AVVAAPIFramesContext::surface_ids
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
Definition: hwcontext_vaapi.h:101
AVD3D11FrameDescriptor::texture
ID3D11Texture2D * texture
The texture in which the frame is located.
Definition: hwcontext_d3d11va.h:117
QSVDeviceContext::child_device_type
enum AVHWDeviceType child_device_type
Definition: hwcontext_qsv.c:87
qsv_init_child_ctx
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:391
fail
#define fail()
Definition: checkasm.h:182
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:128
dummy
int dummy
Definition: motion.c:66
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
qsv_frames_get_constraints
static int qsv_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_qsv.c:1987
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
QSVFramesContext::session_download_init
atomic_int session_download_init
Definition: hwcontext_qsv.c:98
qsv_frames_derive_to
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:1824
QSV_RUNTIME_VERSION_ATLEAST
#define QSV_RUNTIME_VERSION_ATLEAST(MFX_VERSION, MAJOR, MINOR)
Definition: hwcontext_qsv.c:58
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:60
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:148
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:651
AV_PIX_FMT_Y210
#define AV_PIX_FMT_Y210
Definition: pixfmt.h:532
avassert.h
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
ffhwframesctx
static FFHWFramesContext * ffhwframesctx(AVHWFramesContext *ctx)
Definition: hwcontext_internal.h:115
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:217
av_hwdevice_ctx_alloc
AVBufferRef * av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
Allocate an AVHWDeviceContext for a given hardware type.
Definition: hwcontext.c:161
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:453
QSVFramesContext::ext_buffers
mfxExtBuffer * ext_buffers[1]
Definition: hwcontext_qsv.c:117
QSVFramesContext::session_upload_init
atomic_int session_upload_init
Definition: hwcontext_qsv.c:100
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:623
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:62
av_memcpy_backptr
void av_memcpy_backptr(uint8_t *dst, int back, int cnt)
Overlapping memcpy() implementation.
Definition: mem.c:447
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:384
QSVDevicePriv::child_device_ctx
AVBufferRef * child_device_ctx
Definition: hwcontext_qsv.c:73
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
s
#define s(width, name)
Definition: cbs_vp9.c:198
AVD3D11VADeviceContext::device
ID3D11Device * device
Device used for texture creation and access.
Definition: hwcontext_d3d11va.h:56
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:59
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
AVHWDeviceType
AVHWDeviceType
Definition: hwcontext.h:27
QSVDeviceContext::handle
mfxHDL handle
Definition: hwcontext_qsv.c:82
QSVFramesContext::mem_ids
mfxMemId * mem_ids
Definition: hwcontext_qsv.c:111
ctx
AVFormatContext * ctx
Definition: movenc.c:49
AVDXVA2FramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_dxva2.h:59
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
atomic_load
#define atomic_load(object)
Definition: stdatomic.h:93
MFXUnload
#define MFXUnload(a)
Definition: hwcontext_qsv.c:69
if
if(ret)
Definition: filter_design.txt:179
ff_hwcontext_type_qsv
const HWContextType ff_hwcontext_type_qsv
Definition: hwcontext_qsv.c:2279
qsv_create_mfx_session
static int qsv_create_mfx_session(void *ctx, mfxHDL handle, mfxHandleType handle_type, mfxIMPL implementation, mfxVersion *pver, mfxSession *psession, void **ploader)
Definition: hwcontext_qsv.c:1062
opts
AVDictionary * opts
Definition: movenc.c:51
AVD3D11VAFramesContext::texture_infos
AVD3D11FrameDescriptor * texture_infos
In case if texture structure member above is not NULL contains the same texture pointer for all eleme...
Definition: hwcontext_d3d11va.h:175
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:54
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:210
qsv_frames_uninit
static void qsv_frames_uninit(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:339
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVComponentDescriptor
Definition: pixdesc.h:30
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:74
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: hwcontext_qsv.c:62
qsv_internal_session_check_init
static int qsv_internal_session_check_init(AVHWFramesContext *ctx, int upload)
Definition: hwcontext_qsv.c:1622
qsv_frames_init
static int qsv_frames_init(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:1240
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:247
map_frame_to_surface
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
Definition: hwcontext_qsv.c:1553
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:661
index
int index
Definition: gxfenc.c:90
pthread_mutex_unlock
#define pthread_mutex_unlock(a)
Definition: ffprobe.c:82
AVD3D11VAFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_d3d11va.h:131
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
QSVFramesContext::realigned_upload_frame
AVFrame realigned_upload_frame
Definition: hwcontext_qsv.c:119
qsv_init_internal_session
static int qsv_init_internal_session(AVHWFramesContext *ctx, mfxSession *session, int upload)
Definition: hwcontext_qsv.c:1121
hwcontext_dxva2.h
QSVFramesContext::opaque_alloc
mfxExtOpaqueSurfaceAlloc opaque_alloc
Definition: hwcontext_qsv.c:116
qsv_get_buffer
static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
Definition: hwcontext_qsv.c:1312
AV_PIX_FMT_P012
#define AV_PIX_FMT_P012
Definition: pixfmt.h:529
AVDXVA2FramesContext::surface_type
DWORD surface_type
The surface type (e.g.
Definition: hwcontext_dxva2.h:51
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:999
size
int size
Definition: twinvq_data.h:10344
QSVFramesContext::nb_surfaces_used
int nb_surfaces_used
Definition: hwcontext_qsv.c:108
qsv_device_free
static void qsv_device_free(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:2012
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:461
ff_qsv_get_surface_base_handle
int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf, enum AVHWDeviceType base_dev_type, void **base_handle)
Caller needs to allocate enough space for base_handle pointer.
Definition: hwcontext_qsv.c:171
qsv_transfer_get_formats
static int qsv_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_qsv.c:1326
buffer.h
AV_PIX_FMT_Y212
#define AV_PIX_FMT_Y212
Definition: pixfmt.h:533
qsv_device_derive_from_child
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx, mfxIMPL implementation, AVHWDeviceContext *child_device_ctx, int flags)
Definition: hwcontext_qsv.c:2065
AVQSVDeviceContext::loader
void * loader
The mfxLoader handle used for mfxSession creation.
Definition: hwcontext_qsv.h:47
supported_pixel_formats
static const struct @401 supported_pixel_formats[]
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:223
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_qsv.h:55
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: hwcontext_qsv.c:666
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:126
AV_HWDEVICE_TYPE_VAAPI
@ AV_HWDEVICE_TYPE_VAAPI
Definition: hwcontext.h:31
pthread_mutex_destroy
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:112
av_image_get_linesize
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
Compute the size of an image line with format pix_fmt and width width for the plane plane.
Definition: imgutils.c:76
hwcontext_qsv.h
qsv_device_init
static int qsv_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:283
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVDXVA2DeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_dxva2.h:39
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
common.h
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
QSVFramesContext::handle_pairs_internal
mfxHDLPair * handle_pairs_internal
Definition: hwcontext_qsv.c:107
AVD3D11FrameDescriptor::index
intptr_t index
The index into the array texture element representing the frame, or 0 if the texture is not an array ...
Definition: hwcontext_d3d11va.h:125
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
QSVFramesContext::surface_ptrs
mfxFrameSurface1 ** surface_ptrs
Definition: hwcontext_qsv.c:114
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:606
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:256
QSVFramesContext::session_download
mfxSession session_download
Definition: hwcontext_qsv.c:97
AVDXVA2FramesContext::surfaces
IDirect3DSurface9 ** surfaces
The surface pool.
Definition: hwcontext_dxva2.h:58
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:264
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:403
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:115
hwcontext_vaapi.h
qsv_map_to
static int qsv_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:1927
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:84
ret
ret
Definition: filter_design.txt:187
pix_fmt
enum AVPixelFormat pix_fmt
Definition: hwcontext_qsv.c:124
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:72
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
AVHWFramesContext::device_ctx
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:134
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:150
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:600
QSVDeviceContext::impl
mfxIMPL impl
Definition: hwcontext_qsv.c:85
QSVFramesContext::realigned_download_frame
AVFrame realigned_download_frame
Definition: hwcontext_qsv.c:120
av_hwframe_transfer_data
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
Copy data to or from a hw surface.
Definition: hwcontext.c:433
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:656
AV_PIX_FMT_UYVY422
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:88
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:725
AV_HWDEVICE_TYPE_QSV
@ AV_HWDEVICE_TYPE_QSV
Definition: hwcontext.h:33
qsv_pool_release_dummy
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
Definition: hwcontext_qsv.c:372
AVFrame::height
int height
Definition: frame.h:446
QSVDeviceContext::child_pix_fmt
enum AVPixelFormat child_pix_fmt
Definition: hwcontext_qsv.c:88
AVVAAPIFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_vaapi.h:102
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
QSVFramesContext::session_upload
mfxSession session_upload
Definition: hwcontext_qsv.c:99
qsv_device_create
static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:2157
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:528
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:53
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:187
desc
const char * desc
Definition: libsvtav1.c:79
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
mfx_shift
uint16_t mfx_shift
Definition: hwcontext_qsv.c:126
qsv_shift_from_pix_fmt
static uint16_t qsv_shift_from_pix_fmt(enum AVPixelFormat pix_fmt)
Definition: hwcontext_qsv.c:208
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
hwcontext_internal.h
AVVAAPIFramesContext
VAAPI-specific data associated with a frame pool.
Definition: hwcontext_vaapi.h:88
QSVFramesContext::surfaces_internal
mfxFrameSurface1 * surfaces_internal
Definition: hwcontext_qsv.c:106
AVDictionaryEntry
Definition: dict.h:89
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
QSVFramesContext
Definition: qsv_internal.h:114
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:88
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
imgutils.h
AV_PIX_FMT_XV36
#define AV_PIX_FMT_XV36
Definition: pixfmt.h:535
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:474
hwcontext.h
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:419
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
HWContextType
Definition: hwcontext_internal.h:29
qsv_pool_alloc
static AVBufferRef * qsv_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_qsv.c:376
ID3D11Device
void ID3D11Device
Definition: nvenc.h:28
AVVAAPIDeviceContext
VAAPI connection details.
Definition: hwcontext_vaapi.h:68
AVDictionaryEntry::value
char * value
Definition: dict.h:91
AV_PIX_FMT_VUYX
@ AV_PIX_FMT_VUYX
packed VUYX 4:4:4, 32bpp, Variant of VUYA where alpha channel is left undefined
Definition: pixfmt.h:406
hwcontext_d3d11va.h
qsv_init_pool
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
Definition: hwcontext_qsv.c:572
QSVFramesContext::p
AVQSVFramesContext p
The public AVQSVFramesContext.
Definition: hwcontext_qsv.c:95
pthread_mutex_lock
#define pthread_mutex_lock(a)
Definition: ffprobe.c:78