FFmpeg
hwcontext_qsv.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdatomic.h>
20 #include <stdint.h>
21 #include <string.h>
22 
23 #include <mfxvideo.h>
24 
25 #include "config.h"
26 
27 #if HAVE_PTHREADS
28 #include <pthread.h>
29 #endif
30 
31 #define COBJMACROS
32 #if CONFIG_VAAPI
33 #include "hwcontext_vaapi.h"
34 #endif
35 #if CONFIG_D3D11VA
36 #include "hwcontext_d3d11va.h"
37 #endif
38 #if CONFIG_DXVA2
39 #include <initguid.h>
40 #include "hwcontext_dxva2.h"
41 #endif
42 
43 #include "buffer.h"
44 #include "common.h"
45 #include "hwcontext.h"
46 #include "hwcontext_internal.h"
47 #include "hwcontext_qsv.h"
48 #include "mem.h"
49 #include "pixfmt.h"
50 #include "pixdesc.h"
51 #include "time.h"
52 #include "imgutils.h"
53 #include "avassert.h"
54 
55 #define QSV_VERSION_ATLEAST(MAJOR, MINOR) \
56  (MFX_VERSION_MAJOR > (MAJOR) || \
57  MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR))
58 
59 #define QSV_RUNTIME_VERSION_ATLEAST(MFX_VERSION, MAJOR, MINOR) \
60  ((MFX_VERSION.Major > (MAJOR)) || \
61  (MFX_VERSION.Major == (MAJOR) && MFX_VERSION.Minor >= (MINOR)))
62 
63 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
64 #define QSV_ONEVPL QSV_VERSION_ATLEAST(2, 0)
65 #define QSV_HAVE_OPAQUE !QSV_ONEVPL
66 
67 #if QSV_ONEVPL
68 #include <mfxdispatcher.h>
69 #else
70 #define MFXUnload(a) do { } while(0)
71 #endif
72 
73 typedef struct QSVDevicePriv {
76 
77 typedef struct QSVDeviceContext {
78  /**
79  * The public AVQSVDeviceContext. See hwcontext_qsv.h for it.
80  */
82 
83  mfxHDL handle;
84  mfxHandleType handle_type;
85  mfxVersion ver;
86  mfxIMPL impl;
87 
91 
92 typedef struct QSVFramesContext {
93  /**
94  * The public AVQSVFramesContext. See hwcontext_qsv.h for it.
95  */
97 
98  mfxSession session_download;
100  mfxSession session_upload;
102 #if HAVE_PTHREADS
103  pthread_mutex_t session_lock;
104 #endif
105 
107  mfxFrameSurface1 *surfaces_internal;
110 
111  // used in the frame allocator for non-opaque surfaces
112  mfxMemId *mem_ids;
113 #if QSV_HAVE_OPAQUE
114  // used in the opaque alloc request for opaque surfaces
115  mfxFrameSurface1 **surface_ptrs;
116 
117  mfxExtOpaqueSurfaceAlloc opaque_alloc;
118  mfxExtBuffer *ext_buffers[1];
119 #endif
122 
123  mfxFrameInfo frame_info;
125 
126 typedef struct QSVSurface {
127  mfxFrameSurface1 mfx_surface;
129 } QSVSurface;
130 
131 static const struct {
133  uint32_t fourcc;
134  uint16_t mfx_shift;
136  { AV_PIX_FMT_NV12, MFX_FOURCC_NV12, 0 },
137  { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4, 0 },
138  { AV_PIX_FMT_P010, MFX_FOURCC_P010, 1 },
139  { AV_PIX_FMT_PAL8, MFX_FOURCC_P8, 0 },
141  MFX_FOURCC_YUY2, 0 },
142 #if CONFIG_VAAPI
144  MFX_FOURCC_UYVY, 0 },
145 #endif
146  { AV_PIX_FMT_Y210,
147  MFX_FOURCC_Y210, 1 },
148  // VUYX is used for VAAPI child device,
 149  // the SDK only declares support for AYUV
150  { AV_PIX_FMT_VUYX,
151  MFX_FOURCC_AYUV, 0 },
152  // XV30 is used for VAAPI child device,
 153  // the SDK only declares support for Y410
154  { AV_PIX_FMT_XV30,
155  MFX_FOURCC_Y410, 0 },
156 #if QSV_VERSION_ATLEAST(1, 31)
157  // P012 is used for VAAPI child device,
 158  // the SDK only declares support for P016
159  { AV_PIX_FMT_P012,
160  MFX_FOURCC_P016, 1 },
161  // Y212 is used for VAAPI child device,
 162  // the SDK only declares support for Y216
163  { AV_PIX_FMT_Y212,
164  MFX_FOURCC_Y216, 1 },
165  // XV36 is used for VAAPI child device,
 166  // the SDK only declares support for Y416
167  { AV_PIX_FMT_XV36,
168  MFX_FOURCC_Y416, 1 },
169 #endif
170 };
171 
172 extern int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
173  enum AVHWDeviceType base_dev_type,
174  void **base_handle);
175 
176 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf);
177 
178 /**
179  * Caller needs to allocate enough space for base_handle pointer.
180  **/
181 int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
182  enum AVHWDeviceType base_dev_type,
183  void **base_handle)
184 {
185  mfxHDLPair *handle_pair;
186  handle_pair = surf->Data.MemId;
187  switch (base_dev_type) {
188 #if CONFIG_VAAPI
190  base_handle[0] = handle_pair->first;
191  return 0;
192 #endif
193 #if CONFIG_D3D11VA
195  base_handle[0] = handle_pair->first;
196  base_handle[1] = handle_pair->second;
197  return 0;
198 #endif
199 #if CONFIG_DXVA2
201  base_handle[0] = handle_pair->first;
202  return 0;
203 #endif
204  }
205  return AVERROR(EINVAL);
206 }
207 
209 {
210  int i;
211  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
213  return supported_pixel_formats[i].fourcc;
214  }
215  return 0;
216 }
217 
219 {
220  for (int i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
222  return supported_pixel_formats[i].mfx_shift;
223  }
224 
225  return 0;
226 }
227 
228 #if CONFIG_D3D11VA
229 static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
230 {
231  uint32_t bind_flags = 0;
232 
233  if ((mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) && (mem_type & MFX_MEMTYPE_INTERNAL_FRAME))
234  bind_flags = D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER;
235  else
236  bind_flags = D3D11_BIND_DECODER;
237 
238  if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
239  bind_flags = D3D11_BIND_RENDER_TARGET;
240 
241  return bind_flags;
242 }
243 #endif
244 
245 static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
246 {
247  const AVPixFmtDescriptor *desc;
248  int i, planes_nb = 0;
249  if (dst->format != src->format)
250  return AVERROR(EINVAL);
251 
253 
254  for (i = 0; i < desc->nb_components; i++)
255  planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);
256 
257  for (i = 0; i < planes_nb; i++) {
258  int sheight, dheight, y;
259  ptrdiff_t swidth = av_image_get_linesize(src->format,
260  src->width,
261  i);
262  ptrdiff_t dwidth = av_image_get_linesize(dst->format,
263  dst->width,
264  i);
265  const AVComponentDescriptor comp = desc->comp[i];
266  if (swidth < 0 || dwidth < 0) {
267  av_log(NULL, AV_LOG_ERROR, "av_image_get_linesize failed\n");
268  return AVERROR(EINVAL);
269  }
270  sheight = src->height;
271  dheight = dst->height;
272  if (i) {
273  sheight = AV_CEIL_RSHIFT(src->height, desc->log2_chroma_h);
274  dheight = AV_CEIL_RSHIFT(dst->height, desc->log2_chroma_h);
275  }
276  //fill right padding
277  for (y = 0; y < sheight; y++) {
278  void *line_ptr = dst->data[i] + y*dst->linesize[i] + swidth;
279  av_memcpy_backptr(line_ptr,
280  comp.depth > 8 ? 2 : 1,
281  dwidth - swidth);
282  }
283  //fill bottom padding
284  for (y = sheight; y < dheight; y++) {
285  memcpy(dst->data[i]+y*dst->linesize[i],
286  dst->data[i]+(sheight-1)*dst->linesize[i],
287  dwidth);
288  }
289  }
290  return 0;
291 }
292 
294 {
295  QSVDeviceContext *s = ctx->hwctx;
296  AVQSVDeviceContext *hwctx = &s->p;
297  int hw_handle_supported = 0;
298  mfxHandleType handle_type;
299  enum AVHWDeviceType device_type;
300  enum AVPixelFormat pix_fmt;
301  mfxStatus err;
302 
303  err = MFXQueryIMPL(hwctx->session, &s->impl);
304  if (err == MFX_ERR_NONE)
305  err = MFXQueryVersion(hwctx->session, &s->ver);
306  if (err != MFX_ERR_NONE) {
307  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
308  return AVERROR_UNKNOWN;
309  }
310 
311  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
312 #if CONFIG_VAAPI
313  handle_type = MFX_HANDLE_VA_DISPLAY;
314  device_type = AV_HWDEVICE_TYPE_VAAPI;
316  hw_handle_supported = 1;
317 #endif
318  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
319 #if CONFIG_D3D11VA
320  handle_type = MFX_HANDLE_D3D11_DEVICE;
321  device_type = AV_HWDEVICE_TYPE_D3D11VA;
323  hw_handle_supported = 1;
324 #endif
325  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
326 #if CONFIG_DXVA2
327  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
328  device_type = AV_HWDEVICE_TYPE_DXVA2;
330  hw_handle_supported = 1;
331 #endif
332  }
333 
334  if (hw_handle_supported) {
335  err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
336  if (err == MFX_ERR_NONE) {
337  s->handle_type = handle_type;
338  s->child_device_type = device_type;
339  s->child_pix_fmt = pix_fmt;
340  }
341  }
342  if (!s->handle) {
343  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
344  "from the session\n");
345  }
346  return 0;
347 }
348 
350 {
351  QSVFramesContext *s = ctx->hwctx;
352 
353  if (s->session_download) {
354  MFXVideoVPP_Close(s->session_download);
355  MFXClose(s->session_download);
356  }
357  s->session_download = NULL;
358  s->session_download_init = 0;
359 
360  if (s->session_upload) {
361  MFXVideoVPP_Close(s->session_upload);
362  MFXClose(s->session_upload);
363  }
364  s->session_upload = NULL;
365  s->session_upload_init = 0;
366 
367 #if HAVE_PTHREADS
368  pthread_mutex_destroy(&s->session_lock);
369 #endif
370 
371  av_freep(&s->mem_ids);
372 #if QSV_HAVE_OPAQUE
373  av_freep(&s->surface_ptrs);
374 #endif
375  av_freep(&s->surfaces_internal);
376  av_freep(&s->handle_pairs_internal);
377  av_frame_unref(&s->realigned_upload_frame);
378  av_frame_unref(&s->realigned_download_frame);
379  av_buffer_unref(&s->child_frames_ref);
380 }
381 
/* No-op AVBuffer free callback used for buffers that merely point into
 * the internally owned fixed surface array: those surfaces are released
 * all at once when the frames context is uninitialized, not per buffer. */
 382 static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
 383 {
 384 }
385 
386 static void qsv_pool_release(void *opaque, uint8_t *data)
387 {
389  QSVFramesContext *s = ctx->hwctx;
390  QSVSurface *qsv_surface = (QSVSurface *)data;
391  mfxHDLPair *hdl_pair = (mfxHDLPair *)qsv_surface->mfx_surface.Data.MemId;
392  AVHWFramesContext *child_frames_ctx;
393 
394  if (!s->child_frames_ref)
395  return;
396 
397  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
398  if (!child_frames_ctx->device_ctx)
399  return;
400 
401 #if CONFIG_VAAPI
402  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_VAAPI)
403  av_freep(&hdl_pair->first);
404 #endif
405 
406  av_freep(&hdl_pair);
407  av_frame_free(&qsv_surface->child_frame);
408  av_freep(&qsv_surface);
409 }
410 
411 static AVBufferRef *qsv_fixed_pool_alloc(void *opaque, size_t size)
412 {
414  QSVFramesContext *s = ctx->hwctx;
415  AVQSVFramesContext *hwctx = &s->p;
416 
417  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
418  s->nb_surfaces_used++;
419  return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
420  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
421  }
422 
423  return NULL;
424 }
425 
426 static AVBufferRef *qsv_dynamic_pool_alloc(void *opaque, size_t size)
427 {
429  QSVFramesContext *s = ctx->hwctx;
430  AVHWFramesContext *child_frames_ctx;
431  QSVSurface *qsv_surface = NULL;
432  mfxHDLPair *handle_pairs_internal = NULL;
433  int ret;
434 
435  if (!s->child_frames_ref)
436  goto fail;
437 
438  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
439  if (!child_frames_ctx->device_ctx)
440  goto fail;
441 
442 #if CONFIG_DXVA2
443  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
445  "QSV on dxva2 requires a fixed frame pool size\n");
446  goto fail;
447  }
448 #endif
449 
450  qsv_surface = av_calloc(1, sizeof(*qsv_surface));
451  if (!qsv_surface)
452  goto fail;
453 
454  qsv_surface->child_frame = av_frame_alloc();
455  if (!qsv_surface->child_frame)
456  goto fail;
457 
458  ret = av_hwframe_get_buffer(s->child_frames_ref, qsv_surface->child_frame, 0);
459  if (ret < 0)
460  goto fail;
461 
462  handle_pairs_internal = av_calloc(1, sizeof(*handle_pairs_internal));
463  if (!handle_pairs_internal)
464  goto fail;
465 
466  ret = qsv_init_surface(ctx, &qsv_surface->mfx_surface);
467  if (ret < 0)
468  goto fail;
469 
470 #if CONFIG_VAAPI
471  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
472  VASurfaceID *surface_id_internal;
473 
474  surface_id_internal = av_calloc(1, sizeof(*surface_id_internal));
475  if (!surface_id_internal)
476  goto fail;
477 
478  *surface_id_internal = (VASurfaceID)(uintptr_t)qsv_surface->child_frame->data[3];
479  handle_pairs_internal->first = (mfxHDL)surface_id_internal;
480  handle_pairs_internal->second = (mfxMemId)MFX_INFINITE;
481  }
482 #endif
483 
484 #if CONFIG_D3D11VA
485  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
486  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
487  handle_pairs_internal->first = (mfxMemId)qsv_surface->child_frame->data[0];
488 
489  if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET)
490  handle_pairs_internal->second = (mfxMemId)MFX_INFINITE;
491  else
492  handle_pairs_internal->second = (mfxMemId)qsv_surface->child_frame->data[1];
493 
494  }
495 #endif
496 
497  qsv_surface->mfx_surface.Data.MemId = (mfxMemId)handle_pairs_internal;
498  return av_buffer_create((uint8_t *)qsv_surface, sizeof(*qsv_surface),
499  qsv_pool_release, ctx, 0);
500 
501 fail:
502  if (qsv_surface) {
503  av_frame_free(&qsv_surface->child_frame);
504  }
505 
506  av_freep(&qsv_surface);
507  av_freep(&handle_pairs_internal);
508 
509  return NULL;
510 }
511 
512 static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
513 {
515  AVQSVFramesContext *hwctx = ctx->hwctx;
516 
517  if (hwctx->nb_surfaces == 0) {
518  return qsv_dynamic_pool_alloc(opaque, size);
519  } else {
520  return qsv_fixed_pool_alloc(opaque, size);
521  }
522 }
523 
525 {
526  QSVDeviceContext *device_priv = ctx->device_ctx->hwctx;
527  QSVFramesContext *s = ctx->hwctx;
528  AVQSVFramesContext *hwctx = &s->p;
529 
530  AVBufferRef *child_device_ref = NULL;
531  AVBufferRef *child_frames_ref = NULL;
532 
533  AVHWDeviceContext *child_device_ctx;
534  AVHWFramesContext *child_frames_ctx;
535 
536  int i, ret = 0;
537 
538  if (!device_priv->handle) {
540  "Cannot create a non-opaque internal surface pool without "
541  "a hardware handle\n");
542  return AVERROR(EINVAL);
543  }
544 
545  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
546  if (!child_device_ref)
547  return AVERROR(ENOMEM);
548  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
549 
550 #if CONFIG_VAAPI
551  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
552  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
553  child_device_hwctx->display = (VADisplay)device_priv->handle;
554  }
555 #endif
556 #if CONFIG_D3D11VA
557  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
558  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
559  ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
560  child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
561  }
562 #endif
563 #if CONFIG_DXVA2
564  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
565  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
566  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
567  }
568 #endif
569 
570  ret = av_hwdevice_ctx_init(child_device_ref);
571  if (ret < 0) {
572  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
573  goto fail;
574  }
575 
576  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
577  if (!child_frames_ref) {
578  ret = AVERROR(ENOMEM);
579  goto fail;
580  }
581  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
582 
583  child_frames_ctx->format = device_priv->child_pix_fmt;
584  child_frames_ctx->sw_format = ctx->sw_format;
585  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
586  child_frames_ctx->width = FFALIGN(ctx->width, 16);
587  child_frames_ctx->height = FFALIGN(ctx->height, 16);
588 
589 #if CONFIG_D3D11VA
590  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
591  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
592  if (hwctx->frame_type == 0)
593  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
594  if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
595  child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
596  child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
597  }
598 #endif
599 #if CONFIG_DXVA2
600  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
601  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
602  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
603  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
604  else
605  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
606  }
607 #endif
608 
609  ret = av_hwframe_ctx_init(child_frames_ref);
610  if (ret < 0) {
611  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
612  goto fail;
613  }
614 
615 #if CONFIG_VAAPI
616  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
617  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
618  for (i = 0; i < ctx->initial_pool_size; i++) {
619  s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
620  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
621  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
622  }
623  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
624  }
625 #endif
626 #if CONFIG_D3D11VA
627  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
628  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
629  for (i = 0; i < ctx->initial_pool_size; i++) {
630  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
631  if(child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
632  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
633  } else {
634  s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
635  }
636  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
637  }
638  if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
639  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
640  } else {
641  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
642  }
643  }
644 #endif
645 #if CONFIG_DXVA2
646  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
647  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
648  for (i = 0; i < ctx->initial_pool_size; i++) {
649  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
650  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
651  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
652  }
653  if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
654  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
655  else
656  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
657  }
658 #endif
659 
660  s->child_frames_ref = child_frames_ref;
661  child_frames_ref = NULL;
662 
663 fail:
664  av_buffer_unref(&child_device_ref);
665  av_buffer_unref(&child_frames_ref);
666  return ret;
667 }
668 
669 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
670 {
671  const AVPixFmtDescriptor *desc;
672  uint32_t fourcc;
673 
674  desc = av_pix_fmt_desc_get(ctx->sw_format);
675  if (!desc)
676  return AVERROR(EINVAL);
677 
678  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
679  if (!fourcc)
680  return AVERROR(EINVAL);
681 
682  surf->Info.BitDepthLuma = desc->comp[0].depth;
683  surf->Info.BitDepthChroma = desc->comp[0].depth;
684  surf->Info.Shift = qsv_shift_from_pix_fmt(ctx->sw_format);
685 
686  if (desc->log2_chroma_w && desc->log2_chroma_h)
687  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
688  else if (desc->log2_chroma_w)
689  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
690  else
691  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
692 
693  surf->Info.FourCC = fourcc;
694  surf->Info.Width = FFALIGN(ctx->width, 16);
695  surf->Info.CropW = ctx->width;
696  surf->Info.Height = FFALIGN(ctx->height, 16);
697  surf->Info.CropH = ctx->height;
698  surf->Info.FrameRateExtN = 25;
699  surf->Info.FrameRateExtD = 1;
700  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
701 
702  return 0;
703 }
704 
706 {
707  QSVFramesContext *s = ctx->hwctx;
708  AVQSVFramesContext *frames_hwctx = &s->p;
709 
710  int i, ret = 0;
711 
712  if (ctx->initial_pool_size < 0) {
713  av_log(ctx, AV_LOG_ERROR, "Invalid frame pool size\n");
714  return AVERROR(EINVAL);
715  } else if (ctx->initial_pool_size == 0) {
716  mfxFrameSurface1 mfx_surf1;
717 
719  if (ret < 0)
720  return ret;
721 
722  ffhwframesctx(ctx)->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
724  if (!ffhwframesctx(ctx)->pool_internal)
725  return AVERROR(ENOMEM);
726 
727  memset(&mfx_surf1, 0, sizeof(mfx_surf1));
728  qsv_init_surface(ctx, &mfx_surf1);
729  s->frame_info = mfx_surf1.Info;
730  frames_hwctx->info = &s->frame_info;
731  frames_hwctx->nb_surfaces = 0;
732 
733  return 0;
734  }
735 
736  s->handle_pairs_internal = av_calloc(ctx->initial_pool_size,
737  sizeof(*s->handle_pairs_internal));
738  if (!s->handle_pairs_internal)
739  return AVERROR(ENOMEM);
740 
741  s->surfaces_internal = av_calloc(ctx->initial_pool_size,
742  sizeof(*s->surfaces_internal));
743  if (!s->surfaces_internal)
744  return AVERROR(ENOMEM);
745 
746  for (i = 0; i < ctx->initial_pool_size; i++) {
747  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
748  if (ret < 0)
749  return ret;
750  }
751 
752 #if QSV_HAVE_OPAQUE
753  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
755  if (ret < 0)
756  return ret;
757  }
758 #else
760  if (ret < 0)
761  return ret;
762 #endif
763 
764  ffhwframesctx(ctx)->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
766  if (!ffhwframesctx(ctx)->pool_internal)
767  return AVERROR(ENOMEM);
768 
769  frames_hwctx->surfaces = s->surfaces_internal;
770  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
771 
772  return 0;
773 }
774 
/**
 * mfxFrameAllocator.Alloc implementation for the frames context.
 *
 * Serves only external-frame VPP processor-target requests, and only
 * when the requested surface properties match those of the surfaces the
 * context already owns.  Nothing is actually allocated here: the
 * response just exposes the pre-built mem_ids array.
 */
 775 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
 776  mfxFrameAllocResponse *resp)
 777 {
 778  AVHWFramesContext *ctx = pthis;
 779  QSVFramesContext *s = ctx->hwctx;
 780  AVQSVFramesContext *hwctx = &s->p;
 781  mfxFrameInfo *i = &req->Info;
/* Reference info: the first fixed-pool surface when a pool exists,
 * otherwise the stored per-context info used for dynamic pools. */
 782  mfxFrameInfo *i1 = hwctx->nb_surfaces ? &hwctx->surfaces[0].Info : hwctx->info;
 783 
 784  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
 785  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
 786  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
 787  return MFX_ERR_UNSUPPORTED;
 788  if (i->Width > i1->Width || i->Height > i1->Height ||
 789  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
 790  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
 791  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
 792  i->Width, i->Height, i->FourCC, i->ChromaFormat,
 793  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
 794  return MFX_ERR_UNSUPPORTED;
 795  }
 796 
 797  resp->mids = s->mem_ids;
 798  resp->NumFrameActual = hwctx->nb_surfaces;
 799 
 800  return MFX_ERR_NONE;
 801 }
802 
/* mfxFrameAllocator.Free: nothing to do — the surfaces are owned by the
 * frames context, not by the allocator callbacks. */
 803 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
 804 {
 805  return MFX_ERR_NONE;
 806 }
807 
/* mfxFrameAllocator.Lock: CPU access to these video-memory surfaces is
 * not supported through this allocator. */
 808 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
 809 {
 810  return MFX_ERR_UNSUPPORTED;
 811 }
812 
/* mfxFrameAllocator.Unlock: counterpart of frame_lock; likewise
 * unsupported. */
 813 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
 814 {
 815  return MFX_ERR_UNSUPPORTED;
 816 }
817 
818 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
819 {
820  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
821  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
822 
823  pair_dst->first = pair_src->first;
824 
825  if (pair_src->second != (mfxMemId)MFX_INFINITE)
826  pair_dst->second = pair_src->second;
827  return MFX_ERR_NONE;
828 }
829 
830 #if QSV_ONEVPL
831 
/**
 * Restrict an oneVPL loader configuration to the adapter backing the
 * given D3D11 device.
 *
 * Queries the device's DXGI adapter and filters implementations by
 * DeviceID, adapter LUID and LUID node mask.  Returns 0 on success,
 * AVERROR_UNKNOWN on any COM or MFX failure (and always when D3D11VA
 * support is compiled out).
 */
 832 static int qsv_d3d11_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
 833 {
 834  int ret = AVERROR_UNKNOWN;
 835 #if CONFIG_D3D11VA
 836  mfxStatus sts;
 837  IDXGIAdapter *pDXGIAdapter;
 838  DXGI_ADAPTER_DESC adapterDesc;
 839  IDXGIDevice *pDXGIDevice = NULL;
 840  HRESULT hr;
 841  ID3D11Device *device = handle;
 842  mfxVariant impl_value;
 843 
 844  hr = ID3D11Device_QueryInterface(device, &IID_IDXGIDevice, (void**)&pDXGIDevice);
 845  if (SUCCEEDED(hr)) {
 846  hr = IDXGIDevice_GetAdapter(pDXGIDevice, &pDXGIAdapter);
 847  if (FAILED(hr)) {
/* Adapter was never obtained: release only the device and bail,
 * bypassing the shared fail path which would release the adapter too. */
 848  av_log(ctx, AV_LOG_ERROR, "Error IDXGIDevice_GetAdapter %d\n", hr);
 849  IDXGIDevice_Release(pDXGIDevice);
 850  return ret;
 851  }
 852 
 853  hr = IDXGIAdapter_GetDesc(pDXGIAdapter, &adapterDesc);
 854  if (FAILED(hr)) {
 855  av_log(ctx, AV_LOG_ERROR, "Error IDXGIAdapter_GetDesc %d\n", hr);
 856  goto fail;
 857  }
 858  } else {
 859  av_log(ctx, AV_LOG_ERROR, "Error ID3D11Device_QueryInterface %d\n", hr);
 860  return ret;
 861  }
 862 
/* Filter by the PCI device id of the adapter. */
 863  impl_value.Type = MFX_VARIANT_TYPE_U16;
 864  impl_value.Data.U16 = adapterDesc.DeviceId;
 865  sts = MFXSetConfigFilterProperty(cfg,
 866  (const mfxU8 *)"mfxExtendedDeviceId.DeviceID", impl_value);
 867  if (sts != MFX_ERR_NONE) {
 868  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
 869  "DeviceID property: %d.\n", sts);
 870  goto fail;
 871  }
 872 
/* Filter by the adapter LUID (passed by pointer, per the oneVPL API). */
 873  impl_value.Type = MFX_VARIANT_TYPE_PTR;
 874  impl_value.Data.Ptr = &adapterDesc.AdapterLuid;
 875  sts = MFXSetConfigFilterProperty(cfg,
 876  (const mfxU8 *)"mfxExtendedDeviceId.DeviceLUID", impl_value);
 877  if (sts != MFX_ERR_NONE) {
 878  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
 879  "DeviceLUID property: %d.\n", sts);
 880  goto fail;
 881  }
 882 
/* Node mask 0x0001: first device node of the LUID. */
 883  impl_value.Type = MFX_VARIANT_TYPE_U32;
 884  impl_value.Data.U32 = 0x0001;
 885  sts = MFXSetConfigFilterProperty(cfg,
 886  (const mfxU8 *)"mfxExtendedDeviceId.LUIDDeviceNodeMask", impl_value);
 887  if (sts != MFX_ERR_NONE) {
 888  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
 889  "LUIDDeviceNodeMask property: %d.\n", sts);
 890  goto fail;
 891  }
 892 
 893  ret = 0;
 894 
 895 fail:
 896  IDXGIAdapter_Release(pDXGIAdapter);
 897  IDXGIDevice_Release(pDXGIDevice);
 898 #endif
 899  return ret;
 900 }
901 
/**
 * Restrict an oneVPL loader configuration to the adapter backing the
 * given Direct3D9 device manager.
 *
 * Walks devmgr -> device -> IDirect3DDevice9Ex -> IDirect3D9Ex to
 * recover the adapter LUID and filters implementations by it.  The
 * goto chain (release -> unlock -> fail) unwinds the COM handles in
 * strict reverse order of acquisition.  Returns 0 on success,
 * AVERROR_UNKNOWN otherwise (and always when DXVA2 support is compiled
 * out).
 */
 902 static int qsv_d3d9_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
 903 {
 904  int ret = AVERROR_UNKNOWN;
 905 #if CONFIG_DXVA2
 906  mfxStatus sts;
 907  IDirect3DDeviceManager9* devmgr = handle;
 908  IDirect3DDevice9 *device = NULL;
 909  IDirect3DDevice9Ex *device_ex = NULL;
 910  HANDLE device_handle = 0;
 911  IDirect3D9Ex *d3d9ex = NULL;
 912  IDirect3D9 *d3d9 = NULL;
 913  LUID luid;
 914  D3DDEVICE_CREATION_PARAMETERS params;
 915  HRESULT hr;
 916  mfxVariant impl_value;
 917 
 918  hr = IDirect3DDeviceManager9_OpenDeviceHandle(devmgr, &device_handle);
 919  if (FAILED(hr)) {
 920  av_log(ctx, AV_LOG_ERROR, "Error OpenDeviceHandle %d\n", hr);
 921  goto fail;
 922  }
 923 
 924  hr = IDirect3DDeviceManager9_LockDevice(devmgr, device_handle, &device, TRUE);
 925  if (FAILED(hr)) {
/* Lock failed: the unlock path must not run, so close the handle
 * here and jump straight to fail. */
 926  av_log(ctx, AV_LOG_ERROR, "Error LockDevice %d\n", hr);
 927  IDirect3DDeviceManager9_CloseDeviceHandle(devmgr, device_handle);
 928  goto fail;
 929  }
/* The plain device reference is only needed to reach the Ex interface;
 * drop it immediately after the QueryInterface. */
 930  hr = IDirect3DDevice9_QueryInterface(device, &IID_IDirect3DDevice9Ex, (void **)&device_ex);
 931  IDirect3DDevice9_Release(device);
 932  if (FAILED(hr)) {
 933  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9_QueryInterface %d\n", hr);
 934  goto unlock;
 935  }
 936 
 937  hr = IDirect3DDevice9Ex_GetCreationParameters(device_ex, &params);
 938  if (FAILED(hr)) {
 939  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9_GetCreationParameters %d\n", hr);
 940  IDirect3DDevice9Ex_Release(device_ex);
 941  goto unlock;
 942  }
 943 
 944  hr = IDirect3DDevice9Ex_GetDirect3D(device_ex, &d3d9);
 945  if (FAILED(hr)) {
 946  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9Ex_GetDirect3D %d\n", hr);
 947  IDirect3DDevice9Ex_Release(device_ex);
 948  goto unlock;
 949  }
 950  hr = IDirect3D9_QueryInterface(d3d9, &IID_IDirect3D9Ex, (void **)&d3d9ex);
 951  IDirect3D9_Release(d3d9);
 952  if (FAILED(hr)) {
 953  av_log(ctx, AV_LOG_ERROR, "Error IDirect3D9_QueryInterface3D %d\n", hr);
 954  IDirect3DDevice9Ex_Release(device_ex);
 955  goto unlock;
 956  }
 957 
 958  hr = IDirect3D9Ex_GetAdapterLUID(d3d9ex, params.AdapterOrdinal, &luid);
 959  if (FAILED(hr)) {
 960  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9Ex_GetAdapterLUID %d\n", hr);
 961  goto release;
 962  }
 963 
/* Filter implementations by the adapter LUID (passed by pointer). */
 964  impl_value.Type = MFX_VARIANT_TYPE_PTR;
 965  impl_value.Data.Ptr = &luid;
 966  sts = MFXSetConfigFilterProperty(cfg,
 967  (const mfxU8 *)"mfxExtendedDeviceId.DeviceLUID", impl_value);
 968  if (sts != MFX_ERR_NONE) {
 969  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
 970  "DeviceLUID property: %d.\n", sts);
 971  goto release;
 972  }
 973 
 974  ret = 0;
 975 
 976 release:
 977  IDirect3D9Ex_Release(d3d9ex);
 978  IDirect3DDevice9Ex_Release(device_ex);
 979 
 980 unlock:
 981  IDirect3DDeviceManager9_UnlockDevice(devmgr, device_handle, FALSE);
 982  IDirect3DDeviceManager9_CloseDeviceHandle(devmgr, device_handle);
 983 fail:
 984 #endif
 985  return ret;
 986 }
987 
/**
 * Restrict an oneVPL loader configuration to the device backing the
 * given VADisplay.
 *
 * Uses the VADisplayPCIID display attribute (VA-API >= 1.15.0) to read
 * the PCI id from the driver; the low 16 bits are the device id used
 * for filtering.  Returns 0 on success, AVERROR_UNKNOWN when the
 * attribute is unavailable, the filter cannot be set, or libva/VAAPI
 * support is too old or compiled out.
 */
 988 static int qsv_va_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
 989 {
 990 #if CONFIG_VAAPI
 991 #if VA_CHECK_VERSION(1, 15, 0)
 992  mfxStatus sts;
 993  VADisplay dpy = handle;
 994  VAStatus vas;
 995  VADisplayAttribute attr = {
 996  .type = VADisplayPCIID,
 997  };
 998  mfxVariant impl_value;
 999 
 1000  vas = vaGetDisplayAttributes(dpy, &attr, 1);
 1001  if (vas == VA_STATUS_SUCCESS && attr.flags != VA_DISPLAY_ATTRIB_NOT_SUPPORTED) {
/* attr.value packs vendor and device ids; the device id is the low
 * 16 bits. */
 1002  impl_value.Type = MFX_VARIANT_TYPE_U16;
 1003  impl_value.Data.U16 = (attr.value & 0xFFFF);
 1004  sts = MFXSetConfigFilterProperty(cfg,
 1005  (const mfxU8 *)"mfxImplDescription.mfxDeviceDescription.DeviceID", impl_value);
 1006  if (sts != MFX_ERR_NONE) {
 1007  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
 1008  "DeviceID property: %d.\n", sts);
 1009  goto fail;
 1010  }
 1011  } else {
 1012  av_log(ctx, AV_LOG_ERROR, "libva: Failed to get device id from the driver. Please "
 1013  "consider to upgrade the driver to support VA-API 1.15.0\n");
 1014  goto fail;
 1015  }
 1016 
 1017  return 0;
 1018 
 1019 fail:
 1020 #else
 1021  av_log(ctx, AV_LOG_ERROR, "libva: This version of libva doesn't support retrieving "
 1022  "the device information from the driver. Please consider to upgrade libva to "
 1023  "support VA-API 1.15.0\n");
 1024 #endif
 1025 #endif
 1026  return AVERROR_UNKNOWN;
 1027 }
1028 
/**
 * Create and configure an oneVPL loader (mfxLoader).
 *
 * The loader is filtered down to: the requested implementation type
 * (software vs hardware), the requested API version, Intel devices
 * only (vendor 0x8086), the acceleration mode matching @p handle_type,
 * and — when a device @p handle is supplied — the specific device via
 * the per-backend update_config helpers.
 *
 * On success *ploader receives the loader and 0 is returned; on
 * failure the loader is unloaded, *ploader stays NULL and
 * AVERROR_UNKNOWN is returned.  Ownership of the loader passes to the
 * caller.
 */
 1029 static int qsv_new_mfx_loader(void *ctx,
 1030  mfxHDL handle,
 1031  mfxHandleType handle_type,
 1032  mfxIMPL implementation,
 1033  mfxVersion *pver,
 1034  void **ploader)
 1035 {
 1036  mfxStatus sts;
 1037  mfxLoader loader = NULL;
 1038  mfxConfig cfg;
 1039  mfxVariant impl_value;
 1040 
 1041  *ploader = NULL;
 1042  loader = MFXLoad();
 1043  if (!loader) {
 1044  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX loader\n");
 1045  goto fail;
 1046  }
 1047 
 1048  /* Create configurations for implementation */
 1049  cfg = MFXCreateConfig(loader);
 1050  if (!cfg) {
 1051  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX configuration\n");
 1052  goto fail;
 1053  }
 1054 
/* Implementation type: software or hardware. */
 1055  impl_value.Type = MFX_VARIANT_TYPE_U32;
 1056  impl_value.Data.U32 = (implementation == MFX_IMPL_SOFTWARE) ?
 1057  MFX_IMPL_TYPE_SOFTWARE : MFX_IMPL_TYPE_HARDWARE;
 1058  sts = MFXSetConfigFilterProperty(cfg,
 1059  (const mfxU8 *)"mfxImplDescription.Impl", impl_value);
 1060  if (sts != MFX_ERR_NONE) {
 1061  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
 1062  "property: %d.\n", sts);
 1063  goto fail;
 1064  }
 1065 
/* Minimum API version. */
 1066  impl_value.Type = MFX_VARIANT_TYPE_U32;
 1067  impl_value.Data.U32 = pver->Version;
 1068  sts = MFXSetConfigFilterProperty(cfg,
 1069  (const mfxU8 *)"mfxImplDescription.ApiVersion.Version",
 1070  impl_value);
 1071  if (sts != MFX_ERR_NONE) {
 1072  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
 1073  "property: %d.\n", sts);
 1074  goto fail;
 1075  }
 1076 
 1077  impl_value.Type = MFX_VARIANT_TYPE_U32;
 1078  impl_value.Data.U32 = 0x8086; // Intel device only
 1079  sts = MFXSetConfigFilterProperty(cfg,
 1080  (const mfxU8 *)"mfxImplDescription.VendorID", impl_value);
 1081  if (sts != MFX_ERR_NONE) {
 1082  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
 1083  "VendorID property: %d.\n", sts);
 1084  goto fail;
 1085  }
 1086 
/* Pick the acceleration mode from the handle type and, when a device
 * handle is available, narrow the filter to that exact device.
 * Note impl_value.Type is still MFX_VARIANT_TYPE_U32 from above. */
 1087  if (MFX_HANDLE_VA_DISPLAY == handle_type) {
 1088  if (handle && qsv_va_update_config(ctx, handle, cfg))
 1089  goto fail;
 1090 
 1091  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_VAAPI;
 1092  } else if (MFX_HANDLE_D3D9_DEVICE_MANAGER == handle_type) {
 1093  if (handle && qsv_d3d9_update_config(ctx, handle, cfg))
 1094  goto fail;
 1095 
 1096  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_D3D9;
 1097  } else {
 1098  if (handle && qsv_d3d11_update_config(ctx, handle, cfg))
 1099  goto fail;
 1100 
 1101  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_D3D11;
 1102  }
 1103 
 1104  impl_value.Type = MFX_VARIANT_TYPE_U32;
 1105  sts = MFXSetConfigFilterProperty(cfg,
 1106  (const mfxU8 *)"mfxImplDescription.AccelerationMode", impl_value);
 1107  if (sts != MFX_ERR_NONE) {
 1108  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
 1109  "AccelerationMode property: %d.\n", sts);
 1110  goto fail;
 1111  }
 1112 
 1113  *ploader = loader;
 1114 
 1115  return 0;
 1116 
 1117 fail:
 1118  if (loader)
 1119  MFXUnload(loader);
 1120 
 1121  return AVERROR_UNKNOWN;
 1122 }
1123 
1124 static int qsv_create_mfx_session_from_loader(void *ctx, mfxLoader loader, mfxSession *psession)
1125 {
1126  mfxStatus sts;
1127  mfxSession session = NULL;
1128  uint32_t impl_idx = 0;
1129  mfxVersion ver;
1130 
1131  while (1) {
1132  /* Enumerate all implementations */
1133  mfxImplDescription *impl_desc;
1134 
1135  sts = MFXEnumImplementations(loader, impl_idx,
1136  MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
1137  (mfxHDL *)&impl_desc);
1138  /* Failed to find an available implementation */
1139  if (sts == MFX_ERR_NOT_FOUND)
1140  break;
1141  else if (sts != MFX_ERR_NONE) {
1142  impl_idx++;
1143  continue;
1144  }
1145 
1146  sts = MFXCreateSession(loader, impl_idx, &session);
1147  MFXDispReleaseImplDescription(loader, impl_desc);
1148  if (sts == MFX_ERR_NONE)
1149  break;
1150 
1151  impl_idx++;
1152  }
1153 
1154  if (sts != MFX_ERR_NONE) {
1155  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX session: %d.\n", sts);
1156  goto fail;
1157  }
1158 
1159  sts = MFXQueryVersion(session, &ver);
1160  if (sts != MFX_ERR_NONE) {
1161  av_log(ctx, AV_LOG_ERROR, "Error querying a MFX session: %d.\n", sts);
1162  goto fail;
1163  }
1164 
1165  av_log(ctx, AV_LOG_VERBOSE, "Initialize MFX session: implementation "
1166  "version is %d.%d\n", ver.Major, ver.Minor);
1167 
1168  *psession = session;
1169 
1170  return 0;
1171 
1172 fail:
1173  if (session)
1174  MFXClose(session);
1175 
1176  return AVERROR_UNKNOWN;
1177 }
1178 
1179 static int qsv_create_mfx_session(void *ctx,
1180  mfxHDL handle,
1181  mfxHandleType handle_type,
1182  mfxIMPL implementation,
1183  mfxVersion *pver,
1184  mfxSession *psession,
1185  void **ploader)
1186 {
1187  mfxLoader loader = NULL;
1188 
1190  "Use Intel(R) oneVPL to create MFX session, API version is "
1191  "%d.%d, the required implementation version is %d.%d\n",
1192  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
1193 
1194  if (handle_type != MFX_HANDLE_VA_DISPLAY &&
1195  handle_type != MFX_HANDLE_D3D9_DEVICE_MANAGER &&
1196  handle_type != MFX_HANDLE_D3D11_DEVICE) {
1198  "Invalid MFX device handle type\n");
1199  return AVERROR(EXDEV);
1200  }
1201 
1202  *psession = NULL;
1203 
1204  if (!*ploader) {
1205  if (qsv_new_mfx_loader(ctx, handle, handle_type, implementation, pver, (void **)&loader))
1206  goto fail;
1207 
1208  av_assert0(loader);
1209  } else
1210  loader = *ploader; // Use the input mfxLoader to create mfx session
1211 
1212  if (qsv_create_mfx_session_from_loader(ctx, loader, psession))
1213  goto fail;
1214 
1215  if (!*ploader)
1216  *ploader = loader;
1217 
1218  return 0;
1219 
1220 fail:
1221  if (!*ploader && loader)
1222  MFXUnload(loader);
1223 
1224  return AVERROR_UNKNOWN;
1225 }
1226 
1227 #else
1228 
1229 static int qsv_create_mfx_session(void *ctx,
1230  mfxHDL handle,
1231  mfxHandleType handle_type,
1232  mfxIMPL implementation,
1233  mfxVersion *pver,
1234  mfxSession *psession,
1235  void **ploader)
1236 {
1237  mfxVersion ver;
1238  mfxStatus sts;
1239  mfxSession session = NULL;
1240 
1242  "Use Intel(R) Media SDK to create MFX session, API version is "
1243  "%d.%d, the required implementation version is %d.%d\n",
1244  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
1245 
1246  *ploader = NULL;
1247  *psession = NULL;
1248  ver = *pver;
1249  sts = MFXInit(implementation, &ver, &session);
1250  if (sts != MFX_ERR_NONE) {
1251  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1252  "%d.\n", sts);
1253  goto fail;
1254  }
1255 
1256  sts = MFXQueryVersion(session, &ver);
1257  if (sts != MFX_ERR_NONE) {
1258  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: "
1259  "%d.\n", sts);
1260  goto fail;
1261  }
1262 
1263  av_log(ctx, AV_LOG_VERBOSE, "Initialize MFX session: implementation "
1264  "version is %d.%d\n", ver.Major, ver.Minor);
1265 
1266  MFXClose(session);
1267 
1268  sts = MFXInit(implementation, &ver, &session);
1269  if (sts != MFX_ERR_NONE) {
1270  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1271  "%d.\n", sts);
1272  goto fail;
1273  }
1274 
1275  *psession = session;
1276 
1277  return 0;
1278 
1279 fail:
1280  if (session)
1281  MFXClose(session);
1282 
1283  return AVERROR_UNKNOWN;
1284 }
1285 
1286 #endif
1287 
1289  mfxSession *session, int upload)
1290 {
1291  QSVFramesContext *s = ctx->hwctx;
1292  AVQSVFramesContext *frames_hwctx = &s->p;
1293  QSVDeviceContext *device_priv = ctx->device_ctx->hwctx;
1294  AVQSVDeviceContext *hwctx = &device_priv->p;
1295  int opaque = 0;
1296 
1297  mfxFrameAllocator frame_allocator = {
1298  .pthis = ctx,
1299  .Alloc = frame_alloc,
1300  .Lock = frame_lock,
1301  .Unlock = frame_unlock,
1302  .GetHDL = frame_get_hdl,
1303  .Free = frame_free,
1304  };
1305 
1306  mfxVideoParam par;
1307  mfxStatus err;
1308  int ret = AVERROR_UNKNOWN;
1309  /* hwctx->loader is non-NULL for oneVPL user and NULL for non-oneVPL user */
1310  void **loader = &hwctx->loader;
1311  mfxSession parent_session = hwctx->session;
1312  mfxIMPL impl;
1313  mfxVersion ver;
1314 
1315  err = MFXQueryIMPL(parent_session, &impl);
1316  if (err == MFX_ERR_NONE)
1317  err = MFXQueryVersion(parent_session, &ver);
1318  if (err != MFX_ERR_NONE) {
1319  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes.\n");
1320  return AVERROR_UNKNOWN;
1321  }
1322 
1323 #if QSV_HAVE_OPAQUE
1324  opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
1325 #endif
1326 
1327  ret = qsv_create_mfx_session(ctx, device_priv->handle, device_priv->handle_type,
1328  device_priv->impl, &device_priv->ver, session, loader);
1329  if (ret)
1330  goto fail;
1331 
1332  if (device_priv->handle) {
1333  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
1334  device_priv->handle);
1335  if (err != MFX_ERR_NONE) {
1336  ret = AVERROR_UNKNOWN;
1337  goto fail;
1338  }
1339  }
1340 
1341  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
1342  err = MFXJoinSession(parent_session, *session);
1343  if (err != MFX_ERR_NONE) {
1344  av_log(ctx, AV_LOG_ERROR, "Error joining session.\n");
1345  ret = AVERROR_UNKNOWN;
1346  goto fail;
1347  }
1348  }
1349 
1350  if (!opaque) {
1351  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
1352  if (err != MFX_ERR_NONE) {
1353  ret = AVERROR_UNKNOWN;
1354  goto fail;
1355  }
1356  }
1357 
1358  memset(&par, 0, sizeof(par));
1359 
1360  if (!opaque) {
1361  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
1362  MFX_IOPATTERN_IN_VIDEO_MEMORY;
1363  }
1364 #if QSV_HAVE_OPAQUE
1365  else {
1366  par.ExtParam = s->ext_buffers;
1367  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
1368  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
1369  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
1370  }
1371 #endif
1372 
1373  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
1374  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
1375  par.AsyncDepth = 1;
1376 
1377  par.vpp.In = frames_hwctx->nb_surfaces ? frames_hwctx->surfaces[0].Info : *frames_hwctx->info;
1378 
1379  /* Apparently VPP requires the frame rate to be set to some value, otherwise
1380  * init will fail (probably for the framerate conversion filter). Since we
1381  * are only doing data upload/download here, we just invent an arbitrary
1382  * value */
1383  par.vpp.In.FrameRateExtN = 25;
1384  par.vpp.In.FrameRateExtD = 1;
1385  par.vpp.Out = par.vpp.In;
1386 
1387  err = MFXVideoVPP_Init(*session, &par);
1388  if (err != MFX_ERR_NONE) {
1389  av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
1390  "Surface upload/download will not be possible\n");
1391 
1392  ret = AVERROR_UNKNOWN;
1393  goto fail;
1394  }
1395 
1396  return 0;
1397 
1398 fail:
1399  if (*session)
1400  MFXClose(*session);
1401 
1402  *session = NULL;
1403 
1404  return ret;
1405 }
1406 
1408 {
1409  QSVFramesContext *s = ctx->hwctx;
1410  AVQSVFramesContext *frames_hwctx = &s->p;
1411 
1412  int opaque = 0;
1413 
1414  uint32_t fourcc;
1415  int i, ret;
1416 
1417 #if QSV_HAVE_OPAQUE
1418  opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
1419 #endif
1420 
1421  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
1422  if (!fourcc) {
1423  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
1424  return AVERROR(ENOSYS);
1425  }
1426 
1427  if (!ctx->pool) {
1429  if (ret < 0) {
1430  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
1431  return ret;
1432  }
1433  }
1434 
1435  if (!opaque) {
1436  s->mem_ids = av_calloc(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
1437  if (!s->mem_ids)
1438  return AVERROR(ENOMEM);
1439 
1440  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
1441  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
1442  }
1443 #if QSV_HAVE_OPAQUE
1444  else {
1445  s->surface_ptrs = av_calloc(frames_hwctx->nb_surfaces,
1446  sizeof(*s->surface_ptrs));
1447  if (!s->surface_ptrs)
1448  return AVERROR(ENOMEM);
1449 
1450  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
1451  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
1452 
1453  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
1454  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
1455  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
1456 
1457  s->opaque_alloc.Out = s->opaque_alloc.In;
1458 
1459  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
1460  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
1461 
1462  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
1463  }
1464 #endif
1465 
1466  s->session_download = NULL;
1467  s->session_upload = NULL;
1468 
1469  s->session_download_init = 0;
1470  s->session_upload_init = 0;
1471 
1472 #if HAVE_PTHREADS
1473  pthread_mutex_init(&s->session_lock, NULL);
1474 #endif
1475 
1476  return 0;
1477 }
1478 
1480 {
1481  frame->buf[0] = av_buffer_pool_get(ctx->pool);
1482  if (!frame->buf[0])
1483  return AVERROR(ENOMEM);
1484 
1485  frame->data[3] = frame->buf[0]->data;
1486  frame->format = AV_PIX_FMT_QSV;
1487  frame->width = ctx->width;
1488  frame->height = ctx->height;
1489 
1490  return 0;
1491 }
1492 
1494  enum AVHWFrameTransferDirection dir,
1495  enum AVPixelFormat **formats)
1496 {
1497  enum AVPixelFormat *fmts;
1498 
1499  fmts = av_malloc_array(2, sizeof(*fmts));
1500  if (!fmts)
1501  return AVERROR(ENOMEM);
1502 
1503  fmts[0] = ctx->sw_format;
1504  fmts[1] = AV_PIX_FMT_NONE;
1505 
1506  *formats = fmts;
1507 
1508  return 0;
1509 }
1510 
1512  AVHWFramesContext *src_ctx, int flags)
1513 {
1514  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
1515  int i;
1516 
1517  switch (dst_ctx->device_ctx->type) {
1518 #if CONFIG_VAAPI
1520  {
1521  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
1522  dst_hwctx->surface_ids = av_calloc(src_hwctx->nb_surfaces,
1523  sizeof(*dst_hwctx->surface_ids));
1524  if (!dst_hwctx->surface_ids)
1525  return AVERROR(ENOMEM);
1526  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1527  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1528  dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
1529  }
1530  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1531  }
1532  break;
1533 #endif
1534 #if CONFIG_D3D11VA
1536  {
1537  D3D11_TEXTURE2D_DESC texDesc;
1538  AVD3D11VAFramesContext *dst_hwctx;
1539  dst_ctx->initial_pool_size = src_ctx->initial_pool_size;
1540  dst_hwctx = dst_ctx->hwctx;
1541  dst_hwctx->texture_infos = av_calloc(src_hwctx->nb_surfaces,
1542  sizeof(*dst_hwctx->texture_infos));
1543  if (!dst_hwctx->texture_infos)
1544  return AVERROR(ENOMEM);
1545  if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
1546  dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
1547  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1548  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1549  dst_hwctx->texture_infos[i].texture = (ID3D11Texture2D*)pair->first;
1550  dst_hwctx->texture_infos[i].index = pair->second == (mfxMemId)MFX_INFINITE ? (intptr_t)0 : (intptr_t)pair->second;
1551  }
1552  ID3D11Texture2D_GetDesc(dst_hwctx->texture_infos[0].texture, &texDesc);
1553  dst_hwctx->BindFlags = texDesc.BindFlags;
1554  }
1555  break;
1556 #endif
1557 #if CONFIG_DXVA2
1559  {
1560  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
1561  dst_hwctx->surfaces = av_calloc(src_hwctx->nb_surfaces,
1562  sizeof(*dst_hwctx->surfaces));
1563  if (!dst_hwctx->surfaces)
1564  return AVERROR(ENOMEM);
1565  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1566  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1567  dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
1568  }
1569  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1570  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
1571  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
1572  else
1573  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
1574  }
1575  break;
1576 #endif
1577  default:
1578  return AVERROR(ENOSYS);
1579  }
1580 
1581  return 0;
1582 }
1583 
1585  AVFrame *dst, const AVFrame *src, int flags)
1586 {
1587  QSVFramesContext *s = ctx->hwctx;
1588  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
1589  AVHWFramesContext *child_frames_ctx;
1590  const AVPixFmtDescriptor *desc;
1591  uint8_t *child_data;
1592  AVFrame *dummy;
1593  int ret = 0;
1594 
1595  if (!s->child_frames_ref)
1596  return AVERROR(ENOSYS);
1597  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
1598 
1599  switch (child_frames_ctx->device_ctx->type) {
1600 #if CONFIG_VAAPI
1602  {
1603  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1604  /* pair->first is *VASurfaceID while data[3] in vaapi frame is VASurfaceID, so
1605  * we need this casting for vaapi.
1606  * Add intptr_t to force cast from VASurfaceID(uint) type to pointer(long) type
1607  * to avoid compile warning */
1608  child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)pair->first;
1609  break;
1610  }
1611 #endif
1612 #if CONFIG_D3D11VA
1614  {
1615  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1616  child_data = pair->first;
1617  break;
1618  }
1619 #endif
1620 #if CONFIG_DXVA2
1622  {
1623  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1624  child_data = pair->first;
1625  break;
1626  }
1627 #endif
1628  default:
1629  return AVERROR(ENOSYS);
1630  }
1631 
1632  if (dst->format == child_frames_ctx->format) {
1633  ret = ff_hwframe_map_create(s->child_frames_ref,
1634  dst, src, NULL, NULL);
1635  if (ret < 0)
1636  return ret;
1637 
1638  dst->width = src->width;
1639  dst->height = src->height;
1640 
1641  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
1642  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1643  dst->data[0] = pair->first;
1644  dst->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
1645  } else {
1646  dst->data[3] = child_data;
1647  }
1648 
1649  return 0;
1650  }
1651 
1653  if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
1654  // This only supports mapping to software.
1655  return AVERROR(ENOSYS);
1656  }
1657 
1658  dummy = av_frame_alloc();
1659  if (!dummy)
1660  return AVERROR(ENOMEM);
1661 
1662  dummy->buf[0] = av_buffer_ref(src->buf[0]);
1663  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
1664  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
1665  goto fail;
1666 
1667  dummy->format = child_frames_ctx->format;
1668  dummy->width = src->width;
1669  dummy->height = src->height;
1670 
1671  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
1672  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1673  dummy->data[0] = pair->first;
1674  dummy->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
1675  } else {
1676  dummy->data[3] = child_data;
1677  }
1678 
1679  ret = av_hwframe_map(dst, dummy, flags);
1680 
1681 fail:
1682  av_frame_free(&dummy);
1683 
1684  return ret;
1685 }
1686 
1688  const AVFrame *src)
1689 {
1690  QSVFramesContext *s = ctx->hwctx;
1691  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
1692  int download = !!src->hw_frames_ctx;
1693  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
1694 
1695  AVFrame *dummy;
1696  int ret;
1697 
1698  dummy = av_frame_alloc();
1699  if (!dummy)
1700  return AVERROR(ENOMEM);
1701 
1702  dummy->format = child_frames_ctx->format;
1703  dummy->width = src->width;
1704  dummy->height = src->height;
1705  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
1706  dummy->data[3] = surf->Data.MemId;
1707  dummy->hw_frames_ctx = s->child_frames_ref;
1708 
1709  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
1711 
1712  dummy->buf[0] = NULL;
1713  dummy->data[3] = NULL;
1714  dummy->hw_frames_ctx = NULL;
1715 
1716  av_frame_free(&dummy);
1717 
1718  return ret;
1719 }
1720 
1721 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
1722 {
1723  switch (frame->format) {
1724  case AV_PIX_FMT_NV12:
1725  case AV_PIX_FMT_P010:
1726  case AV_PIX_FMT_P012:
1727  surface->Data.Y = frame->data[0];
1728  surface->Data.UV = frame->data[1];
1729  break;
1730 
1731  case AV_PIX_FMT_YUV420P:
1732  surface->Data.Y = frame->data[0];
1733  surface->Data.U = frame->data[1];
1734  surface->Data.V = frame->data[2];
1735  break;
1736 
1737  case AV_PIX_FMT_BGRA:
1738  surface->Data.B = frame->data[0];
1739  surface->Data.G = frame->data[0] + 1;
1740  surface->Data.R = frame->data[0] + 2;
1741  surface->Data.A = frame->data[0] + 3;
1742  break;
1743  case AV_PIX_FMT_YUYV422:
1744  surface->Data.Y = frame->data[0];
1745  surface->Data.U = frame->data[0] + 1;
1746  surface->Data.V = frame->data[0] + 3;
1747  break;
1748 
1749  case AV_PIX_FMT_Y210:
1750  case AV_PIX_FMT_Y212:
1751  surface->Data.Y16 = (mfxU16 *)frame->data[0];
1752  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
1753  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
1754  break;
1755  case AV_PIX_FMT_VUYX:
1756  surface->Data.V = frame->data[0];
1757  surface->Data.U = frame->data[0] + 1;
1758  surface->Data.Y = frame->data[0] + 2;
1759  // Only set Data.A to a valid address, the SDK doesn't
1760  // use the value from the frame.
1761  surface->Data.A = frame->data[0] + 3;
1762  break;
1763  case AV_PIX_FMT_XV30:
1764  surface->Data.U = frame->data[0];
1765  break;
1766  case AV_PIX_FMT_XV36:
1767  surface->Data.U = frame->data[0];
1768  surface->Data.Y = frame->data[0] + 2;
1769  surface->Data.V = frame->data[0] + 4;
1770  // Only set Data.A to a valid address, the SDK doesn't
1771  // use the value from the frame.
1772  surface->Data.A = frame->data[0] + 6;
1773  break;
1774 #if CONFIG_VAAPI
1775  case AV_PIX_FMT_UYVY422:
1776  surface->Data.Y = frame->data[0] + 1;
1777  surface->Data.U = frame->data[0];
1778  surface->Data.V = frame->data[0] + 2;
1779  break;
1780 #endif
1781  default:
1782  return MFX_ERR_UNSUPPORTED;
1783  }
1784  surface->Data.Pitch = frame->linesize[0];
1785  surface->Data.TimeStamp = frame->pts;
1786 
1787  return 0;
1788 }
1789 
1791 {
1792  QSVFramesContext *s = ctx->hwctx;
1793  atomic_int *inited = upload ? &s->session_upload_init : &s->session_download_init;
1794  mfxSession *session = upload ? &s->session_upload : &s->session_download;
1795  int ret = 0;
1796 
1797  if (atomic_load(inited))
1798  return 0;
1799 
1800 #if HAVE_PTHREADS
1801  pthread_mutex_lock(&s->session_lock);
1802 #endif
1803 
1804  if (!atomic_load(inited)) {
1805  ret = qsv_init_internal_session(ctx, session, upload);
1806  atomic_store(inited, 1);
1807  }
1808 
1809 #if HAVE_PTHREADS
1810  pthread_mutex_unlock(&s->session_lock);
1811 #endif
1812 
1813  return ret;
1814 }
1815 
1817  const AVFrame *src)
1818 {
1819  QSVFramesContext *s = ctx->hwctx;
1820  mfxFrameSurface1 out = {{ 0 }};
1821  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
1822 
1823  mfxSyncPoint sync = NULL;
1824  mfxStatus err;
1825  int ret = 0;
1826  /* download to temp frame if the output is not padded as libmfx requires */
1827  AVFrame *tmp_frame = &s->realigned_download_frame;
1828  AVFrame *dst_frame;
1829  int realigned = 0;
1830 
1832  if (ret < 0)
1833  return ret;
1834 
1835  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1836  * Height must be a multiple of 16 for progressive frame sequence and a
1837  * multiple of 32 otherwise.", so allign all frames to 16 before downloading. */
1838  if (dst->height & 15 || dst->linesize[0] & 15) {
1839  realigned = 1;
1840  if (tmp_frame->format != dst->format ||
1841  tmp_frame->width != FFALIGN(dst->linesize[0], 16) ||
1842  tmp_frame->height != FFALIGN(dst->height, 16)) {
1843  av_frame_unref(tmp_frame);
1844 
1845  tmp_frame->format = dst->format;
1846  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1847  tmp_frame->height = FFALIGN(dst->height, 16);
1848  ret = av_frame_get_buffer(tmp_frame, 0);
1849  if (ret < 0)
1850  return ret;
1851  }
1852  }
1853 
1854  dst_frame = realigned ? tmp_frame : dst;
1855 
1856  if (!s->session_download) {
1857  if (s->child_frames_ref)
1858  return qsv_transfer_data_child(ctx, dst_frame, src);
1859 
1860  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
1861  return AVERROR(ENOSYS);
1862  }
1863 
1864  out.Info = in->Info;
1865  map_frame_to_surface(dst_frame, &out);
1866 
1867  do {
1868  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
1869  if (err == MFX_WRN_DEVICE_BUSY)
1870  av_usleep(1);
1871  } while (err == MFX_WRN_DEVICE_BUSY);
1872 
1873  if (err < 0 || !sync) {
1874  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
1875  return AVERROR_UNKNOWN;
1876  }
1877 
1878  do {
1879  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
1880  } while (err == MFX_WRN_IN_EXECUTION);
1881  if (err < 0) {
1882  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
1883  return AVERROR_UNKNOWN;
1884  }
1885 
1886  if (realigned) {
1887  tmp_frame->width = dst->width;
1888  tmp_frame->height = dst->height;
1889  ret = av_frame_copy(dst, tmp_frame);
1890  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1891  tmp_frame->height = FFALIGN(dst->height, 16);
1892  if (ret < 0)
1893  return ret;
1894  }
1895 
1896  return 0;
1897 }
1898 
1900  const AVFrame *src)
1901 {
1902  QSVFramesContext *s = ctx->hwctx;
1903  mfxFrameSurface1 in = {{ 0 }};
1904  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
1905  mfxFrameInfo tmp_info;
1906 
1907  mfxSyncPoint sync = NULL;
1908  mfxStatus err;
1909  int ret = 0;
1910  /* make a copy if the input is not padded as libmfx requires */
1911  AVFrame *tmp_frame = &s->realigned_upload_frame;
1912  const AVFrame *src_frame;
1913  int realigned = 0;
1914 
1916  if (ret < 0)
1917  return ret;
1918 
1919  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1920  * Height must be a multiple of 16 for progressive frame sequence and a
1921  * multiple of 32 otherwise.", so allign all frames to 16 before uploading. */
1922  if (src->height & 15 || src->linesize[0] & 15) {
1923  realigned = 1;
1924  if (tmp_frame->format != src->format ||
1925  tmp_frame->width != FFALIGN(src->width, 16) ||
1926  tmp_frame->height != FFALIGN(src->height, 16)) {
1927  av_frame_unref(tmp_frame);
1928 
1929  tmp_frame->format = src->format;
1930  tmp_frame->width = FFALIGN(src->width, 16);
1931  tmp_frame->height = FFALIGN(src->height, 16);
1932  ret = av_frame_get_buffer(tmp_frame, 0);
1933  if (ret < 0)
1934  return ret;
1935  }
1936  ret = av_frame_copy(tmp_frame, src);
1937  if (ret < 0) {
1938  av_frame_unref(tmp_frame);
1939  return ret;
1940  }
1941  ret = qsv_fill_border(tmp_frame, src);
1942  if (ret < 0) {
1943  av_frame_unref(tmp_frame);
1944  return ret;
1945  }
1946 
1947  tmp_info = out->Info;
1948  out->Info.CropW = FFMIN(out->Info.Width, tmp_frame->width);
1949  out->Info.CropH = FFMIN(out->Info.Height, tmp_frame->height);
1950  }
1951 
1952  src_frame = realigned ? tmp_frame : src;
1953 
1954  if (!s->session_upload) {
1955  if (s->child_frames_ref)
1956  return qsv_transfer_data_child(ctx, dst, src_frame);
1957 
1958  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
1959  return AVERROR(ENOSYS);
1960  }
1961 
1962  in.Info = out->Info;
1963  map_frame_to_surface(src_frame, &in);
1964 
1965  do {
1966  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
1967  if (err == MFX_WRN_DEVICE_BUSY)
1968  av_usleep(1);
1969  } while (err == MFX_WRN_DEVICE_BUSY);
1970 
1971  if (err < 0 || !sync) {
1972  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
1973  return AVERROR_UNKNOWN;
1974  }
1975 
1976  do {
1977  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
1978  } while (err == MFX_WRN_IN_EXECUTION);
1979  if (err < 0) {
1980  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
1981  return AVERROR_UNKNOWN;
1982  }
1983 
1984  if (realigned) {
1985  out->Info.CropW = tmp_info.CropW;
1986  out->Info.CropH = tmp_info.CropH;
1987  }
1988 
1989  return 0;
1990 }
1991 
1993  AVHWFramesContext *src_ctx, int flags)
1994 {
1995  QSVFramesContext *s = dst_ctx->hwctx;
1996  AVQSVFramesContext *dst_hwctx = &s->p;
1997  mfxFrameSurface1 mfx_surf1;
1998 
1999  switch (src_ctx->device_ctx->type) {
2000 #if CONFIG_VAAPI
2002  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
2003  break;
2004 #endif
2005 
2006 #if CONFIG_D3D11VA
2008  {
2009  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
2010 
2011  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
2012  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
2013  } else {
2014  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
2015  }
2016  }
2017  break;
2018 #endif
2019 
2020  default:
2021  return AVERROR(ENOSYS);
2022  }
2023 
2024  memset(&mfx_surf1, 0, sizeof(mfx_surf1));
2025  qsv_init_surface(dst_ctx, &mfx_surf1);
2026  s->frame_info = mfx_surf1.Info;
2027  dst_hwctx->info = &s->frame_info;
2028  dst_hwctx->nb_surfaces = 0;
2029  return 0;
2030 }
2031 
2033  AVHWFramesContext *src_ctx, int flags)
2034 {
2035  QSVFramesContext *s = dst_ctx->hwctx;
2036  AVQSVFramesContext *dst_hwctx = &s->p;
2037  int i;
2038 
2039  switch (src_ctx->device_ctx->type) {
2040 #if CONFIG_VAAPI
2042  {
2043  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
2044  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
2045  sizeof(*s->handle_pairs_internal));
2046  if (!s->handle_pairs_internal)
2047  return AVERROR(ENOMEM);
2048  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
2049  sizeof(*s->surfaces_internal));
2050  if (!s->surfaces_internal)
2051  return AVERROR(ENOMEM);
2052  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
2053  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
2054  s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
2055  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
2056  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
2057  }
2058  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
2059  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
2060  }
2061  break;
2062 #endif
2063 #if CONFIG_D3D11VA
2065  {
2066  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
2067  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
2068  sizeof(*s->handle_pairs_internal));
2069  if (!s->handle_pairs_internal)
2070  return AVERROR(ENOMEM);
2071  s->surfaces_internal = av_calloc(src_ctx->initial_pool_size,
2072  sizeof(*s->surfaces_internal));
2073  if (!s->surfaces_internal)
2074  return AVERROR(ENOMEM);
2075  for (i = 0; i < src_ctx->initial_pool_size; i++) {
2076  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
2077  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
2078  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
2079  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
2080  } else {
2081  s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
2082  }
2083  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
2084  }
2085  dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
2086  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
2087  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
2088  } else {
2089  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
2090  }
2091  }
2092  break;
2093 #endif
2094 #if CONFIG_DXVA2
2096  {
2097  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
2098  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
2099  sizeof(*s->handle_pairs_internal));
2100  if (!s->handle_pairs_internal)
2101  return AVERROR(ENOMEM);
2102  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
2103  sizeof(*s->surfaces_internal));
2104  if (!s->surfaces_internal)
2105  return AVERROR(ENOMEM);
2106  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
2107  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
2108  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
2109  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
2110  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
2111  }
2112  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
2113  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
2114  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
2115  else
2116  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
2117  }
2118  break;
2119 #endif
2120  default:
2121  return AVERROR(ENOSYS);
2122  }
2123 
2124  dst_hwctx->surfaces = s->surfaces_internal;
2125 
2126  return 0;
2127 }
2128 
2130  AVHWFramesContext *src_ctx, int flags)
2131 {
2132  if (src_ctx->initial_pool_size < 0) {
2133  av_log(dst_ctx, AV_LOG_ERROR, "Invalid src frame pool. \n");
2134  return AVERROR(EINVAL);
2135  } else if (src_ctx->initial_pool_size == 0) {
2136  return qsv_dynamic_frames_derive_to(dst_ctx, src_ctx, flags);
2137  } else {
2138  return qsv_fixed_frames_derive_to(dst_ctx, src_ctx, flags);
2139  }
2140 }
2141 
2143  AVFrame *dst, const AVFrame *src, int flags)
2144 {
2145  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
2146  int i, err, index = -1;
2147 
2148  for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
2149  switch(src->format) {
2150 #if CONFIG_VAAPI
2151  case AV_PIX_FMT_VAAPI:
2152  {
2153  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
2154  if (*(VASurfaceID*)pair->first == (VASurfaceID)(uintptr_t)src->data[3]) {
2155  index = i;
2156  break;
2157  }
2158  }
2159 #endif
2160 #if CONFIG_D3D11VA
2161  case AV_PIX_FMT_D3D11:
2162  {
2163  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
2164  if (pair->first == src->data[0]
2165  && (pair->second == src->data[1]
2166  || (pair->second == (mfxMemId)MFX_INFINITE && src->data[1] == (uint8_t *)0))) {
2167  index = i;
2168  break;
2169  }
2170  }
2171 #endif
2172 #if CONFIG_DXVA2
2173  case AV_PIX_FMT_DXVA2_VLD:
2174  {
2175  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
2176  if (pair->first == src->data[3]) {
2177  index = i;
2178  break;
2179  }
2180  }
2181 #endif
2182  }
2183  }
2184  if (index < 0) {
2185  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
2186  "is not in the mapped frames context.\n");
2187  return AVERROR(EINVAL);
2188  }
2189 
2191  dst, src, NULL, NULL);
2192  if (err)
2193  return err;
2194 
2195  dst->width = src->width;
2196  dst->height = src->height;
2197  dst->data[3] = (uint8_t*)&hwctx->surfaces[index];
2198 
2199  return 0;
2200 }
2201 
2203 {
2204  mfxFrameSurface1 *surfaces_internal = (mfxFrameSurface1 *)hwmap->priv;
2205  mfxHDLPair *handle_pairs_internal = (mfxHDLPair *)surfaces_internal->Data.MemId;
2207 
2208  switch (src_ctx->format) {
2209 #if CONFIG_VAAPI
2210  case AV_PIX_FMT_VAAPI:
2211  {
2212  av_freep(&handle_pairs_internal->first);
2213 
2214  break;
2215  }
2216 #endif
2217 
2218 #if CONFIG_D3D11VA
2219  case AV_PIX_FMT_D3D11:
2220  {
2221  /* Do nothing */
2222  break;
2223  }
2224 #endif
2225  default:
2226  av_log(ctx, AV_LOG_ERROR, "Should not reach here. \n");
2227  break;
2228  }
2229 
2230  av_freep(&handle_pairs_internal);
2231  av_freep(&surfaces_internal);
2232 }
2233 
2235  AVFrame *dst, const AVFrame *src, int flags)
2236 {
2237  mfxFrameSurface1 *surfaces_internal = NULL;
2238  mfxHDLPair *handle_pairs_internal = NULL;
2239  int ret = 0;
2240 
2241  surfaces_internal = av_calloc(1, sizeof(*surfaces_internal));
2242  if (!surfaces_internal) {
2243  ret = AVERROR(ENOMEM);
2244  goto fail;
2245  }
2246 
2247  handle_pairs_internal = av_calloc(1, sizeof(*handle_pairs_internal));
2248  if (!handle_pairs_internal) {
2249  ret = AVERROR(ENOMEM);
2250  goto fail;
2251  }
2252 
2253  ret = qsv_init_surface(dst_ctx, surfaces_internal);
2254  if (ret < 0)
2255  goto fail;
2256 
2257  switch (src->format) {
2258 #if CONFIG_VAAPI
2259  case AV_PIX_FMT_VAAPI:
2260  {
2261  VASurfaceID *surface_id_internal;
2262 
2263  surface_id_internal = av_calloc(1, sizeof(*surface_id_internal));
2264  if (!surface_id_internal) {
2265  ret =AVERROR(ENOMEM);
2266  goto fail;
2267  }
2268 
2269  *surface_id_internal = (VASurfaceID)(uintptr_t)src->data[3];
2270  handle_pairs_internal->first = (mfxHDL)surface_id_internal;
2271  handle_pairs_internal->second = (mfxMemId)MFX_INFINITE;
2272 
2273  break;
2274  }
2275 #endif
2276 
2277 #if CONFIG_D3D11VA
2278  case AV_PIX_FMT_D3D11:
2279  {
2280  AVHWFramesContext *src_ctx = (AVHWFramesContext*)src->hw_frames_ctx->data;
2281  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
2282 
2283  handle_pairs_internal->first = (mfxMemId)src->data[0];
2284 
2285  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
2286  handle_pairs_internal->second = (mfxMemId)MFX_INFINITE;
2287  } else {
2288  handle_pairs_internal->second = (mfxMemId)src->data[1];
2289  }
2290 
2291  break;
2292  }
2293 #endif
2294  default:
2295  ret = AVERROR(ENOSYS);
2296  goto fail;
2297  }
2298 
2299  surfaces_internal->Data.MemId = (mfxMemId)handle_pairs_internal;
2300 
2302  dst, src, qsv_dynamic_pool_unmap, surfaces_internal);
2303  if (ret)
2304  goto fail;
2305 
2306  dst->width = src->width;
2307  dst->height = src->height;
2308  dst->data[3] = (uint8_t*)surfaces_internal;
2309 
2310  return 0;
2311 
2312 fail:
2313  av_freep(&handle_pairs_internal);
2314  av_freep(&surfaces_internal);
2315  return ret;
2316 }
2317 
2318 static int qsv_map_to(AVHWFramesContext *dst_ctx,
2319  AVFrame *dst, const AVFrame *src, int flags)
2320 {
2321  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
2322 
2323  if (hwctx->nb_surfaces)
2324  return qsv_fixed_pool_map_to(dst_ctx, dst, src, flags);
2325  else
2326  return qsv_dynamic_pool_map_to(dst_ctx, dst, src, flags);
2327 }
2328 
2330  const void *hwconfig,
2331  AVHWFramesConstraints *constraints)
2332 {
2333  int i;
2334 
2336  sizeof(*constraints->valid_sw_formats));
2337  if (!constraints->valid_sw_formats)
2338  return AVERROR(ENOMEM);
2339 
2340  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
2341  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
2343 
2344  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
2345  if (!constraints->valid_hw_formats)
2346  return AVERROR(ENOMEM);
2347 
2348  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
2349  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
2350 
2351  return 0;
2352 }
2353 
2355 {
2356  AVQSVDeviceContext *hwctx = ctx->hwctx;
2357  QSVDevicePriv *priv = ctx->user_opaque;
2358 
2359  if (hwctx->session)
2360  MFXClose(hwctx->session);
2361 
2362  if (hwctx->loader)
2363  MFXUnload(hwctx->loader);
2365  av_freep(&priv);
2366 }
2367 
2368 static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
2369 {
2370  static const struct {
2371  const char *name;
2372  mfxIMPL impl;
2373  } impl_map[] = {
2374  { "auto", MFX_IMPL_AUTO },
2375  { "sw", MFX_IMPL_SOFTWARE },
2376  { "hw", MFX_IMPL_HARDWARE },
2377  { "auto_any", MFX_IMPL_AUTO_ANY },
2378  { "hw_any", MFX_IMPL_HARDWARE_ANY },
2379  { "hw2", MFX_IMPL_HARDWARE2 },
2380  { "hw3", MFX_IMPL_HARDWARE3 },
2381  { "hw4", MFX_IMPL_HARDWARE4 },
2382  };
2383 
2384  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
2385  int i;
2386 
2387  if (device) {
2388  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
2389  if (!strcmp(device, impl_map[i].name)) {
2390  impl = impl_map[i].impl;
2391  break;
2392  }
2393  if (i == FF_ARRAY_ELEMS(impl_map))
2394  impl = strtol(device, NULL, 0);
2395  }
2396 
2397  if (impl != MFX_IMPL_SOFTWARE) {
2398  if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
2399  impl |= MFX_IMPL_VIA_D3D11;
2400  else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
2401  impl |= MFX_IMPL_VIA_D3D9;
2402  }
2403 
2404  return impl;
2405 }
2406 
2408  mfxIMPL implementation,
2409  AVHWDeviceContext *child_device_ctx,
2410  int flags)
2411 {
2412  AVQSVDeviceContext *hwctx = ctx->hwctx;
2413 
2414  mfxVersion ver = { { 3, 1 } };
2415  mfxHDL handle;
2416  mfxHandleType handle_type;
2417  mfxStatus err;
2418  int ret;
2419 
2420  switch (child_device_ctx->type) {
2421 #if CONFIG_VAAPI
2423  {
2424  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2425  handle_type = MFX_HANDLE_VA_DISPLAY;
2426  handle = (mfxHDL)child_device_hwctx->display;
2427  }
2428  break;
2429 #endif
2430 #if CONFIG_D3D11VA
2432  {
2433  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2434  handle_type = MFX_HANDLE_D3D11_DEVICE;
2435  handle = (mfxHDL)child_device_hwctx->device;
2436  }
2437  break;
2438 #endif
2439 #if CONFIG_DXVA2
2441  {
2442  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2443  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
2444  handle = (mfxHDL)child_device_hwctx->devmgr;
2445  }
2446  break;
2447 #endif
2448  default:
2449  ret = AVERROR(ENOSYS);
2450  goto fail;
2451  }
2452 
2453  ret = qsv_create_mfx_session(ctx, handle, handle_type, implementation, &ver,
2454  &hwctx->session, &hwctx->loader);
2455  if (ret)
2456  goto fail;
2457 
2458  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
2459  if (err != MFX_ERR_NONE) {
2460  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
2461  "%d\n", err);
2462  ret = AVERROR_UNKNOWN;
2463  goto fail;
2464  }
2465 
2466  return 0;
2467 
2468 fail:
2469  if (hwctx->session)
2470  MFXClose(hwctx->session);
2471 
2472  if (hwctx->loader)
2473  MFXUnload(hwctx->loader);
2474 
2475  hwctx->session = NULL;
2476  hwctx->loader = NULL;
2477  return ret;
2478 }
2479 
2481  AVHWDeviceContext *child_device_ctx,
2482  AVDictionary *opts, int flags)
2483 {
2484  mfxIMPL impl;
2485  QSVDevicePriv *priv;
2486 
2487  priv = av_mallocz(sizeof(*priv));
2488  if (!priv)
2489  return AVERROR(ENOMEM);
2490 
2491  ctx->user_opaque = priv;
2492  ctx->free = qsv_device_free;
2493 
2494  impl = choose_implementation("hw_any", child_device_ctx->type);
2495  return qsv_device_derive_from_child(ctx, impl,
2496  child_device_ctx, flags);
2497 }
2498 
2499 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
2500  AVDictionary *opts, int flags)
2501 {
2502  QSVDevicePriv *priv;
2503  enum AVHWDeviceType child_device_type;
2504  AVHWDeviceContext *child_device;
2505  AVDictionary *child_device_opts;
2506  AVDictionaryEntry *e;
2507 
2508  mfxIMPL impl;
2509  int ret;
2510 
2511  priv = av_mallocz(sizeof(*priv));
2512  if (!priv)
2513  return AVERROR(ENOMEM);
2514 
2515  ctx->user_opaque = priv;
2516  ctx->free = qsv_device_free;
2517 
2518  e = av_dict_get(opts, "child_device_type", NULL, 0);
2519  if (e) {
2520  child_device_type = av_hwdevice_find_type_by_name(e->value);
2521  if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
2522  av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
2523  "\"%s\".\n", e->value);
2524  return AVERROR(EINVAL);
2525  }
2526 #if QSV_ONEVPL
2527  } else if (CONFIG_D3D11VA) { // Use D3D11 by default if d3d11va is enabled
2529  "Defaulting child_device_type to AV_HWDEVICE_TYPE_D3D11VA for oneVPL."
2530  "Please explicitly set child device type via \"-init_hw_device\" "
2531  "option if needed.\n");
2532  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
2533  } else if (CONFIG_DXVA2) {
2534  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
2535 #else
2536  } else if (CONFIG_DXVA2) {
2538  "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
2539  "with old commandlines. This behaviour will be removed "
2540  "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
2541  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
2542  } else if (CONFIG_D3D11VA) {
2543  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
2544 #endif
2545  } else if (CONFIG_VAAPI) {
2546  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
2547  } else {
2548  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
2549  return AVERROR(ENOSYS);
2550  }
2551 
2552 #if CONFIG_VAAPI && defined(_WIN32)
2553  /* AV_HWDEVICE_TYPE_VAAPI on Windows/Libva-win32 not supported */
2554  /* Reject user specified child_device_type or CONFIG_VAAPI on Windows */
2555  if (child_device_type == AV_HWDEVICE_TYPE_VAAPI) {
2556  av_log(ctx, AV_LOG_ERROR, "VAAPI child device type not supported for oneVPL on Windows"
2557  "\"%s\".\n", e->value);
2558  return AVERROR(EINVAL);
2559  }
2560 #endif
2561 
2562  child_device_opts = NULL;
2563  switch (child_device_type) {
2564 #if CONFIG_VAAPI
2566  {
2567  // libmfx does not actually implement VAAPI properly, rather it
2568  // depends on the specific behaviour of a matching iHD driver when
2569  // used on recent Intel hardware. Set options to the VAAPI device
2570  // creation so that we should pick a usable setup by default if
2571  // possible, even when multiple devices and drivers are available.
2572  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
2573  av_dict_set(&child_device_opts, "driver", "iHD", 0);
2574  }
2575  break;
2576 #endif
2577 #if CONFIG_D3D11VA
2579  {
2580  // Make sure the hardware vendor is Intel when multiple devices are
2581  // available, it will be ignored if user specifies the child device
2582  // explicitly
2583  av_dict_set(&child_device_opts, "vendor_id", "0x8086", 0);
2584  }
2585  break;
2586 #endif
2587 #if CONFIG_DXVA2
2589 #if QSV_ONEVPL
2590  {
2592  "d3d11va is not available or child device type is set to dxva2 "
2593  "explicitly for oneVPL.\n");
2594  }
2595 #endif
2596  break;
2597 #endif
2598  default:
2599  {
2600  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
2601  return AVERROR(ENOSYS);
2602  }
2603  break;
2604  }
2605 
2606  e = av_dict_get(opts, "child_device", NULL, 0);
2607  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
2608  e ? e->value : NULL, child_device_opts, 0);
2609 
2610  av_dict_free(&child_device_opts);
2611  if (ret < 0)
2612  return ret;
2613 
2614  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
2615 
2616  impl = choose_implementation(device, child_device_type);
2617 
2618  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
2619 }
2620 
2623  .name = "QSV",
2624 
2625  .device_hwctx_size = sizeof(QSVDeviceContext),
2626  .frames_hwctx_size = sizeof(QSVFramesContext),
2627 
2628  .device_create = qsv_device_create,
2629  .device_derive = qsv_device_derive,
2630  .device_init = qsv_device_init,
2631  .frames_get_constraints = qsv_frames_get_constraints,
2632  .frames_init = qsv_frames_init,
2633  .frames_uninit = qsv_frames_uninit,
2634  .frames_get_buffer = qsv_get_buffer,
2635  .transfer_get_formats = qsv_transfer_get_formats,
2636  .transfer_data_to = qsv_transfer_data_to,
2637  .transfer_data_from = qsv_transfer_data_from,
2638  .map_to = qsv_map_to,
2639  .map_from = qsv_map_from,
2640  .frames_derive_to = qsv_frames_derive_to,
2641  .frames_derive_from = qsv_frames_derive_from,
2642 
2643  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
2644 };
qsv_pool_release
static void qsv_pool_release(void *opaque, uint8_t *data)
Definition: hwcontext_qsv.c:386
formats
formats
Definition: signature.h:48
pthread_mutex_t
_fmutex pthread_mutex_t
Definition: os2threads.h:53
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:85
FFHWFramesContext::pool_internal
AVBufferPool * pool_internal
Definition: hwcontext_internal.h:101
qsv_transfer_data_child
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1687
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:75
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
atomic_store
#define atomic_store(object, desired)
Definition: stdatomic.h:85
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
out
FILE * out
Definition: movenc.c:55
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:288
comp
static void comp(unsigned char *dst, ptrdiff_t dst_stride, unsigned char *src, ptrdiff_t src_stride, int add)
Definition: eamad.c:81
QSVFramesContext::child_frames_ref
AVBufferRef * child_frames_ref
Definition: hwcontext_qsv.c:106
qsv_fixed_frames_derive_to
static int qsv_fixed_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:2032
qsv_transfer_data_to
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1899
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2965
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
pthread_mutex_init
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:104
qsv_map_from
static int qsv_map_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:1584
qsv_fourcc_from_pix_fmt
static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
Definition: hwcontext_qsv.c:208
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:197
QSVDeviceContext::p
AVQSVDeviceContext p
The public AVQSVDeviceContext.
Definition: hwcontext_qsv.c:81
qsv_fill_border
static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:245
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:160
QSVDeviceContext::ver
mfxVersion ver
Definition: hwcontext_qsv.c:85
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:322
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:374
pixdesc.h
AVFrame::width
int width
Definition: frame.h:446
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
AVD3D11VAFramesContext::MiscFlags
UINT MiscFlags
D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
Definition: hwcontext_d3d11va.h:166
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:248
qsv_device_derive
static int qsv_device_derive(AVHWDeviceContext *ctx, AVHWDeviceContext *child_device_ctx, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:2480
AVDXVA2FramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_dxva2.h:46
qsv_frames_derive_from
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:1511
AV_HWDEVICE_TYPE_NONE
@ AV_HWDEVICE_TYPE_NONE
Definition: hwcontext.h:28
av_hwframe_map
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:778
qsv_init_surface
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
Definition: hwcontext_qsv.c:669
qsv_fixed_pool_alloc
static AVBufferRef * qsv_fixed_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_qsv.c:411
data
const char data[16]
Definition: mxf.c:148
atomic_int
intptr_t atomic_int
Definition: stdatomic.h:55
choose_implementation
static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
Definition: hwcontext_qsv.c:2368
QSVDeviceContext
Definition: hwcontext_qsv.c:77
av_hwdevice_find_type_by_name
enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
Look up an AVHWDeviceType by name.
Definition: hwcontext.c:102
AV_PIX_FMT_XV30
#define AV_PIX_FMT_XV30
Definition: pixfmt.h:534
AVDXVA2DeviceContext::devmgr
IDirect3DDeviceManager9 * devmgr
Definition: hwcontext_dxva2.h:40
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVVAAPIDeviceContext::display
VADisplay display
The VADisplay handle, to be filled by the user.
Definition: hwcontext_vaapi.h:72
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:102
AVDictionary
Definition: dict.c:34
ff_hwframe_map_create
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
Definition: hwcontext.c:726
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
HWMapDescriptor::priv
void * priv
Hardware-specific private data associated with the mapping.
Definition: hwcontext_internal.h:139
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:446
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:217
fourcc
uint32_t fourcc
Definition: hwcontext_qsv.c:133
av_hwdevice_ctx_init
int av_hwdevice_ctx_init(AVBufferRef *ref)
Finalize the device context before use.
Definition: hwcontext.c:208
AVFrame::buf
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:587
QSVDeviceContext::handle_type
mfxHandleType handle_type
Definition: hwcontext_qsv.c:84
qsv_transfer_data_from
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1816
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:395
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:441
QSVDevicePriv
Definition: hwcontext_qsv.c:73
AVD3D11VAFramesContext::BindFlags
UINT BindFlags
D3D11_TEXTURE2D_DESC.BindFlags used for texture creation.
Definition: hwcontext_d3d11va.h:160
AVVAAPIFramesContext::surface_ids
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
Definition: hwcontext_vaapi.h:101
AVD3D11FrameDescriptor::texture
ID3D11Texture2D * texture
The texture in which the frame is located.
Definition: hwcontext_d3d11va.h:117
QSVDeviceContext::child_device_type
enum AVHWDeviceType child_device_type
Definition: hwcontext_qsv.c:88
qsv_init_child_ctx
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:524
fail
#define fail()
Definition: checkasm.h:182
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:128
dummy
int dummy
Definition: motion.c:66
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
qsv_frames_get_constraints
static int qsv_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_qsv.c:2329
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
QSVFramesContext::frame_info
mfxFrameInfo frame_info
Definition: hwcontext_qsv.c:123
QSVFramesContext::session_download_init
atomic_int session_download_init
Definition: hwcontext_qsv.c:99
qsv_frames_derive_to
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:2129
QSV_RUNTIME_VERSION_ATLEAST
#define QSV_RUNTIME_VERSION_ATLEAST(MFX_VERSION, MAJOR, MINOR)
Definition: hwcontext_qsv.c:59
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:60
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:148
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:803
AV_PIX_FMT_Y210
#define AV_PIX_FMT_Y210
Definition: pixfmt.h:532
avassert.h
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
ffhwframesctx
static FFHWFramesContext * ffhwframesctx(AVHWFramesContext *ctx)
Definition: hwcontext_internal.h:115
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:217
av_hwdevice_ctx_alloc
AVBufferRef * av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
Allocate an AVHWDeviceContext for a given hardware type.
Definition: hwcontext.c:161
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:453
QSVFramesContext::ext_buffers
mfxExtBuffer * ext_buffers[1]
Definition: hwcontext_qsv.c:118
QSVFramesContext::session_upload_init
atomic_int session_upload_init
Definition: hwcontext_qsv.c:101
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:775
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:62
av_memcpy_backptr
void av_memcpy_backptr(uint8_t *dst, int back, int cnt)
Overlapping memcpy() implementation.
Definition: mem.c:447
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:384
QSVDevicePriv::child_device_ctx
AVBufferRef * child_device_ctx
Definition: hwcontext_qsv.c:74
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
s
#define s(width, name)
Definition: cbs_vp9.c:198
AVD3D11VADeviceContext::device
ID3D11Device * device
Device used for texture creation and access.
Definition: hwcontext_d3d11va.h:56
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:59
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
AVHWDeviceType
AVHWDeviceType
Definition: hwcontext.h:27
QSVDeviceContext::handle
mfxHDL handle
Definition: hwcontext_qsv.c:83
QSVFramesContext::mem_ids
mfxMemId * mem_ids
Definition: hwcontext_qsv.c:112
ctx
AVFormatContext * ctx
Definition: movenc.c:49
AVDXVA2FramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_dxva2.h:59
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
qsv_dynamic_pool_unmap
static void qsv_dynamic_pool_unmap(AVHWFramesContext *ctx, HWMapDescriptor *hwmap)
Definition: hwcontext_qsv.c:2202
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
atomic_load
#define atomic_load(object)
Definition: stdatomic.h:93
MFXUnload
#define MFXUnload(a)
Definition: hwcontext_qsv.c:70
if
if(ret)
Definition: filter_design.txt:179
ff_hwcontext_type_qsv
const HWContextType ff_hwcontext_type_qsv
Definition: hwcontext_qsv.c:2621
qsv_create_mfx_session
static int qsv_create_mfx_session(void *ctx, mfxHDL handle, mfxHandleType handle_type, mfxIMPL implementation, mfxVersion *pver, mfxSession *psession, void **ploader)
Definition: hwcontext_qsv.c:1229
opts
AVDictionary * opts
Definition: movenc.c:51
AVD3D11VAFramesContext::texture_infos
AVD3D11FrameDescriptor * texture_infos
In case if texture structure member above is not NULL contains the same texture pointer for all eleme...
Definition: hwcontext_d3d11va.h:175
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
A pointer to a mfxFrameSurface1 struct.
Definition: hwcontext_qsv.h:59
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:210
qsv_frames_uninit
static void qsv_frames_uninit(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:349
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVComponentDescriptor
Definition: pixdesc.h:30
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:74
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: hwcontext_qsv.c:63
qsv_internal_session_check_init
static int qsv_internal_session_check_init(AVHWFramesContext *ctx, int upload)
Definition: hwcontext_qsv.c:1790
qsv_frames_init
static int qsv_frames_init(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:1407
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:247
map_frame_to_surface
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
Definition: hwcontext_qsv.c:1721
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:813
index
int index
Definition: gxfenc.c:90
pthread_mutex_unlock
#define pthread_mutex_unlock(a)
Definition: ffprobe.c:82
AVD3D11VAFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_d3d11va.h:131
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
QSVFramesContext::realigned_upload_frame
AVFrame realigned_upload_frame
Definition: hwcontext_qsv.c:120
qsv_init_internal_session
static int qsv_init_internal_session(AVHWFramesContext *ctx, mfxSession *session, int upload)
Definition: hwcontext_qsv.c:1288
hwcontext_dxva2.h
QSVFramesContext::opaque_alloc
mfxExtOpaqueSurfaceAlloc opaque_alloc
Definition: hwcontext_qsv.c:117
qsv_get_buffer
static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
Definition: hwcontext_qsv.c:1479
AV_PIX_FMT_P012
#define AV_PIX_FMT_P012
Definition: pixfmt.h:529
AVDXVA2FramesContext::surface_type
DWORD surface_type
The surface type (e.g.
Definition: hwcontext_dxva2.h:51
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:999
QSVSurface::mfx_surface
mfxFrameSurface1 mfx_surface
Definition: hwcontext_qsv.c:127
size
int size
Definition: twinvq_data.h:10344
FFHWFramesContext::source_frames
AVBufferRef * source_frames
For a derived context, a reference to the original frames context it was derived from.
Definition: hwcontext_internal.h:107
QSVFramesContext::nb_surfaces_used
int nb_surfaces_used
Definition: hwcontext_qsv.c:109
QSVSurface::child_frame
AVFrame * child_frame
Definition: hwcontext_qsv.c:128
qsv_device_free
static void qsv_device_free(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:2354
qsv_fixed_pool_map_to
static int qsv_fixed_pool_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:2142
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:461
ff_qsv_get_surface_base_handle
int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf, enum AVHWDeviceType base_dev_type, void **base_handle)
Caller needs to allocate enough space for base_handle pointer.
Definition: hwcontext_qsv.c:181
qsv_transfer_get_formats
static int qsv_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_qsv.c:1493
buffer.h
AV_PIX_FMT_Y212
#define AV_PIX_FMT_Y212
Definition: pixfmt.h:533
qsv_device_derive_from_child
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx, mfxIMPL implementation, AVHWDeviceContext *child_device_ctx, int flags)
Definition: hwcontext_qsv.c:2407
AVQSVDeviceContext::loader
void * loader
The mfxLoader handle used for mfxSession creation.
Definition: hwcontext_qsv.h:47
supported_pixel_formats
static const struct @401 supported_pixel_formats[]
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:223
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Number of frames in the pool.
Definition: hwcontext_qsv.h:70
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: hwcontext_qsv.c:818
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:126
AV_HWDEVICE_TYPE_VAAPI
@ AV_HWDEVICE_TYPE_VAAPI
Definition: hwcontext.h:31
pthread_mutex_destroy
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:112
av_image_get_linesize
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
Compute the size of an image line with format pix_fmt and width width for the plane plane.
Definition: imgutils.c:76
hwcontext_qsv.h
qsv_device_init
static int qsv_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:293
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVDXVA2DeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_dxva2.h:39
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
common.h
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
QSVFramesContext::handle_pairs_internal
mfxHDLPair * handle_pairs_internal
Definition: hwcontext_qsv.c:108
AVD3D11FrameDescriptor::index
intptr_t index
The index into the array texture element representing the frame, or 0 if the texture is not an array ...
Definition: hwcontext_d3d11va.h:125
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
QSVFramesContext::surface_ptrs
mfxFrameSurface1 ** surface_ptrs
Definition: hwcontext_qsv.c:115
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:606
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:256
QSVFramesContext::session_download
mfxSession session_download
Definition: hwcontext_qsv.c:98
AVDXVA2FramesContext::surfaces
IDirect3DSurface9 ** surfaces
The surface pool.
Definition: hwcontext_dxva2.h:58
qsv_dynamic_pool_alloc
static AVBufferRef * qsv_dynamic_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_qsv.c:426
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:264
qsv_dynamic_pool_map_to
static int qsv_dynamic_pool_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:2234
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:403
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:115
hwcontext_vaapi.h
qsv_map_to
static int qsv_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:2318
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:84
ret
ret
Definition: filter_design.txt:187
pix_fmt
enum AVPixelFormat pix_fmt
Definition: hwcontext_qsv.c:132
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:72
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
AVHWFramesContext::device_ctx
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:134
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:150
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:600
QSVDeviceContext::impl
mfxIMPL impl
Definition: hwcontext_qsv.c:86
QSVFramesContext::realigned_download_frame
AVFrame realigned_download_frame
Definition: hwcontext_qsv.c:121
av_hwframe_transfer_data
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
Copy data to or from a hw surface.
Definition: hwcontext.c:433
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:808
AV_PIX_FMT_UYVY422
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:88
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:725
AV_HWDEVICE_TYPE_QSV
@ AV_HWDEVICE_TYPE_QSV
Definition: hwcontext.h:33
AVQSVFramesContext::info
mfxFrameInfo * info
A pointer to a mfxFrameInfo struct.
Definition: hwcontext_qsv.h:83
qsv_pool_release_dummy
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
Definition: hwcontext_qsv.c:382
AVFrame::height
int height
Definition: frame.h:446
QSVDeviceContext::child_pix_fmt
enum AVPixelFormat child_pix_fmt
Definition: hwcontext_qsv.c:89
AVVAAPIFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_vaapi.h:102
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
QSVFramesContext::session_upload
mfxSession session_upload
Definition: hwcontext_qsv.c:100
qsv_device_create
static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:2499
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:528
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:53
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:187
desc
const char * desc
Definition: libsvtav1.c:79
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
mfx_shift
uint16_t mfx_shift
Definition: hwcontext_qsv.c:134
qsv_shift_from_pix_fmt
static uint16_t qsv_shift_from_pix_fmt(enum AVPixelFormat pix_fmt)
Definition: hwcontext_qsv.c:218
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
hwcontext_internal.h
AVVAAPIFramesContext
VAAPI-specific data associated with a frame pool.
Definition: hwcontext_vaapi.h:88
QSVFramesContext::surfaces_internal
mfxFrameSurface1 * surfaces_internal
Definition: hwcontext_qsv.c:107
AVDictionaryEntry
Definition: dict.h:89
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
QSVFramesContext
Definition: qsv_internal.h:114
qsv_dynamic_frames_derive_to
static int qsv_dynamic_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:1992
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:88
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
imgutils.h
AV_PIX_FMT_XV36
#define AV_PIX_FMT_XV36
Definition: pixfmt.h:535
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:474
hwcontext.h
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:419
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
HWContextType
Definition: hwcontext_internal.h:29
qsv_pool_alloc
static AVBufferRef * qsv_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_qsv.c:512
QSVSurface
Definition: hwcontext_qsv.c:126
ID3D11Device
void ID3D11Device
Definition: nvenc.h:28
AVVAAPIDeviceContext
VAAPI connection details.
Definition: hwcontext_vaapi.h:68
AVDictionaryEntry::value
char * value
Definition: dict.h:91
AV_PIX_FMT_VUYX
@ AV_PIX_FMT_VUYX
packed VUYX 4:4:4, 32bpp, Variant of VUYA where alpha channel is left undefined
Definition: pixfmt.h:406
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:491
HWMapDescriptor
Definition: hwcontext_internal.h:120
hwcontext_d3d11va.h
qsv_init_pool
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
Definition: hwcontext_qsv.c:705
QSVFramesContext::p
AVQSVFramesContext p
The public AVQSVFramesContext.
Definition: hwcontext_qsv.c:96
pthread_mutex_lock
#define pthread_mutex_lock(a)
Definition: ffprobe.c:78