hwcontext_qsv.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdatomic.h>
20 #include <stdint.h>
21 #include <string.h>
22 
23 #include <mfxvideo.h>
24 
25 #include "config.h"
26 
27 #if HAVE_PTHREADS
28 #include <pthread.h>
29 #endif
30 
31 #define COBJMACROS
32 #if CONFIG_VAAPI
33 #include "hwcontext_vaapi.h"
34 #endif
35 #if CONFIG_D3D11VA
36 #include "hwcontext_d3d11va.h"
37 #endif
38 #if CONFIG_DXVA2
39 #include "hwcontext_dxva2.h"
40 #endif
41 
42 #include "buffer.h"
43 #include "common.h"
44 #include "hwcontext.h"
45 #include "hwcontext_internal.h"
46 #include "hwcontext_qsv.h"
47 #include "mem.h"
48 #include "pixfmt.h"
49 #include "pixdesc.h"
50 #include "time.h"
51 #include "imgutils.h"
52 #include "avassert.h"
53 
54 #define QSV_VERSION_ATLEAST(MAJOR, MINOR) \
55  (MFX_VERSION_MAJOR > (MAJOR) || \
56  MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR))
57 
58 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
59 #define QSV_ONEVPL QSV_VERSION_ATLEAST(2, 0)
60 #define QSV_HAVE_OPAQUE !QSV_ONEVPL
61 
62 #if QSV_ONEVPL
63 #include <mfxdispatcher.h>
64 #else
65 #define MFXUnload(a) do { } while(0)
66 #endif
67 
68 typedef struct QSVDevicePriv {
69  AVBufferRef *child_device_ctx;
70 } QSVDevicePriv;
71 
72 typedef struct QSVDeviceContext {
73  mfxHDL handle;
74  mfxHandleType handle_type;
75  mfxVersion ver;
76  mfxIMPL impl;
77 
78  enum AVHWDeviceType child_device_type;
79  enum AVPixelFormat child_pix_fmt;
80 } QSVDeviceContext;
81 
82 typedef struct QSVFramesContext {
83  mfxSession session_download;
84  atomic_int session_download_init;
85  mfxSession session_upload;
86  atomic_int session_upload_init;
87 #if HAVE_PTHREADS
88  pthread_mutex_t session_lock;
89 #endif
90 
91  AVBufferRef *child_frames_ref;
92  mfxFrameSurface1 *surfaces_internal;
93  mfxHDLPair *handle_pairs_internal;
94  int nb_surfaces_used;
95 
96  // used in the frame allocator for non-opaque surfaces
97  mfxMemId *mem_ids;
98 #if QSV_HAVE_OPAQUE
99  // used in the opaque alloc request for opaque surfaces
100  mfxFrameSurface1 **surface_ptrs;
101 
102  mfxExtOpaqueSurfaceAlloc opaque_alloc;
103  mfxExtBuffer *ext_buffers[1];
104 #endif
105 
106  AVFrame realigned_upload_frame;
107  AVFrame realigned_download_frame;
108 } QSVFramesContext;
109 static const struct {
110  enum AVPixelFormat pix_fmt;
111  uint32_t fourcc;
112  uint16_t mfx_shift;
113 } supported_pixel_formats[] = {
114  { AV_PIX_FMT_NV12, MFX_FOURCC_NV12, 0 },
115  { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4, 0 },
116  { AV_PIX_FMT_P010, MFX_FOURCC_P010, 1 },
117  { AV_PIX_FMT_PAL8, MFX_FOURCC_P8, 0 },
118  { AV_PIX_FMT_YUYV422,
119  MFX_FOURCC_YUY2, 0 },
120 #if CONFIG_VAAPI
121  { AV_PIX_FMT_UYVY422,
122  MFX_FOURCC_UYVY, 0 },
123 #endif
124  { AV_PIX_FMT_Y210,
125  MFX_FOURCC_Y210, 1 },
126  // VUYX is used for VAAPI child device,
127  // the SDK only declares support for AYUV
128  { AV_PIX_FMT_VUYX,
129  MFX_FOURCC_AYUV, 0 },
130  // XV30 is used for VAAPI child device,
131  // the SDK only declares support for Y410
132  { AV_PIX_FMT_XV30,
133  MFX_FOURCC_Y410, 0 },
134 #if QSV_VERSION_ATLEAST(1, 31)
135  // P012 is used for VAAPI child device,
136  // the SDK only declares support for P016
137  { AV_PIX_FMT_P012,
138  MFX_FOURCC_P016, 1 },
139  // Y212 is used for VAAPI child device,
140  // the SDK only declares support for Y216
141  { AV_PIX_FMT_Y212,
142  MFX_FOURCC_Y216, 1 },
143  // XV36 is used for VAAPI child device,
144  // the SDK only declares support for Y416
145  { AV_PIX_FMT_XV36,
146  MFX_FOURCC_Y416, 1 },
147 #endif
148 };
149 
150 extern int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
151  enum AVHWDeviceType base_dev_type,
152  void **base_handle);
153 
154 /**
155  * Caller needs to allocate enough space for base_handle pointer.
156  **/
157 int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
158  enum AVHWDeviceType base_dev_type,
159  void **base_handle)
160 {
161  mfxHDLPair *handle_pair;
162  handle_pair = surf->Data.MemId;
163  switch (base_dev_type) {
164 #if CONFIG_VAAPI
165  case AV_HWDEVICE_TYPE_VAAPI:
166  base_handle[0] = handle_pair->first;
167  return 0;
168 #endif
169 #if CONFIG_D3D11VA
170  case AV_HWDEVICE_TYPE_D3D11VA:
171  base_handle[0] = handle_pair->first;
172  base_handle[1] = handle_pair->second;
173  return 0;
174 #endif
175 #if CONFIG_DXVA2
176  case AV_HWDEVICE_TYPE_DXVA2:
177  base_handle[0] = handle_pair->first;
178  return 0;
179 #endif
180  }
181  return AVERROR(EINVAL);
182 }
183 
184 static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
185 {
186  int i;
187  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
188  if (supported_pixel_formats[i].pix_fmt == pix_fmt)
189  return supported_pixel_formats[i].fourcc;
190  }
191  return 0;
192 }
193 
194 static uint16_t qsv_shift_from_pix_fmt(enum AVPixelFormat pix_fmt)
195 {
196  for (int i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
197  if (supported_pixel_formats[i].pix_fmt == pix_fmt)
198  return supported_pixel_formats[i].mfx_shift;
199  }
200 
201  return 0;
202 }
203 
204 #if CONFIG_D3D11VA
205 static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
206 {
207  uint32_t bind_flags = 0;
208 
209  if ((mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) && (mem_type & MFX_MEMTYPE_INTERNAL_FRAME))
210  bind_flags = D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER;
211  else
212  bind_flags = D3D11_BIND_DECODER;
213 
214  if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
215  bind_flags = D3D11_BIND_RENDER_TARGET;
216 
217  return bind_flags;
218 }
219 #endif
220 
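/* Replicate the right-most valid column and bottom valid row of src into the
 * padded area of dst, so the aligned surface handed to libmfx contains no
 * uninitialized pixels. */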
221 static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
222 {
223  const AVPixFmtDescriptor *desc;
224  int i, planes_nb = 0;
225  if (dst->format != src->format)
226  return AVERROR(EINVAL);
227 
228  desc = av_pix_fmt_desc_get(dst->format);
229 
230  for (i = 0; i < desc->nb_components; i++)
231  planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);
232 
233  for (i = 0; i < planes_nb; i++) {
234  int sheight, dheight, y;
235  ptrdiff_t swidth = av_image_get_linesize(src->format,
236  src->width,
237  i);
238  ptrdiff_t dwidth = av_image_get_linesize(dst->format,
239  dst->width,
240  i);
241  const AVComponentDescriptor comp = desc->comp[i];
242  if (swidth < 0 || dwidth < 0) {
243  av_log(NULL, AV_LOG_ERROR, "av_image_get_linesize failed\n");
244  return AVERROR(EINVAL);
245  }
246  sheight = src->height;
247  dheight = dst->height;
248  if (i) {
249  sheight = AV_CEIL_RSHIFT(src->height, desc->log2_chroma_h);
250  dheight = AV_CEIL_RSHIFT(dst->height, desc->log2_chroma_h);
251  }
252  //fill right padding
253  for (y = 0; y < sheight; y++) {
254  void *line_ptr = dst->data[i] + y*dst->linesize[i] + swidth;
255  av_memcpy_backptr(line_ptr,
256  comp.depth > 8 ? 2 : 1,
257  dwidth - swidth);
258  }
259  //fill bottom padding
260  for (y = sheight; y < dheight; y++) {
261  memcpy(dst->data[i]+y*dst->linesize[i],
262  dst->data[i]+(sheight-1)*dst->linesize[i],
263  dwidth);
264  }
265  }
266  return 0;
267 }
268 
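/* Device init: query the session implementation and version, then try to
 * fetch the underlying VAAPI/D3D11/DXVA2 device handle so that child device
 * and frames contexts can be derived from it later. */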
269 static int qsv_device_init(AVHWDeviceContext *ctx)
270 {
271  AVQSVDeviceContext *hwctx = ctx->hwctx;
272  QSVDeviceContext *s = ctx->internal->priv;
273  int hw_handle_supported = 0;
274  mfxHandleType handle_type;
275  enum AVHWDeviceType device_type;
276  enum AVPixelFormat pix_fmt;
277  mfxStatus err;
278 
279  err = MFXQueryIMPL(hwctx->session, &s->impl);
280  if (err == MFX_ERR_NONE)
281  err = MFXQueryVersion(hwctx->session, &s->ver);
282  if (err != MFX_ERR_NONE) {
283  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
284  return AVERROR_UNKNOWN;
285  }
286 
287  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
288 #if CONFIG_VAAPI
289  handle_type = MFX_HANDLE_VA_DISPLAY;
290  device_type = AV_HWDEVICE_TYPE_VAAPI;
291  pix_fmt = AV_PIX_FMT_VAAPI;
292  hw_handle_supported = 1;
293 #endif
294  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
295 #if CONFIG_D3D11VA
296  handle_type = MFX_HANDLE_D3D11_DEVICE;
297  device_type = AV_HWDEVICE_TYPE_D3D11VA;
298  pix_fmt = AV_PIX_FMT_D3D11;
299  hw_handle_supported = 1;
300 #endif
301  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
302 #if CONFIG_DXVA2
303  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
304  device_type = AV_HWDEVICE_TYPE_DXVA2;
305  pix_fmt = AV_PIX_FMT_DXVA2_VLD;
306  hw_handle_supported = 1;
307 #endif
308  }
309 
310  if (hw_handle_supported) {
311  err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
312  if (err == MFX_ERR_NONE) {
313  s->handle_type = handle_type;
314  s->child_device_type = device_type;
315  s->child_pix_fmt = pix_fmt;
316  }
317  }
318  if (!s->handle) {
319  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
320  "from the session\n");
321  }
322  return 0;
323 }
324 
325 static void qsv_frames_uninit(AVHWFramesContext *ctx)
326 {
327  QSVFramesContext *s = ctx->internal->priv;
328 
329  if (s->session_download) {
330  MFXVideoVPP_Close(s->session_download);
331  MFXClose(s->session_download);
332  }
333  s->session_download = NULL;
334  s->session_download_init = 0;
335 
336  if (s->session_upload) {
337  MFXVideoVPP_Close(s->session_upload);
338  MFXClose(s->session_upload);
339  }
340  s->session_upload = NULL;
341  s->session_upload_init = 0;
342 
343 #if HAVE_PTHREADS
344  pthread_mutex_destroy(&s->session_lock);
345 #endif
346 
347  av_freep(&s->mem_ids);
348 #if QSV_HAVE_OPAQUE
349  av_freep(&s->surface_ptrs);
350 #endif
351  av_freep(&s->surfaces_internal);
352  av_freep(&s->handle_pairs_internal);
353  av_frame_unref(&s->realigned_upload_frame);
354  av_frame_unref(&s->realigned_download_frame);
355  av_buffer_unref(&s->child_frames_ref);
356 }
357 
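/* Internal buffer-pool callbacks: buffers simply point into the preallocated
 * surfaces_internal array, so nothing is freed per buffer. */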
358 static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
359 {
360 }
361 
362 static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
363 {
364  AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
365  QSVFramesContext *s = ctx->internal->priv;
366  AVQSVFramesContext *hwctx = ctx->hwctx;
367 
368  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
369  s->nb_surfaces_used++;
370  return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
371  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
372  }
373 
374  return NULL;
375 }
376 
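/* Create the child VAAPI/D3D11/DXVA2 frames context that owns the actual
 * video memory and wrap each child surface in an mfxHDLPair/mfxFrameSurface1. */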
377 static int qsv_init_child_ctx(AVHWFramesContext *ctx)
378 {
379  AVQSVFramesContext *hwctx = ctx->hwctx;
380  QSVFramesContext *s = ctx->internal->priv;
381  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
382 
383  AVBufferRef *child_device_ref = NULL;
384  AVBufferRef *child_frames_ref = NULL;
385 
386  AVHWDeviceContext *child_device_ctx;
387  AVHWFramesContext *child_frames_ctx;
388 
389  int i, ret = 0;
390 
391  if (!device_priv->handle) {
392  av_log(ctx, AV_LOG_ERROR,
393  "Cannot create a non-opaque internal surface pool without "
394  "a hardware handle\n");
395  return AVERROR(EINVAL);
396  }
397 
398  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
399  if (!child_device_ref)
400  return AVERROR(ENOMEM);
401  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
402 
403 #if CONFIG_VAAPI
404  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
405  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
406  child_device_hwctx->display = (VADisplay)device_priv->handle;
407  }
408 #endif
409 #if CONFIG_D3D11VA
410  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
411  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
412  ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
413  child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
414  }
415 #endif
416 #if CONFIG_DXVA2
417  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
418  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
419  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
420  }
421 #endif
422 
423  ret = av_hwdevice_ctx_init(child_device_ref);
424  if (ret < 0) {
425  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
426  goto fail;
427  }
428 
429  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
430  if (!child_frames_ref) {
431  ret = AVERROR(ENOMEM);
432  goto fail;
433  }
434  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
435 
436  child_frames_ctx->format = device_priv->child_pix_fmt;
437  child_frames_ctx->sw_format = ctx->sw_format;
438  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
439  child_frames_ctx->width = FFALIGN(ctx->width, 16);
440  child_frames_ctx->height = FFALIGN(ctx->height, 16);
441 
442 #if CONFIG_D3D11VA
443  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
444  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
445  if (hwctx->frame_type == 0)
446  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
447  if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
448  child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
449  child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
450  }
451 #endif
452 #if CONFIG_DXVA2
453  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
454  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
455  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
456  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
457  else
458  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
459  }
460 #endif
461 
462  ret = av_hwframe_ctx_init(child_frames_ref);
463  if (ret < 0) {
464  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
465  goto fail;
466  }
467 
468 #if CONFIG_VAAPI
469  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
470  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
471  for (i = 0; i < ctx->initial_pool_size; i++) {
472  s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
473  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
474  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
475  }
476  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
477  }
478 #endif
479 #if CONFIG_D3D11VA
480  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
481  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
482  for (i = 0; i < ctx->initial_pool_size; i++) {
483  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
484  if(child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
485  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
486  } else {
487  s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
488  }
489  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
490  }
491  if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
492  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
493  } else {
494  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
495  }
496  }
497 #endif
498 #if CONFIG_DXVA2
499  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
500  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
501  for (i = 0; i < ctx->initial_pool_size; i++) {
502  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
503  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
504  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
505  }
506  if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
507  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
508  else
509  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
510  }
511 #endif
512 
513  s->child_frames_ref = child_frames_ref;
514  child_frames_ref = NULL;
515 
516 fail:
517  av_buffer_unref(&child_device_ref);
518  av_buffer_unref(&child_frames_ref);
519  return ret;
520 }
521 
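/* Fill a surface's mfxFrameInfo (FourCC, bit depth, chroma format, 16-aligned
 * dimensions) from the frames context sw_format and size. */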
522 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
523 {
524  const AVPixFmtDescriptor *desc;
525  uint32_t fourcc;
526 
527  desc = av_pix_fmt_desc_get(ctx->sw_format);
528  if (!desc)
529  return AVERROR(EINVAL);
530 
531  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
532  if (!fourcc)
533  return AVERROR(EINVAL);
534 
535  surf->Info.BitDepthLuma = desc->comp[0].depth;
536  surf->Info.BitDepthChroma = desc->comp[0].depth;
537  surf->Info.Shift = qsv_shift_from_pix_fmt(ctx->sw_format);
538 
539  if (desc->log2_chroma_w && desc->log2_chroma_h)
540  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
541  else if (desc->log2_chroma_w)
542  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
543  else
544  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
545 
546  surf->Info.FourCC = fourcc;
547  surf->Info.Width = FFALIGN(ctx->width, 16);
548  surf->Info.CropW = ctx->width;
549  surf->Info.Height = FFALIGN(ctx->height, 16);
550  surf->Info.CropH = ctx->height;
551  surf->Info.FrameRateExtN = 25;
552  surf->Info.FrameRateExtD = 1;
553  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
554 
555  return 0;
556 }
557 
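/* Allocate and initialize the fixed-size internal surface array and expose it
 * through an AVBufferPool. */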
558 static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
559 {
560  QSVFramesContext *s = ctx->internal->priv;
561  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
562 
563  int i, ret = 0;
564 
565  if (ctx->initial_pool_size <= 0) {
566  av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
567  return AVERROR(EINVAL);
568  }
569 
570  s->handle_pairs_internal = av_calloc(ctx->initial_pool_size,
571  sizeof(*s->handle_pairs_internal));
572  if (!s->handle_pairs_internal)
573  return AVERROR(ENOMEM);
574 
575  s->surfaces_internal = av_calloc(ctx->initial_pool_size,
576  sizeof(*s->surfaces_internal));
577  if (!s->surfaces_internal)
578  return AVERROR(ENOMEM);
579 
580  for (i = 0; i < ctx->initial_pool_size; i++) {
581  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
582  if (ret < 0)
583  return ret;
584  }
585 
586 #if QSV_HAVE_OPAQUE
587  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
588  ret = qsv_init_child_ctx(ctx);
589  if (ret < 0)
590  return ret;
591  }
592 #else
593  ret = qsv_init_child_ctx(ctx);
594  if (ret < 0)
595  return ret;
596 #endif
597 
598  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
599  ctx, qsv_pool_alloc, NULL);
600  if (!ctx->internal->pool_internal)
601  return AVERROR(ENOMEM);
602 
603  frames_hwctx->surfaces = s->surfaces_internal;
604  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
605 
606  return 0;
607 }
608 
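/* mfxFrameAllocator callbacks: Alloc only hands back the MemIds of the
 * already-created pool (no real allocation), Free/Lock/Unlock are stubs, and
 * GetHDL resolves a MemId (an mfxHDLPair) to the native handle pair. */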
609 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
610  mfxFrameAllocResponse *resp)
611 {
612  AVHWFramesContext *ctx = pthis;
613  QSVFramesContext *s = ctx->internal->priv;
614  AVQSVFramesContext *hwctx = ctx->hwctx;
615  mfxFrameInfo *i = &req->Info;
616  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
617 
618  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
619  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
620  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
621  return MFX_ERR_UNSUPPORTED;
622  if (i->Width > i1->Width || i->Height > i1->Height ||
623  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
624  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
625  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
626  i->Width, i->Height, i->FourCC, i->ChromaFormat,
627  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
628  return MFX_ERR_UNSUPPORTED;
629  }
630 
631  resp->mids = s->mem_ids;
632  resp->NumFrameActual = hwctx->nb_surfaces;
633 
634  return MFX_ERR_NONE;
635 }
636 
637 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
638 {
639  return MFX_ERR_NONE;
640 }
641 
642 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
643 {
644  return MFX_ERR_UNSUPPORTED;
645 }
646 
647 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
648 {
649  return MFX_ERR_UNSUPPORTED;
650 }
651 
652 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
653 {
654  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
655  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
656 
657  pair_dst->first = pair_src->first;
658 
659  if (pair_src->second != (mfxMemId)MFX_INFINITE)
660  pair_dst->second = pair_src->second;
661  return MFX_ERR_NONE;
662 }
663 
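/* oneVPL (API >= 2.0) session creation: build an mfxLoader and filter the
 * available implementations by vendor, API version, acceleration mode and,
 * when a device handle is present, by the DeviceID/LUID queried from the
 * D3D11/D3D9/VAAPI device. */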
664 #if QSV_ONEVPL
665 
666 static int qsv_d3d11_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
667 {
668  int ret = AVERROR_UNKNOWN;
669 #if CONFIG_D3D11VA
670  mfxStatus sts;
671  IDXGIAdapter *pDXGIAdapter;
672  DXGI_ADAPTER_DESC adapterDesc;
673  IDXGIDevice *pDXGIDevice = NULL;
674  HRESULT hr;
675  ID3D11Device *device = handle;
676  mfxVariant impl_value;
677 
678  hr = ID3D11Device_QueryInterface(device, &IID_IDXGIDevice, (void**)&pDXGIDevice);
679  if (SUCCEEDED(hr)) {
680  hr = IDXGIDevice_GetAdapter(pDXGIDevice, &pDXGIAdapter);
681  if (FAILED(hr)) {
682  av_log(ctx, AV_LOG_ERROR, "Error IDXGIDevice_GetAdapter %d\n", hr);
683  IDXGIDevice_Release(pDXGIDevice);
684  return ret;
685  }
686 
687  hr = IDXGIAdapter_GetDesc(pDXGIAdapter, &adapterDesc);
688  if (FAILED(hr)) {
689  av_log(ctx, AV_LOG_ERROR, "Error IDXGIAdapter_GetDesc %d\n", hr);
690  goto fail;
691  }
692  } else {
693  av_log(ctx, AV_LOG_ERROR, "Error ID3D11Device_QueryInterface %d\n", hr);
694  return ret;
695  }
696 
697  impl_value.Type = MFX_VARIANT_TYPE_U16;
698  impl_value.Data.U16 = adapterDesc.DeviceId;
699  sts = MFXSetConfigFilterProperty(cfg,
700  (const mfxU8 *)"mfxExtendedDeviceId.DeviceID", impl_value);
701  if (sts != MFX_ERR_NONE) {
702  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
703  "DeviceID property: %d.\n", sts);
704  goto fail;
705  }
706 
707  impl_value.Type = MFX_VARIANT_TYPE_PTR;
708  impl_value.Data.Ptr = &adapterDesc.AdapterLuid;
709  sts = MFXSetConfigFilterProperty(cfg,
710  (const mfxU8 *)"mfxExtendedDeviceId.DeviceLUID", impl_value);
711  if (sts != MFX_ERR_NONE) {
712  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
713  "DeviceLUID property: %d.\n", sts);
714  goto fail;
715  }
716 
717  impl_value.Type = MFX_VARIANT_TYPE_U32;
718  impl_value.Data.U32 = 0x0001;
719  sts = MFXSetConfigFilterProperty(cfg,
720  (const mfxU8 *)"mfxExtendedDeviceId.LUIDDeviceNodeMask", impl_value);
721  if (sts != MFX_ERR_NONE) {
722  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
723  "LUIDDeviceNodeMask property: %d.\n", sts);
724  goto fail;
725  }
726 
727  ret = 0;
728 
729 fail:
730  IDXGIAdapter_Release(pDXGIAdapter);
731  IDXGIDevice_Release(pDXGIDevice);
732 #endif
733  return ret;
734 }
735 
736 static int qsv_d3d9_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
737 {
738  int ret = AVERROR_UNKNOWN;
739 #if CONFIG_DXVA2
740  mfxStatus sts;
741  IDirect3DDeviceManager9* devmgr = handle;
742  IDirect3DDevice9Ex *device = NULL;
743  HANDLE device_handle = 0;
744  IDirect3D9Ex *d3d9ex = NULL;
745  LUID luid;
746  D3DDEVICE_CREATION_PARAMETERS params;
747  HRESULT hr;
748  mfxVariant impl_value;
749 
750  hr = IDirect3DDeviceManager9_OpenDeviceHandle(devmgr, &device_handle);
751  if (FAILED(hr)) {
752  av_log(ctx, AV_LOG_ERROR, "Error OpenDeviceHandle %d\n", hr);
753  goto fail;
754  }
755 
756  hr = IDirect3DDeviceManager9_LockDevice(devmgr, device_handle, &device, TRUE);
757  if (FAILED(hr)) {
758  av_log(ctx, AV_LOG_ERROR, "Error LockDevice %d\n", hr);
759  IDirect3DDeviceManager9_CloseDeviceHandle(devmgr, device_handle);
760  goto fail;
761  }
762 
763  hr = IDirect3DDevice9Ex_GetCreationParameters(device, &params);
764  if (FAILED(hr)) {
765  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9_GetCreationParameters %d\n", hr);
766  IDirect3DDevice9Ex_Release(device);
767  goto unlock;
768  }
769 
770  hr = IDirect3DDevice9Ex_GetDirect3D(device, &d3d9ex);
771  if (FAILED(hr)) {
772  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9Ex_GetDirect3D %d\n", hr);
773  IDirect3DDevice9Ex_Release(device);
774  goto unlock;
775  }
776 
777  hr = IDirect3D9Ex_GetAdapterLUID(d3d9ex, params.AdapterOrdinal, &luid);
778  if (FAILED(hr)) {
779  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9Ex_GetAdapterLUID %d\n", hr);
780  goto release;
781  }
782 
783  impl_value.Type = MFX_VARIANT_TYPE_PTR;
784  impl_value.Data.Ptr = &luid;
785  sts = MFXSetConfigFilterProperty(cfg,
786  (const mfxU8 *)"mfxExtendedDeviceId.DeviceLUID", impl_value);
787  if (sts != MFX_ERR_NONE) {
788  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
789  "DeviceLUID property: %d.\n", sts);
790  goto release;
791  }
792 
793  ret = 0;
794 
795 release:
796  IDirect3D9Ex_Release(d3d9ex);
797  IDirect3DDevice9Ex_Release(device);
798 
799 unlock:
800  IDirect3DDeviceManager9_UnlockDevice(devmgr, device_handle, FALSE);
801  IDirect3DDeviceManager9_CloseDeviceHandle(devmgr, device_handle);
802 fail:
803 #endif
804  return ret;
805 }
806 
807 static int qsv_va_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
808 {
809 #if CONFIG_VAAPI
810 #if VA_CHECK_VERSION(1, 15, 0)
811  mfxStatus sts;
812  VADisplay dpy = handle;
813  VAStatus vas;
814  VADisplayAttribute attr = {
815  .type = VADisplayPCIID,
816  };
817  mfxVariant impl_value;
818 
819  vas = vaGetDisplayAttributes(dpy, &attr, 1);
820  if (vas == VA_STATUS_SUCCESS && attr.flags != VA_DISPLAY_ATTRIB_NOT_SUPPORTED) {
821  impl_value.Type = MFX_VARIANT_TYPE_U16;
822  impl_value.Data.U16 = (attr.value & 0xFFFF);
823  sts = MFXSetConfigFilterProperty(cfg,
824  (const mfxU8 *)"mfxExtendedDeviceId.DeviceID", impl_value);
825  if (sts != MFX_ERR_NONE) {
826  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
827  "DeviceID property: %d.\n", sts);
828  goto fail;
829  }
830  } else {
831  av_log(ctx, AV_LOG_ERROR, "libva: Failed to get device id from the driver. Please "
832  "consider upgrading the driver to support VA-API 1.15.0\n");
833  goto fail;
834  }
835 
836  return 0;
837 
838 fail:
839 #else
840  av_log(ctx, AV_LOG_ERROR, "libva: This version of libva doesn't support retrieving "
841  "the device information from the driver. Please consider upgrading libva to "
842  "support VA-API 1.15.0\n");
843 #endif
844 #endif
845  return AVERROR_UNKNOWN;
846 }
847 
848 static int qsv_new_mfx_loader(void *ctx,
849  mfxHDL handle,
850  mfxHandleType handle_type,
851  mfxIMPL implementation,
852  mfxVersion *pver,
853  void **ploader)
854 {
855  mfxStatus sts;
856  mfxLoader loader = NULL;
857  mfxConfig cfg;
858  mfxVariant impl_value;
859 
860  *ploader = NULL;
861  loader = MFXLoad();
862  if (!loader) {
863  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX loader\n");
864  goto fail;
865  }
866 
867  /* Create configurations for implementation */
868  cfg = MFXCreateConfig(loader);
869  if (!cfg) {
870  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX configuration\n");
871  goto fail;
872  }
873 
874  impl_value.Type = MFX_VARIANT_TYPE_U32;
875  impl_value.Data.U32 = (implementation == MFX_IMPL_SOFTWARE) ?
876  MFX_IMPL_TYPE_SOFTWARE : MFX_IMPL_TYPE_HARDWARE;
877  sts = MFXSetConfigFilterProperty(cfg,
878  (const mfxU8 *)"mfxImplDescription.Impl", impl_value);
879  if (sts != MFX_ERR_NONE) {
880  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
881  "property: %d.\n", sts);
882  goto fail;
883  }
884 
885  impl_value.Type = MFX_VARIANT_TYPE_U32;
886  impl_value.Data.U32 = pver->Version;
887  sts = MFXSetConfigFilterProperty(cfg,
888  (const mfxU8 *)"mfxImplDescription.ApiVersion.Version",
889  impl_value);
890  if (sts != MFX_ERR_NONE) {
891  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
892  "property: %d.\n", sts);
893  goto fail;
894  }
895 
896  impl_value.Type = MFX_VARIANT_TYPE_U16;
897  impl_value.Data.U16 = 0x8086; // Intel device only
898  sts = MFXSetConfigFilterProperty(cfg,
899  (const mfxU8 *)"mfxExtendedDeviceId.VendorID", impl_value);
900  if (sts != MFX_ERR_NONE) {
901  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
902  "VendorID property: %d.\n", sts);
903  goto fail;
904  }
905 
906  if (MFX_HANDLE_VA_DISPLAY == handle_type) {
907  if (handle && qsv_va_update_config(ctx, handle, cfg))
908  goto fail;
909 
910  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_VAAPI;
911  } else if (MFX_HANDLE_D3D9_DEVICE_MANAGER == handle_type) {
912  if (handle && qsv_d3d9_update_config(ctx, handle, cfg))
913  goto fail;
914 
915  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_D3D9;
916  } else {
917  if (handle && qsv_d3d11_update_config(ctx, handle, cfg))
918  goto fail;
919 
920  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_D3D11;
921  }
922 
923  impl_value.Type = MFX_VARIANT_TYPE_U32;
924  sts = MFXSetConfigFilterProperty(cfg,
925  (const mfxU8 *)"mfxImplDescription.AccelerationMode", impl_value);
926  if (sts != MFX_ERR_NONE) {
927  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
928  "AccelerationMode property: %d.\n", sts);
929  goto fail;
930  }
931 
932  *ploader = loader;
933 
934  return 0;
935 
936 fail:
937  if (loader)
938  MFXUnload(loader);
939 
940  return AVERROR_UNKNOWN;
941 }
942 
943 static int qsv_create_mfx_session_from_loader(void *ctx, mfxLoader loader, mfxSession *psession)
944 {
945  mfxStatus sts;
946  mfxSession session = NULL;
947  uint32_t impl_idx = 0;
948  mfxVersion ver;
949 
950  while (1) {
951  /* Enumerate all implementations */
952  mfxImplDescription *impl_desc;
953 
954  sts = MFXEnumImplementations(loader, impl_idx,
955  MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
956  (mfxHDL *)&impl_desc);
957  /* Failed to find an available implementation */
958  if (sts == MFX_ERR_NOT_FOUND)
959  break;
960  else if (sts != MFX_ERR_NONE) {
961  impl_idx++;
962  continue;
963  }
964 
965  sts = MFXCreateSession(loader, impl_idx, &session);
966  MFXDispReleaseImplDescription(loader, impl_desc);
967  if (sts == MFX_ERR_NONE)
968  break;
969 
970  impl_idx++;
971  }
972 
973  if (sts != MFX_ERR_NONE) {
974  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX session: %d.\n", sts);
975  goto fail;
976  }
977 
978  sts = MFXQueryVersion(session, &ver);
979  if (sts != MFX_ERR_NONE) {
980  av_log(ctx, AV_LOG_ERROR, "Error querying a MFX session: %d.\n", sts);
981  goto fail;
982  }
983 
984  av_log(ctx, AV_LOG_VERBOSE, "Initialize MFX session: implementation "
985  "version is %d.%d\n", ver.Major, ver.Minor);
986 
987  *psession = session;
988 
989  return 0;
990 
991 fail:
992  if (session)
993  MFXClose(session);
994 
995  return AVERROR_UNKNOWN;
996 }
997 
998 static int qsv_create_mfx_session(void *ctx,
999  mfxHDL handle,
1000  mfxHandleType handle_type,
1001  mfxIMPL implementation,
1002  mfxVersion *pver,
1003  mfxSession *psession,
1004  void **ploader)
1005 {
1006  mfxLoader loader = NULL;
1007 
1008  av_log(ctx, AV_LOG_VERBOSE,
1009  "Use Intel(R) oneVPL to create MFX session, API version is "
1010  "%d.%d, the required implementation version is %d.%d\n",
1011  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
1012 
1013  if (handle_type != MFX_HANDLE_VA_DISPLAY &&
1014  handle_type != MFX_HANDLE_D3D9_DEVICE_MANAGER &&
1015  handle_type != MFX_HANDLE_D3D11_DEVICE) {
1016  av_log(ctx, AV_LOG_ERROR,
1017  "Invalid MFX device handle type\n");
1018  return AVERROR(EXDEV);
1019  }
1020 
1021  *psession = NULL;
1022 
1023  if (!*ploader) {
1024  if (qsv_new_mfx_loader(ctx, handle, handle_type, implementation, pver, (void **)&loader))
1025  goto fail;
1026 
1027  av_assert0(loader);
1028  } else
1029  loader = *ploader; // Use the input mfxLoader to create mfx session
1030 
1031  if (qsv_create_mfx_session_from_loader(ctx, loader, psession))
1032  goto fail;
1033 
1034  if (!*ploader)
1035  *ploader = loader;
1036 
1037  return 0;
1038 
1039 fail:
1040  if (!*ploader && loader)
1041  MFXUnload(loader);
1042 
1043  return AVERROR_UNKNOWN;
1044 }
1045 
1046 #else
1047 
1048 static int qsv_create_mfx_session(void *ctx,
1049  mfxHDL handle,
1050  mfxHandleType handle_type,
1051  mfxIMPL implementation,
1052  mfxVersion *pver,
1053  mfxSession *psession,
1054  void **ploader)
1055 {
1056  mfxVersion ver;
1057  mfxStatus sts;
1058  mfxSession session = NULL;
1059 
1060  av_log(ctx, AV_LOG_VERBOSE,
1061  "Use Intel(R) Media SDK to create MFX session, API version is "
1062  "%d.%d, the required implementation version is %d.%d\n",
1063  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
1064 
1065  *ploader = NULL;
1066  *psession = NULL;
1067  ver = *pver;
1068  sts = MFXInit(implementation, &ver, &session);
1069  if (sts != MFX_ERR_NONE) {
1070  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1071  "%d.\n", sts);
1072  goto fail;
1073  }
1074 
1075  sts = MFXQueryVersion(session, &ver);
1076  if (sts != MFX_ERR_NONE) {
1077  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: "
1078  "%d.\n", sts);
1079  goto fail;
1080  }
1081 
1082  av_log(ctx, AV_LOG_VERBOSE, "Initialize MFX session: implementation "
1083  "version is %d.%d\n", ver.Major, ver.Minor);
1084 
1085  MFXClose(session);
1086 
1087  sts = MFXInit(implementation, &ver, &session);
1088  if (sts != MFX_ERR_NONE) {
1089  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1090  "%d.\n", sts);
1091  goto fail;
1092  }
1093 
1094  *psession = session;
1095 
1096  return 0;
1097 
1098 fail:
1099  if (session)
1100  MFXClose(session);
1101 
1102  return AVERROR_UNKNOWN;
1103 }
1104 
1105 #endif
1106 
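/* Create a private MFX session whose VPP instance is used only for copying
 * frames between system and video memory (upload/download). */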
1107 static int qsv_init_internal_session(AVHWFramesContext *ctx,
1108  mfxSession *session, int upload)
1109 {
1110  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
1111  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
1112  int opaque = 0;
1113 
1114  mfxFrameAllocator frame_allocator = {
1115  .pthis = ctx,
1116  .Alloc = frame_alloc,
1117  .Lock = frame_lock,
1118  .Unlock = frame_unlock,
1119  .GetHDL = frame_get_hdl,
1120  .Free = frame_free,
1121  };
1122 
1123  mfxVideoParam par;
1124  mfxStatus err;
1125  int ret = AVERROR_UNKNOWN;
1126  AVQSVDeviceContext *hwctx = ctx->device_ctx->hwctx;
1127  /* hwctx->loader is non-NULL for oneVPL user and NULL for non-oneVPL user */
1128  void **loader = &hwctx->loader;
1129 
1130 #if QSV_HAVE_OPAQUE
1131  QSVFramesContext *s = ctx->internal->priv;
1132  opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
1133 #endif
1134 
1135  ret = qsv_create_mfx_session(ctx, device_priv->handle, device_priv->handle_type,
1136  device_priv->impl, &device_priv->ver, session, loader);
1137  if (ret)
1138  goto fail;
1139 
1140  if (device_priv->handle) {
1141  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
1142  device_priv->handle);
1143  if (err != MFX_ERR_NONE) {
1144  ret = AVERROR_UNKNOWN;
1145  goto fail;
1146  }
1147  }
1148 
1149  if (!opaque) {
1150  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
1151  if (err != MFX_ERR_NONE) {
1152  ret = AVERROR_UNKNOWN;
1153  goto fail;
1154  }
1155  }
1156 
1157  memset(&par, 0, sizeof(par));
1158 
1159  if (!opaque) {
1160  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
1161  MFX_IOPATTERN_IN_VIDEO_MEMORY;
1162  }
1163 #if QSV_HAVE_OPAQUE
1164  else {
1165  par.ExtParam = s->ext_buffers;
1166  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
1167  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
1168  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
1169  }
1170 #endif
1171 
1172  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
1173  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
1174  par.AsyncDepth = 1;
1175 
1176  par.vpp.In = frames_hwctx->surfaces[0].Info;
1177 
1178  /* Apparently VPP requires the frame rate to be set to some value, otherwise
1179  * init will fail (probably for the framerate conversion filter). Since we
1180  * are only doing data upload/download here, we just invent an arbitrary
1181  * value */
1182  par.vpp.In.FrameRateExtN = 25;
1183  par.vpp.In.FrameRateExtD = 1;
1184  par.vpp.Out = par.vpp.In;
1185 
1186  err = MFXVideoVPP_Init(*session, &par);
1187  if (err != MFX_ERR_NONE) {
1188  av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session. "
1189  "Surface upload/download will not be possible\n");
1190 
1191  ret = AVERROR_UNKNOWN;
1192  goto fail;
1193  }
1194 
1195  return 0;
1196 
1197 fail:
1198  if (*session)
1199  MFXClose(*session);
1200 
1201  *session = NULL;
1202 
1203  return ret;
1204 }
1205 
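/* Frames-context init: validate the pixel format, create the internal pool if
 * the caller did not supply one, and collect the MemIds (or opaque surface
 * pointers) needed by the frame allocator. */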
1206 static int qsv_frames_init(AVHWFramesContext *ctx)
1207 {
1208  QSVFramesContext *s = ctx->internal->priv;
1209  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
1210 
1211  int opaque = 0;
1212 
1213  uint32_t fourcc;
1214  int i, ret;
1215 
1216 #if QSV_HAVE_OPAQUE
1217  opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
1218 #endif
1219 
1220  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
1221  if (!fourcc) {
1222  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
1223  return AVERROR(ENOSYS);
1224  }
1225 
1226  if (!ctx->pool) {
1227  ret = qsv_init_pool(ctx, fourcc);
1228  if (ret < 0) {
1229  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
1230  return ret;
1231  }
1232  }
1233 
1234  if (!opaque) {
1235  s->mem_ids = av_calloc(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
1236  if (!s->mem_ids)
1237  return AVERROR(ENOMEM);
1238 
1239  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
1240  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
1241  }
1242 #if QSV_HAVE_OPAQUE
1243  else {
1244  s->surface_ptrs = av_calloc(frames_hwctx->nb_surfaces,
1245  sizeof(*s->surface_ptrs));
1246  if (!s->surface_ptrs)
1247  return AVERROR(ENOMEM);
1248 
1249  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
1250  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
1251 
1252  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
1253  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
1254  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
1255 
1256  s->opaque_alloc.Out = s->opaque_alloc.In;
1257 
1258  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
1259  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
1260 
1261  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
1262  }
1263 #endif
1264 
1265  s->session_download = NULL;
1266  s->session_upload = NULL;
1267 
1268  s->session_download_init = 0;
1269  s->session_upload_init = 0;
1270 
1271 #if HAVE_PTHREADS
1272  pthread_mutex_init(&s->session_lock, NULL);
1273 #endif
1274 
1275  return 0;
1276 }
1277 
1278 static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
1279 {
1280  frame->buf[0] = av_buffer_pool_get(ctx->pool);
1281  if (!frame->buf[0])
1282  return AVERROR(ENOMEM);
1283 
1284  frame->data[3] = frame->buf[0]->data;
1285  frame->format = AV_PIX_FMT_QSV;
1286  frame->width = ctx->width;
1287  frame->height = ctx->height;
1288 
1289  return 0;
1290 }
1291 
1292 static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
1293  enum AVHWFrameTransferDirection dir,
1294  enum AVPixelFormat **formats)
1295 {
1296  enum AVPixelFormat *fmts;
1297 
1298  fmts = av_malloc_array(2, sizeof(*fmts));
1299  if (!fmts)
1300  return AVERROR(ENOMEM);
1301 
1302  fmts[0] = ctx->sw_format;
1303  fmts[1] = AV_PIX_FMT_NONE;
1304 
1305  *formats = fmts;
1306 
1307  return 0;
1308 }
1309 
1310 static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
1311  AVHWFramesContext *src_ctx, int flags)
1312 {
1313  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
1314  int i;
1315 
1316  switch (dst_ctx->device_ctx->type) {
1317 #if CONFIG_VAAPI
1318  case AV_HWDEVICE_TYPE_VAAPI:
1319  {
1320  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
1321  dst_hwctx->surface_ids = av_calloc(src_hwctx->nb_surfaces,
1322  sizeof(*dst_hwctx->surface_ids));
1323  if (!dst_hwctx->surface_ids)
1324  return AVERROR(ENOMEM);
1325  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1326  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1327  dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
1328  }
1329  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1330  }
1331  break;
1332 #endif
1333 #if CONFIG_D3D11VA
1334  case AV_HWDEVICE_TYPE_D3D11VA:
1335  {
1336  D3D11_TEXTURE2D_DESC texDesc;
1337  dst_ctx->initial_pool_size = src_ctx->initial_pool_size;
1338  AVD3D11VAFramesContext *dst_hwctx = dst_ctx->hwctx;
1339  dst_hwctx->texture_infos = av_calloc(src_hwctx->nb_surfaces,
1340  sizeof(*dst_hwctx->texture_infos));
1341  if (!dst_hwctx->texture_infos)
1342  return AVERROR(ENOMEM);
1343  if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
1344  dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
1345  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1346  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1347  dst_hwctx->texture_infos[i].texture = (ID3D11Texture2D*)pair->first;
1348  dst_hwctx->texture_infos[i].index = pair->second == (mfxMemId)MFX_INFINITE ? (intptr_t)0 : (intptr_t)pair->second;
1349  }
1350  ID3D11Texture2D_GetDesc(dst_hwctx->texture_infos[0].texture, &texDesc);
1351  dst_hwctx->BindFlags = texDesc.BindFlags;
1352  }
1353  break;
1354 #endif
1355 #if CONFIG_DXVA2
1356  case AV_HWDEVICE_TYPE_DXVA2:
1357  {
1358  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
1359  dst_hwctx->surfaces = av_calloc(src_hwctx->nb_surfaces,
1360  sizeof(*dst_hwctx->surfaces));
1361  if (!dst_hwctx->surfaces)
1362  return AVERROR(ENOMEM);
1363  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1364  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1365  dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
1366  }
1367  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1368  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
1369  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
1370  else
1371  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
1372  }
1373  break;
1374 #endif
1375  default:
1376  return AVERROR(ENOSYS);
1377  }
1378 
1379  return 0;
1380 }
1381 
1382 static int qsv_map_from(AVHWFramesContext *ctx,
1383  AVFrame *dst, const AVFrame *src, int flags)
1384 {
1385  QSVFramesContext *s = ctx->internal->priv;
1386  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
1387  AVHWFramesContext *child_frames_ctx;
1388  const AVPixFmtDescriptor *desc;
1389  uint8_t *child_data;
1390  AVFrame *dummy;
1391  int ret = 0;
1392 
1393  if (!s->child_frames_ref)
1394  return AVERROR(ENOSYS);
1395  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
1396 
1397  switch (child_frames_ctx->device_ctx->type) {
1398 #if CONFIG_VAAPI
1399  case AV_HWDEVICE_TYPE_VAAPI:
1400  {
1401  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1402  /* pair->first is *VASurfaceID while data[3] in vaapi frame is VASurfaceID, so
1403  * we need this casting for vaapi.
1404  * Add intptr_t to force cast from VASurfaceID(uint) type to pointer(long) type
1405  * to avoid compile warning */
1406  child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)pair->first;
1407  break;
1408  }
1409 #endif
1410 #if CONFIG_D3D11VA
1411  case AV_HWDEVICE_TYPE_D3D11VA:
1412  {
1413  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1414  child_data = pair->first;
1415  break;
1416  }
1417 #endif
1418 #if CONFIG_DXVA2
1419  case AV_HWDEVICE_TYPE_DXVA2:
1420  {
1421  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1422  child_data = pair->first;
1423  break;
1424  }
1425 #endif
1426  default:
1427  return AVERROR(ENOSYS);
1428  }
1429 
1430  if (dst->format == child_frames_ctx->format) {
1431  ret = ff_hwframe_map_create(s->child_frames_ref,
1432  dst, src, NULL, NULL);
1433  if (ret < 0)
1434  return ret;
1435 
1436  dst->width = src->width;
1437  dst->height = src->height;
1438 
1439  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
1440  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1441  dst->data[0] = pair->first;
1442  dst->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
1443  } else {
1444  dst->data[3] = child_data;
1445  }
1446 
1447  return 0;
1448  }
1449 
1450  desc = av_pix_fmt_desc_get(dst->format);
1451  if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
1452  // This only supports mapping to software.
1453  return AVERROR(ENOSYS);
1454  }
1455 
1456  dummy = av_frame_alloc();
1457  if (!dummy)
1458  return AVERROR(ENOMEM);
1459 
1460  dummy->buf[0] = av_buffer_ref(src->buf[0]);
1461  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
1462  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
1463  goto fail;
1464 
1465  dummy->format = child_frames_ctx->format;
1466  dummy->width = src->width;
1467  dummy->height = src->height;
1468 
1469  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
1470  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1471  dummy->data[0] = pair->first;
1472  dummy->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
1473  } else {
1474  dummy->data[3] = child_data;
1475  }
1476 
1477  ret = av_hwframe_map(dst, dummy, flags);
1478 
1479 fail:
1480  av_frame_free(&dummy);
1481 
1482  return ret;
1483 }
1484 
1485 static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
1486  const AVFrame *src)
1487 {
1488  QSVFramesContext *s = ctx->internal->priv;
1489  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
1490  int download = !!src->hw_frames_ctx;
1491  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
1492 
1493  AVFrame *dummy;
1494  int ret;
1495 
1496  dummy = av_frame_alloc();
1497  if (!dummy)
1498  return AVERROR(ENOMEM);
1499 
1500  dummy->format = child_frames_ctx->format;
1501  dummy->width = src->width;
1502  dummy->height = src->height;
1503  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
1504  dummy->data[3] = surf->Data.MemId;
1505  dummy->hw_frames_ctx = s->child_frames_ref;
1506 
1507  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
1508  av_hwframe_transfer_data(dummy, src, 0);
1509 
1510  dummy->buf[0] = NULL;
1511  dummy->data[3] = NULL;
1512  dummy->hw_frames_ctx = NULL;
1513 
1514  av_frame_free(&dummy);
1515 
1516  return ret;
1517 }
1518 
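/* Point the mfxFrameData plane pointers of a system-memory surface at the
 * matching AVFrame planes for each supported pixel format. */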
1519 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
1520 {
1521  switch (frame->format) {
1522  case AV_PIX_FMT_NV12:
1523  case AV_PIX_FMT_P010:
1524  case AV_PIX_FMT_P012:
1525  surface->Data.Y = frame->data[0];
1526  surface->Data.UV = frame->data[1];
1527  break;
1528 
1529  case AV_PIX_FMT_YUV420P:
1530  surface->Data.Y = frame->data[0];
1531  surface->Data.U = frame->data[1];
1532  surface->Data.V = frame->data[2];
1533  break;
1534 
1535  case AV_PIX_FMT_BGRA:
1536  surface->Data.B = frame->data[0];
1537  surface->Data.G = frame->data[0] + 1;
1538  surface->Data.R = frame->data[0] + 2;
1539  surface->Data.A = frame->data[0] + 3;
1540  break;
1541  case AV_PIX_FMT_YUYV422:
1542  surface->Data.Y = frame->data[0];
1543  surface->Data.U = frame->data[0] + 1;
1544  surface->Data.V = frame->data[0] + 3;
1545  break;
1546 
1547  case AV_PIX_FMT_Y210:
1548  case AV_PIX_FMT_Y212:
1549  surface->Data.Y16 = (mfxU16 *)frame->data[0];
1550  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
1551  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
1552  break;
1553  case AV_PIX_FMT_VUYX:
1554  surface->Data.V = frame->data[0];
1555  surface->Data.U = frame->data[0] + 1;
1556  surface->Data.Y = frame->data[0] + 2;
1557  // Only set Data.A to a valid address, the SDK doesn't
1558  // use the value from the frame.
1559  surface->Data.A = frame->data[0] + 3;
1560  break;
1561  case AV_PIX_FMT_XV30:
1562  surface->Data.U = frame->data[0];
1563  break;
1564  case AV_PIX_FMT_XV36:
1565  surface->Data.U = frame->data[0];
1566  surface->Data.Y = frame->data[0] + 2;
1567  surface->Data.V = frame->data[0] + 4;
1568  // Only set Data.A to a valid address, the SDK doesn't
1569  // use the value from the frame.
1570  surface->Data.A = frame->data[0] + 6;
1571  break;
1572 #if CONFIG_VAAPI
1573  case AV_PIX_FMT_UYVY422:
1574  surface->Data.Y = frame->data[0] + 1;
1575  surface->Data.U = frame->data[0];
1576  surface->Data.V = frame->data[0] + 2;
1577  break;
1578 #endif
1579  default:
1580  return MFX_ERR_UNSUPPORTED;
1581  }
1582  surface->Data.Pitch = frame->linesize[0];
1583  surface->Data.TimeStamp = frame->pts;
1584 
1585  return 0;
1586 }
1587 
1588 static int qsv_internal_session_check_init(AVHWFramesContext *ctx, int upload)
1589 {
1590  QSVFramesContext *s = ctx->internal->priv;
1591  atomic_int *inited = upload ? &s->session_upload_init : &s->session_download_init;
1592  mfxSession *session = upload ? &s->session_upload : &s->session_download;
1593  int ret = 0;
1594 
1595  if (atomic_load(inited))
1596  return 0;
1597 
1598 #if HAVE_PTHREADS
1599  pthread_mutex_lock(&s->session_lock);
1600 #endif
1601 
1602  if (!atomic_load(inited)) {
1603  ret = qsv_init_internal_session(ctx, session, upload);
1604  atomic_store(inited, 1);
1605  }
1606 
1607 #if HAVE_PTHREADS
1608  pthread_mutex_unlock(&s->session_lock);
1609 #endif
1610 
1611  return ret;
1612 }
1613 
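/* Download a video-memory surface into a system-memory frame via the internal
 * VPP session, bouncing through a 16-aligned temporary frame when the
 * destination does not satisfy the libmfx alignment requirements. */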
1614 static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
1615  const AVFrame *src)
1616 {
1617  QSVFramesContext *s = ctx->internal->priv;
1618  mfxFrameSurface1 out = {{ 0 }};
1619  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
1620 
1621  mfxSyncPoint sync = NULL;
1622  mfxStatus err;
1623  int ret = 0;
1624  /* download to temp frame if the output is not padded as libmfx requires */
1625  AVFrame *tmp_frame = &s->realigned_download_frame;
1626  AVFrame *dst_frame;
1627  int realigned = 0;
1628 
1629  ret = qsv_internal_session_check_init(ctx, 0);
1630  if (ret < 0)
1631  return ret;
1632 
1633  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1634  * Height must be a multiple of 16 for progressive frame sequence and a
1635  * multiple of 32 otherwise.", so align all frames to 16 before downloading. */
1636  if (dst->height & 15 || dst->linesize[0] & 15) {
1637  realigned = 1;
1638  if (tmp_frame->format != dst->format ||
1639  tmp_frame->width != FFALIGN(dst->linesize[0], 16) ||
1640  tmp_frame->height != FFALIGN(dst->height, 16)) {
1641  av_frame_unref(tmp_frame);
1642 
1643  tmp_frame->format = dst->format;
1644  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1645  tmp_frame->height = FFALIGN(dst->height, 16);
1646  ret = av_frame_get_buffer(tmp_frame, 0);
1647  if (ret < 0)
1648  return ret;
1649  }
1650  }
1651 
1652  dst_frame = realigned ? tmp_frame : dst;
1653 
1654  if (!s->session_download) {
1655  if (s->child_frames_ref)
1656  return qsv_transfer_data_child(ctx, dst_frame, src);
1657 
1658  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
1659  return AVERROR(ENOSYS);
1660  }
1661 
1662  out.Info = in->Info;
1663  map_frame_to_surface(dst_frame, &out);
1664 
1665  do {
1666  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
1667  if (err == MFX_WRN_DEVICE_BUSY)
1668  av_usleep(1);
1669  } while (err == MFX_WRN_DEVICE_BUSY);
1670 
1671  if (err < 0 || !sync) {
1672  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
1673  return AVERROR_UNKNOWN;
1674  }
1675 
1676  do {
1677  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
1678  } while (err == MFX_WRN_IN_EXECUTION);
1679  if (err < 0) {
1680  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
1681  return AVERROR_UNKNOWN;
1682  }
1683 
1684  if (realigned) {
1685  tmp_frame->width = dst->width;
1686  tmp_frame->height = dst->height;
1687  ret = av_frame_copy(dst, tmp_frame);
1688  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1689  tmp_frame->height = FFALIGN(dst->height, 16);
1690  if (ret < 0)
1691  return ret;
1692  }
1693 
1694  return 0;
1695 }
1696 
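/* Upload a system-memory frame into a video-memory surface, first copying it
 * into a 16-aligned temporary frame with replicated border padding when the
 * source is not suitably padded. */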
1697 static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
1698  const AVFrame *src)
1699 {
1700  QSVFramesContext *s = ctx->internal->priv;
1701  mfxFrameSurface1 in = {{ 0 }};
1702  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
1703  mfxFrameInfo tmp_info;
1704 
1705  mfxSyncPoint sync = NULL;
1706  mfxStatus err;
1707  int ret = 0;
1708  /* make a copy if the input is not padded as libmfx requires */
1709  AVFrame *tmp_frame = &s->realigned_upload_frame;
1710  const AVFrame *src_frame;
1711  int realigned = 0;
1712 
1713  ret = qsv_internal_session_check_init(ctx, 1);
1714  if (ret < 0)
1715  return ret;
1716 
1717  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1718  * Height must be a multiple of 16 for progressive frame sequence and a
1719  * multiple of 32 otherwise.", so align all frames to 16 before uploading. */
1720  if (src->height & 15 || src->linesize[0] & 15) {
1721  realigned = 1;
1722  if (tmp_frame->format != src->format ||
1723  tmp_frame->width != FFALIGN(src->width, 16) ||
1724  tmp_frame->height != FFALIGN(src->height, 16)) {
1725  av_frame_unref(tmp_frame);
1726 
1727  tmp_frame->format = src->format;
1728  tmp_frame->width = FFALIGN(src->width, 16);
1729  tmp_frame->height = FFALIGN(src->height, 16);
1730  ret = av_frame_get_buffer(tmp_frame, 0);
1731  if (ret < 0)
1732  return ret;
1733  }
1734  ret = av_frame_copy(tmp_frame, src);
1735  if (ret < 0) {
1736  av_frame_unref(tmp_frame);
1737  return ret;
1738  }
1739  ret = qsv_fill_border(tmp_frame, src);
1740  if (ret < 0) {
1741  av_frame_unref(tmp_frame);
1742  return ret;
1743  }
1744 
1745  tmp_info = out->Info;
1746  out->Info.CropW = FFMIN(out->Info.Width, tmp_frame->width);
1747  out->Info.CropH = FFMIN(out->Info.Height, tmp_frame->height);
1748  }
1749 
1750  src_frame = realigned ? tmp_frame : src;
1751 
1752  if (!s->session_upload) {
1753  if (s->child_frames_ref)
1754  return qsv_transfer_data_child(ctx, dst, src_frame);
1755 
1756  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
1757  return AVERROR(ENOSYS);
1758  }
1759 
1760  in.Info = out->Info;
1761  map_frame_to_surface(src_frame, &in);
1762 
1763  do {
1764  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
1765  if (err == MFX_WRN_DEVICE_BUSY)
1766  av_usleep(1);
1767  } while (err == MFX_WRN_DEVICE_BUSY);
1768 
1769  if (err < 0 || !sync) {
1770  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
1771  return AVERROR_UNKNOWN;
1772  }
1773 
1774  do {
1775  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
1776  } while (err == MFX_WRN_IN_EXECUTION);
1777  if (err < 0) {
1778  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
1779  return AVERROR_UNKNOWN;
1780  }
1781 
1782  if (realigned) {
1783  out->Info.CropW = tmp_info.CropW;
1784  out->Info.CropH = tmp_info.CropH;
1785  }
1786 
1787  return 0;
1788 }
1789 
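/* Wrap an existing VAAPI/D3D11/DXVA2 frames context in QSV surfaces so it can
 * be used as an AV_PIX_FMT_QSV frames context. */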
1790 static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
1791  AVHWFramesContext *src_ctx, int flags)
1792 {
1793  QSVFramesContext *s = dst_ctx->internal->priv;
1794  AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
1795  int i;
1796 
1797  if (src_ctx->initial_pool_size == 0) {
1798  av_log(dst_ctx, AV_LOG_ERROR, "Only fixed-size pools can be "
1799  "mapped to QSV frames.\n");
1800  return AVERROR(EINVAL);
1801  }
1802 
1803  switch (src_ctx->device_ctx->type) {
1804 #if CONFIG_VAAPI
1805  case AV_HWDEVICE_TYPE_VAAPI:
1806  {
1807  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
1808  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1809  sizeof(*s->handle_pairs_internal));
1810  if (!s->handle_pairs_internal)
1811  return AVERROR(ENOMEM);
1812  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1813  sizeof(*s->surfaces_internal));
1814  if (!s->surfaces_internal)
1815  return AVERROR(ENOMEM);
1816  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1817  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1818  s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
1819  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1820  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1821  }
1822  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1823  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1824  }
1825  break;
1826 #endif
1827 #if CONFIG_D3D11VA
1828  case AV_HWDEVICE_TYPE_D3D11VA:
1829  {
1830  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
1831  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1832  sizeof(*s->handle_pairs_internal));
1833  if (!s->handle_pairs_internal)
1834  return AVERROR(ENOMEM);
1835  s->surfaces_internal = av_calloc(src_ctx->initial_pool_size,
1836  sizeof(*s->surfaces_internal));
1837  if (!s->surfaces_internal)
1838  return AVERROR(ENOMEM);
1839  for (i = 0; i < src_ctx->initial_pool_size; i++) {
1840  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1841  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
1842  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1843  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1844  } else {
1845  s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
1846  }
1847  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1848  }
1849  dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
1850  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1851  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1852  } else {
1853  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1854  }
1855  }
1856  break;
1857 #endif
1858 #if CONFIG_DXVA2
1859  case AV_HWDEVICE_TYPE_DXVA2:
1860  {
1861  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1862  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1863  sizeof(*s->handle_pairs_internal));
1864  if (!s->handle_pairs_internal)
1865  return AVERROR(ENOMEM);
1866  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1867  sizeof(*s->surfaces_internal));
1868  if (!s->surfaces_internal)
1869  return AVERROR(ENOMEM);
1870  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1871  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1872  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
1873  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1874  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1875  }
1876  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1877  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1878  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1879  else
1880  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1881  }
1882  break;
1883 #endif
1884  default:
1885  return AVERROR(ENOSYS);
1886  }
1887 
1888  dst_hwctx->surfaces = s->surfaces_internal;
1889 
1890  return 0;
1891 }
1892 
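/* Map a child (VAAPI/D3D11/DXVA2) frame to the QSV surface in this frames
 * context that wraps the same native handle. */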
1893 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1894  AVFrame *dst, const AVFrame *src, int flags)
1895 {
1896  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1897  int i, err, index = -1;
1898 
1899  for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
1900  switch(src->format) {
1901 #if CONFIG_VAAPI
1902  case AV_PIX_FMT_VAAPI:
1903  {
1904  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1905  if (*(VASurfaceID*)pair->first == (VASurfaceID)(uintptr_t)src->data[3]) {
1906  index = i;
1907  break;
1908  }
1909  }
1910 #endif
1911 #if CONFIG_D3D11VA
1912  case AV_PIX_FMT_D3D11:
1913  {
1914  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1915  if (pair->first == src->data[0]
1916  && (pair->second == src->data[1]
1917  || (pair->second == (mfxMemId)MFX_INFINITE && src->data[1] == (uint8_t *)0))) {
1918  index = i;
1919  break;
1920  }
1921  }
1922 #endif
1923 #if CONFIG_DXVA2
1924  case AV_PIX_FMT_DXVA2_VLD:
1925  {
1926  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1927  if (pair->first == src->data[3]) {
1928  index = i;
1929  break;
1930  }
1931  }
1932 #endif
1933  }
1934  }
1935  if (index < 0) {
1936  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1937  "is not in the mapped frames context.\n");
1938  return AVERROR(EINVAL);
1939  }
1940 
1941  err = ff_hwframe_map_create(dst->hw_frames_ctx,
1942  dst, src, NULL, NULL);
1943  if (err)
1944  return err;
1945 
1946  dst->width = src->width;
1947  dst->height = src->height;
1948  dst->data[3] = (uint8_t*)&hwctx->surfaces[index];
1949 
1950  return 0;
1951 }
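A hedged usage sketch (not part of this file): qsv_map_to() is reached when a child-format frame, for example AV_PIX_FMT_VAAPI, is mapped into a derived QSV frames context through the public av_hwframe_map() call. The helper name and the read-only mapping flag below are illustrative assumptions.

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Sketch only: 'qsv_frames_ref' is assumed to be a QSV frames context derived
 * from the frames context of 'child_frame' (see the previous example). */
static int map_child_frame_to_qsv(AVBufferRef *qsv_frames_ref,
                                  const AVFrame *child_frame, AVFrame **out)
{
    AVFrame *qsv_frame = av_frame_alloc();
    int ret;

    if (!qsv_frame)
        return AVERROR(ENOMEM);

    /* For a hw->hw mapping the destination frame carries the target format
     * and frames context; av_hwframe_map() then calls into qsv_map_to(). */
    qsv_frame->format        = AV_PIX_FMT_QSV;
    qsv_frame->hw_frames_ctx = av_buffer_ref(qsv_frames_ref);
    if (!qsv_frame->hw_frames_ctx) {
        av_frame_free(&qsv_frame);
        return AVERROR(ENOMEM);
    }

    ret = av_hwframe_map(qsv_frame, child_frame, AV_HWFRAME_MAP_READ);
    if (ret < 0) {
        av_frame_free(&qsv_frame);
        return ret;
    }

    *out = qsv_frame;
    return 0;
}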
1952 
1953 static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
1954  const void *hwconfig,
1955  AVHWFramesConstraints *constraints)
1956 {
1957  int i;
1958 
1959  constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
1960  sizeof(*constraints->valid_sw_formats));
1961  if (!constraints->valid_sw_formats)
1962  return AVERROR(ENOMEM);
1963 
1964  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1965  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
1966  constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
1967 
1968  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
1969  if (!constraints->valid_hw_formats)
1970  return AVERROR(ENOMEM);
1971 
1972  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
1973  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1974 
1975  return 0;
1976 }
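As an illustrative aside (not part of this file), the constraints filled in above are what a caller receives from av_hwdevice_get_hwframe_constraints(); the printing helper below is an assumed example name.

#include <stdio.h>
#include <libavutil/hwcontext.h>
#include <libavutil/pixdesc.h>

/* Sketch only: list the software formats a QSV device reports. */
static void print_qsv_sw_formats(AVBufferRef *qsv_device_ref)
{
    AVHWFramesConstraints *cst =
        av_hwdevice_get_hwframe_constraints(qsv_device_ref, NULL);

    if (!cst)
        return;

    for (int i = 0; cst->valid_sw_formats &&
                    cst->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++)
        printf("supported sw format: %s\n",
               av_get_pix_fmt_name(cst->valid_sw_formats[i]));

    av_hwframe_constraints_free(&cst);
}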
1977 
1978 static void qsv_device_free(AVHWDeviceContext *ctx)
1979 {
1980  AVQSVDeviceContext *hwctx = ctx->hwctx;
1981  QSVDevicePriv *priv = ctx->user_opaque;
1982 
1983  if (hwctx->session)
1984  MFXClose(hwctx->session);
1985 
1986  if (hwctx->loader)
1987  MFXUnload(hwctx->loader);
1988  av_buffer_unref(&priv->child_device_ctx);
1989  av_freep(&priv);
1990 }
1991 
1992 static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
1993 {
1994  static const struct {
1995  const char *name;
1996  mfxIMPL impl;
1997  } impl_map[] = {
1998  { "auto", MFX_IMPL_AUTO },
1999  { "sw", MFX_IMPL_SOFTWARE },
2000  { "hw", MFX_IMPL_HARDWARE },
2001  { "auto_any", MFX_IMPL_AUTO_ANY },
2002  { "hw_any", MFX_IMPL_HARDWARE_ANY },
2003  { "hw2", MFX_IMPL_HARDWARE2 },
2004  { "hw3", MFX_IMPL_HARDWARE3 },
2005  { "hw4", MFX_IMPL_HARDWARE4 },
2006  };
2007 
2008  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
2009  int i;
2010 
2011  if (device) {
2012  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
2013  if (!strcmp(device, impl_map[i].name)) {
2014  impl = impl_map[i].impl;
2015  break;
2016  }
2017  if (i == FF_ARRAY_ELEMS(impl_map))
2018  impl = strtol(device, NULL, 0);
2019  }
2020 
2021  if (impl != MFX_IMPL_SOFTWARE) {
2022  if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
2023  impl |= MFX_IMPL_VIA_D3D11;
2024  else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
2025  impl |= MFX_IMPL_VIA_D3D9;
2026  }
2027 
2028  return impl;
2029 }
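For illustration only (not part of this file): the string parsed by choose_implementation() is the device argument of av_hwdevice_ctx_create(); on the ffmpeg command line it is, roughly, the part after the colon in -init_hw_device qsv=<name>:<device>. The helper name below is an assumption.

#include <libavutil/buffer.h>
#include <libavutil/hwcontext.h>

/* Sketch only: "hw2" selects MFX_IMPL_HARDWARE2, i.e. the second adapter;
 * a plain number is also accepted via the strtol() fallback above. */
static int open_qsv_on_second_adapter(AVBufferRef **qsv_device_ref)
{
    return av_hwdevice_ctx_create(qsv_device_ref, AV_HWDEVICE_TYPE_QSV,
                                  "hw2", NULL, 0);
}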
2030 
2031 static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
2032  mfxIMPL implementation,
2033  AVHWDeviceContext *child_device_ctx,
2034  int flags)
2035 {
2036  AVQSVDeviceContext *hwctx = ctx->hwctx;
2037 
2038  mfxVersion ver = { { 3, 1 } };
2039  mfxHDL handle;
2040  mfxHandleType handle_type;
2041  mfxStatus err;
2042  int ret;
2043 
2044  switch (child_device_ctx->type) {
2045 #if CONFIG_VAAPI
2046  case AV_HWDEVICE_TYPE_VAAPI:
2047  {
2048  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2049  handle_type = MFX_HANDLE_VA_DISPLAY;
2050  handle = (mfxHDL)child_device_hwctx->display;
2051  }
2052  break;
2053 #endif
2054 #if CONFIG_D3D11VA
2055  case AV_HWDEVICE_TYPE_D3D11VA:
2056  {
2057  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2058  handle_type = MFX_HANDLE_D3D11_DEVICE;
2059  handle = (mfxHDL)child_device_hwctx->device;
2060  }
2061  break;
2062 #endif
2063 #if CONFIG_DXVA2
2064  case AV_HWDEVICE_TYPE_DXVA2:
2065  {
2066  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2067  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
2068  handle = (mfxHDL)child_device_hwctx->devmgr;
2069  }
2070  break;
2071 #endif
2072  default:
2073  ret = AVERROR(ENOSYS);
2074  goto fail;
2075  }
2076 
2077  ret = qsv_create_mfx_session(ctx, handle, handle_type, implementation, &ver,
2078  &hwctx->session, &hwctx->loader);
2079  if (ret)
2080  goto fail;
2081 
2082  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
2083  if (err != MFX_ERR_NONE) {
2084  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
2085  "%d\n", err);
2086  ret = AVERROR_UNKNOWN;
2087  goto fail;
2088  }
2089 
2090  return 0;
2091 
2092 fail:
2093  if (hwctx->session)
2094  MFXClose(hwctx->session);
2095 
2096  if (hwctx->loader)
2097  MFXUnload(hwctx->loader);
2098 
2099  hwctx->session = NULL;
2100  hwctx->loader = NULL;
2101  return ret;
2102 }
2103 
2104 static int qsv_device_derive(AVHWDeviceContext *ctx,
2105  AVHWDeviceContext *child_device_ctx,
2106  AVDictionary *opts, int flags)
2107 {
2108  mfxIMPL impl;
2109  QSVDevicePriv *priv;
2110 
2111  priv = av_mallocz(sizeof(*priv));
2112  if (!priv)
2113  return AVERROR(ENOMEM);
2114 
2115  ctx->user_opaque = priv;
2116  ctx->free = qsv_device_free;
2117 
2118  impl = choose_implementation("hw_any", child_device_ctx->type);
2119  return qsv_device_derive_from_child(ctx, impl,
2120  child_device_ctx, flags);
2121 }
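A hedged sketch (not part of this file) of the call that reaches qsv_device_derive(): deriving a QSV device from an already opened child device, here assumed to be VAAPI.

#include <libavutil/buffer.h>
#include <libavutil/hwcontext.h>

/* Sketch only: derive an AV_HWDEVICE_TYPE_QSV device from an existing
 * VAAPI device reference. */
static int derive_qsv_from_vaapi(AVBufferRef *vaapi_device_ref,
                                 AVBufferRef **qsv_device_ref)
{
    return av_hwdevice_ctx_create_derived(qsv_device_ref,
                                          AV_HWDEVICE_TYPE_QSV,
                                          vaapi_device_ref, 0);
}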
2122 
2123 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
2124  AVDictionary *opts, int flags)
2125 {
2126  QSVDevicePriv *priv;
2127  enum AVHWDeviceType child_device_type;
2128  AVHWDeviceContext *child_device;
2129  AVDictionary *child_device_opts;
2130  AVDictionaryEntry *e;
2131 
2132  mfxIMPL impl;
2133  int ret;
2134 
2135  priv = av_mallocz(sizeof(*priv));
2136  if (!priv)
2137  return AVERROR(ENOMEM);
2138 
2139  ctx->user_opaque = priv;
2140  ctx->free = qsv_device_free;
2141 
2142  e = av_dict_get(opts, "child_device_type", NULL, 0);
2143  if (e) {
2144  child_device_type = av_hwdevice_find_type_by_name(e->value);
2145  if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
2146  av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
2147  "\"%s\".\n", e->value);
2148  return AVERROR(EINVAL);
2149  }
2150 #if QSV_ONEVPL
2151  } else if (CONFIG_D3D11VA) { // Use D3D11 by default if d3d11va is enabled
2153  "Defaulting child_device_type to AV_HWDEVICE_TYPE_D3D11VA for oneVPL."
2154  "Please explicitly set child device type via \"-init_hw_device\" "
2155  "option if needed.\n");
2156  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
2157  } else if (CONFIG_DXVA2) {
2158  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
2159 #else
2160  } else if (CONFIG_DXVA2) {
2162  "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
2163  "with old commandlines. This behaviour will be removed "
2164  "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
2165  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
2166  } else if (CONFIG_D3D11VA) {
2167  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
2168 #endif
2169  } else if (CONFIG_VAAPI) {
2170  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
2171  } else {
2172  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
2173  return AVERROR(ENOSYS);
2174  }
2175 
2176 #if CONFIG_VAAPI && defined(_WIN32)
2177  /* AV_HWDEVICE_TYPE_VAAPI on Windows/Libva-win32 not supported */
2178  /* Reject user specified child_device_type or CONFIG_VAAPI on Windows */
2179  if (child_device_type == AV_HWDEVICE_TYPE_VAAPI) {
2180  av_log(ctx, AV_LOG_ERROR, "VAAPI child device type not supported for oneVPL on Windows"
2181  "\"%s\".\n", e->value);
2182  return AVERROR(EINVAL);
2183  }
2184 #endif
2185 
2186  child_device_opts = NULL;
2187  switch (child_device_type) {
2188 #if CONFIG_VAAPI
2189  case AV_HWDEVICE_TYPE_VAAPI:
2190  {
2191  // libmfx does not actually implement VAAPI properly; rather, it
2192  // depends on the specific behaviour of a matching iHD driver when
2193  // used on recent Intel hardware. Set options for the VAAPI device
2194  // creation so that a usable setup is picked by default if
2195  // possible, even when multiple devices and drivers are available.
2196  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
2197  av_dict_set(&child_device_opts, "driver", "iHD", 0);
2198  }
2199  break;
2200 #endif
2201 #if CONFIG_D3D11VA
2202  case AV_HWDEVICE_TYPE_D3D11VA:
2203  break;
2204 #endif
2205 #if CONFIG_DXVA2
2206  case AV_HWDEVICE_TYPE_DXVA2:
2207 #if QSV_ONEVPL
2208  {
2210  "d3d11va is not available or child device type is set to dxva2 "
2211  "explicitly for oneVPL.\n");
2212  }
2213 #endif
2214  break;
2215 #endif
2216  default:
2217  {
2218  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
2219  return AVERROR(ENOSYS);
2220  }
2221  break;
2222  }
2223 
2224  e = av_dict_get(opts, "child_device", NULL, 0);
2225  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
2226  e ? e->value : NULL, child_device_opts, 0);
2227 
2228  av_dict_free(&child_device_opts);
2229  if (ret < 0)
2230  return ret;
2231 
2232  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
2233 
2234  impl = choose_implementation(device, child_device_type);
2235 
2236  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
2237 }
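To round off, an illustrative sketch (not part of this file) of creating a standalone QSV device with the options parsed by qsv_device_create() above; the option names child_device_type and child_device come from that function, while the DRM render node path is only an assumed example.

#include <libavutil/buffer.h>
#include <libavutil/dict.h>
#include <libavutil/hwcontext.h>

/* Sketch only: force a VAAPI child device on an assumed render node. */
static int open_qsv_via_vaapi(AVBufferRef **qsv_device_ref)
{
    AVDictionary *opts = NULL;
    int ret;

    av_dict_set(&opts, "child_device_type", "vaapi", 0);
    av_dict_set(&opts, "child_device", "/dev/dri/renderD128", 0);

    ret = av_hwdevice_ctx_create(qsv_device_ref, AV_HWDEVICE_TYPE_QSV,
                                 NULL, opts, 0);

    av_dict_free(&opts);
    return ret;
}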
2238 
2241  .name = "QSV",
2242 
2243  .device_hwctx_size = sizeof(AVQSVDeviceContext),
2244  .device_priv_size = sizeof(QSVDeviceContext),
2245  .frames_hwctx_size = sizeof(AVQSVFramesContext),
2246  .frames_priv_size = sizeof(QSVFramesContext),
2247 
2248  .device_create = qsv_device_create,
2249  .device_derive = qsv_device_derive,
2250  .device_init = qsv_device_init,
2251  .frames_get_constraints = qsv_frames_get_constraints,
2252  .frames_init = qsv_frames_init,
2253  .frames_uninit = qsv_frames_uninit,
2254  .frames_get_buffer = qsv_get_buffer,
2255  .transfer_get_formats = qsv_transfer_get_formats,
2256  .transfer_data_to = qsv_transfer_data_to,
2257  .transfer_data_from = qsv_transfer_data_from,
2258  .map_to = qsv_map_to,
2259  .map_from = qsv_map_from,
2260  .frames_derive_to = qsv_frames_derive_to,
2261  .frames_derive_from = qsv_frames_derive_from,
2262 
2263  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
2264 };