FFmpeg
hwcontext_qsv.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdatomic.h>
20 #include <stdint.h>
21 #include <string.h>
22 
23 #include <mfxvideo.h>
24 
25 #include "config.h"
26 
27 #if HAVE_PTHREADS
28 #include <pthread.h>
29 #endif
30 
31 #define COBJMACROS
32 #if CONFIG_VAAPI
33 #include "hwcontext_vaapi.h"
34 #endif
35 #if CONFIG_D3D11VA
36 #include "hwcontext_d3d11va.h"
37 #endif
38 #if CONFIG_DXVA2
39 #include "hwcontext_dxva2.h"
40 #endif
41 
42 #include "buffer.h"
43 #include "common.h"
44 #include "hwcontext.h"
45 #include "hwcontext_internal.h"
46 #include "hwcontext_qsv.h"
47 #include "mem.h"
48 #include "pixfmt.h"
49 #include "pixdesc.h"
50 #include "time.h"
51 #include "imgutils.h"
52 #include "avassert.h"
53 
54 #define QSV_VERSION_ATLEAST(MAJOR, MINOR) \
55  (MFX_VERSION_MAJOR > (MAJOR) || \
56  MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR))
57 
58 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
59 #define QSV_ONEVPL QSV_VERSION_ATLEAST(2, 0)
60 #define QSV_HAVE_OPAQUE !QSV_ONEVPL
61 
62 #if QSV_ONEVPL
63 #include <mfxdispatcher.h>
64 #else
65 #define MFXUnload(a) do { } while(0)
66 #endif
67 
68 typedef struct QSVDevicePriv {
69  AVBufferRef *child_device_ctx;
70 } QSVDevicePriv;
71 
72 typedef struct QSVDeviceContext {
73  mfxHDL handle;
74  mfxHandleType handle_type;
75  mfxVersion ver;
76  mfxIMPL impl;
77 
78  enum AVHWDeviceType child_device_type;
79  enum AVPixelFormat child_pix_fmt;
80 } QSVDeviceContext;
81 
82 typedef struct QSVFramesContext {
83  mfxSession session_download;
84  atomic_int session_download_init;
85  mfxSession session_upload;
86  atomic_int session_upload_init;
87 #if HAVE_PTHREADS
88  pthread_mutex_t session_lock;
89 #endif
90 
91  AVBufferRef *child_frames_ref;
92  mfxFrameSurface1 *surfaces_internal;
93  mfxHDLPair *handle_pairs_internal;
94  int nb_surfaces_used;
95 
96  // used in the frame allocator for non-opaque surfaces
97  mfxMemId *mem_ids;
98 #if QSV_HAVE_OPAQUE
99  // used in the opaque alloc request for opaque surfaces
100  mfxFrameSurface1 **surface_ptrs;
101 
102  mfxExtOpaqueSurfaceAlloc opaque_alloc;
103  mfxExtBuffer *ext_buffers[1];
104 #endif
105  AVFrame realigned_upload_frame;
106  AVFrame realigned_download_frame;
107 } QSVFramesContext;
108 
109 static const struct {
110  enum AVPixelFormat pix_fmt;
111  uint32_t fourcc;
112 } supported_pixel_formats[] = {
113  { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
114  { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
115  { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
116  { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
117 #if CONFIG_VAAPI
118  { AV_PIX_FMT_YUYV422,
119  MFX_FOURCC_YUY2 },
120  { AV_PIX_FMT_Y210,
121  MFX_FOURCC_Y210 },
122 #endif
123 };
124 
125 extern int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
126  enum AVHWDeviceType base_dev_type,
127  void **base_handle);
128 
129 /**
130  * The caller must allocate enough space in base_handle (two entries for D3D11, one otherwise).
131  **/
132 int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
133  enum AVHWDeviceType base_dev_type,
134  void **base_handle)
135 {
136  mfxHDLPair *handle_pair;
137  handle_pair = surf->Data.MemId;
138  switch (base_dev_type) {
139 #if CONFIG_VAAPI
140  case AV_HWDEVICE_TYPE_VAAPI:
141  base_handle[0] = handle_pair->first;
142  return 0;
143 #endif
144 #if CONFIG_D3D11VA
145  case AV_HWDEVICE_TYPE_D3D11VA:
146  base_handle[0] = handle_pair->first;
147  base_handle[1] = handle_pair->second;
148  return 0;
149 #endif
150 #if CONFIG_DXVA2
151  case AV_HWDEVICE_TYPE_DXVA2:
152  base_handle[0] = handle_pair->first;
153  return 0;
154 #endif
155  }
156  return AVERROR(EINVAL);
157 }
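/* Illustrative usage sketch (not part of the public API; names are assumed
 * for the example): for D3D11 the caller must provide room for two handles,
 * as the switch above writes base_handle[0] and base_handle[1], e.g.
 *
 *     void *handles[2];
 *     if (!ff_qsv_get_surface_base_handle(surf, AV_HWDEVICE_TYPE_D3D11VA, handles)) {
 *         ID3D11Texture2D *tex   = handles[0];             // pair->first
 *         intptr_t texture_index = (intptr_t)handles[1];   // pair->second
 *     }
 */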
158 
159 static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
160 {
161  int i;
162  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
163  if (supported_pixel_formats[i].pix_fmt == pix_fmt)
164  return supported_pixel_formats[i].fourcc;
165  }
166  return 0;
167 }
168 
169 #if CONFIG_D3D11VA
170 static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
171 {
172  uint32_t bind_flags = 0;
173 
174  if ((mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) && (mem_type & MFX_MEMTYPE_INTERNAL_FRAME))
175  bind_flags = D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER;
176  else
177  bind_flags = D3D11_BIND_DECODER;
178 
179  if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
180  bind_flags = D3D11_BIND_RENDER_TARGET;
181 
182  return bind_flags;
183 }
184 #endif
185 
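/* Pad a copied frame out to the aligned surface size: replicate the last
 * pixel of each row into the right padding and the last valid row into the
 * bottom padding, so the aligned surface contains no uninitialized data. */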
186 static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
187 {
188  const AVPixFmtDescriptor *desc;
189  int i, planes_nb = 0;
190  if (dst->format != src->format)
191  return AVERROR(EINVAL);
192 
193  desc = av_pix_fmt_desc_get(dst->format);
194 
195  for (i = 0; i < desc->nb_components; i++)
196  planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);
197 
198  for (i = 0; i < planes_nb; i++) {
199  int sheight, dheight, y;
200  ptrdiff_t swidth = av_image_get_linesize(src->format,
201  src->width,
202  i);
203  ptrdiff_t dwidth = av_image_get_linesize(dst->format,
204  dst->width,
205  i);
206  const AVComponentDescriptor comp = desc->comp[i];
207  if (swidth < 0 || dwidth < 0) {
208  av_log(NULL, AV_LOG_ERROR, "av_image_get_linesize failed\n");
209  return AVERROR(EINVAL);
210  }
211  sheight = src->height;
212  dheight = dst->height;
213  if (i) {
214  sheight = AV_CEIL_RSHIFT(src->height, desc->log2_chroma_h);
215  dheight = AV_CEIL_RSHIFT(dst->height, desc->log2_chroma_h);
216  }
217  //fill right padding
218  for (y = 0; y < sheight; y++) {
219  void *line_ptr = dst->data[i] + y*dst->linesize[i] + swidth;
220  av_memcpy_backptr(line_ptr,
221  comp.depth > 8 ? 2 : 1,
222  dwidth - swidth);
223  }
224  //fill bottom padding
225  for (y = sheight; y < dheight; y++) {
226  memcpy(dst->data[i]+y*dst->linesize[i],
227  dst->data[i]+(sheight-1)*dst->linesize[i],
228  dwidth);
229  }
230  }
231  return 0;
232 }
233 
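/* Query the session implementation and version and, when the backing child
 * device type (VAAPI/D3D11/DXVA2) is compiled in, fetch its native device
 * handle so that child device and frames contexts can be derived later. */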
234 static int qsv_device_init(AVHWDeviceContext *ctx)
235 {
236  AVQSVDeviceContext *hwctx = ctx->hwctx;
237  QSVDeviceContext *s = ctx->internal->priv;
238  int hw_handle_supported = 0;
239  mfxHandleType handle_type;
240  enum AVHWDeviceType device_type;
241  enum AVPixelFormat pix_fmt;
242  mfxStatus err;
243 
244  err = MFXQueryIMPL(hwctx->session, &s->impl);
245  if (err == MFX_ERR_NONE)
246  err = MFXQueryVersion(hwctx->session, &s->ver);
247  if (err != MFX_ERR_NONE) {
248  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
249  return AVERROR_UNKNOWN;
250  }
251 
252  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
253 #if CONFIG_VAAPI
254  handle_type = MFX_HANDLE_VA_DISPLAY;
255  device_type = AV_HWDEVICE_TYPE_VAAPI;
256  pix_fmt = AV_PIX_FMT_VAAPI;
257  hw_handle_supported = 1;
258 #endif
259  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
260 #if CONFIG_D3D11VA
261  handle_type = MFX_HANDLE_D3D11_DEVICE;
262  device_type = AV_HWDEVICE_TYPE_D3D11VA;
263  pix_fmt = AV_PIX_FMT_D3D11;
264  hw_handle_supported = 1;
265 #endif
266  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
267 #if CONFIG_DXVA2
268  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
269  device_type = AV_HWDEVICE_TYPE_DXVA2;
270  pix_fmt = AV_PIX_FMT_DXVA2_VLD;
271  hw_handle_supported = 1;
272 #endif
273  }
274 
275  if (hw_handle_supported) {
276  err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
277  if (err == MFX_ERR_NONE) {
278  s->handle_type = handle_type;
279  s->child_device_type = device_type;
280  s->child_pix_fmt = pix_fmt;
281  }
282  }
283  if (!s->handle) {
284  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
285  "from the session\n");
286  }
287  return 0;
288 }
289 
290 static void qsv_frames_uninit(AVHWFramesContext *ctx)
291 {
292  QSVFramesContext *s = ctx->internal->priv;
293 
294  if (s->session_download) {
295  MFXVideoVPP_Close(s->session_download);
296  MFXClose(s->session_download);
297  }
298  s->session_download = NULL;
299  s->session_download_init = 0;
300 
301  if (s->session_upload) {
302  MFXVideoVPP_Close(s->session_upload);
303  MFXClose(s->session_upload);
304  }
305  s->session_upload = NULL;
306  s->session_upload_init = 0;
307 
308 #if HAVE_PTHREADS
309  pthread_mutex_destroy(&s->session_lock);
310 #endif
311 
312  av_freep(&s->mem_ids);
313 #if QSV_HAVE_OPAQUE
314  av_freep(&s->surface_ptrs);
315 #endif
316  av_freep(&s->surfaces_internal);
317  av_freep(&s->handle_pairs_internal);
318  av_frame_unref(&s->realigned_upload_frame);
319  av_frame_unref(&s->realigned_download_frame);
320  av_buffer_unref(&s->child_frames_ref);
321 }
322 
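/* Buffer-pool callbacks for the internal surface pool: buffers simply wrap
 * entries of surfaces_internal, so nothing is freed on release and the
 * allocator returns NULL once all pre-allocated surfaces are handed out. */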
323 static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
324 {
325 }
326 
327 static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
328 {
329  AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
330  QSVFramesContext *s = ctx->internal->priv;
331  AVQSVFramesContext *hwctx = ctx->hwctx;
332 
333  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
334  s->nb_surfaces_used++;
335  return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
336  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
337  }
338 
339  return NULL;
340 }
341 
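/* Create a child (VAAPI/D3D11VA/DXVA2) frames context on the device handle
 * retrieved at init time and wire its surfaces into the mfxHDLPair /
 * mfxFrameSurface1 entries, so QSV frames are backed by real child-API
 * surfaces. */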
342 static int qsv_init_child_ctx(AVHWFramesContext *ctx)
343 {
344  AVQSVFramesContext *hwctx = ctx->hwctx;
345  QSVFramesContext *s = ctx->internal->priv;
346  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
347 
348  AVBufferRef *child_device_ref = NULL;
349  AVBufferRef *child_frames_ref = NULL;
350 
351  AVHWDeviceContext *child_device_ctx;
352  AVHWFramesContext *child_frames_ctx;
353 
354  int i, ret = 0;
355 
356  if (!device_priv->handle) {
357  av_log(ctx, AV_LOG_ERROR,
358  "Cannot create a non-opaque internal surface pool without "
359  "a hardware handle\n");
360  return AVERROR(EINVAL);
361  }
362 
363  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
364  if (!child_device_ref)
365  return AVERROR(ENOMEM);
366  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
367 
368 #if CONFIG_VAAPI
369  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
370  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
371  child_device_hwctx->display = (VADisplay)device_priv->handle;
372  }
373 #endif
374 #if CONFIG_D3D11VA
375  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
376  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
377  ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
378  child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
379  }
380 #endif
381 #if CONFIG_DXVA2
382  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
383  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
384  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
385  }
386 #endif
387 
388  ret = av_hwdevice_ctx_init(child_device_ref);
389  if (ret < 0) {
390  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
391  goto fail;
392  }
393 
394  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
395  if (!child_frames_ref) {
396  ret = AVERROR(ENOMEM);
397  goto fail;
398  }
399  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
400 
401  child_frames_ctx->format = device_priv->child_pix_fmt;
402  child_frames_ctx->sw_format = ctx->sw_format;
403  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
404  child_frames_ctx->width = FFALIGN(ctx->width, 16);
405  child_frames_ctx->height = FFALIGN(ctx->height, 16);
406 
407 #if CONFIG_D3D11VA
408  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
409  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
410  if (hwctx->frame_type == 0)
411  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
412  if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
413  child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
414  child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
415  }
416 #endif
417 #if CONFIG_DXVA2
418  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
419  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
420  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
421  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
422  else
423  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
424  }
425 #endif
426 
427  ret = av_hwframe_ctx_init(child_frames_ref);
428  if (ret < 0) {
429  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
430  goto fail;
431  }
432 
433 #if CONFIG_VAAPI
434  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
435  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
436  for (i = 0; i < ctx->initial_pool_size; i++) {
437  s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
438  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
439  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
440  }
441  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
442  }
443 #endif
444 #if CONFIG_D3D11VA
445  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
446  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
447  for (i = 0; i < ctx->initial_pool_size; i++) {
448  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
449  if(child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
450  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
451  } else {
452  s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
453  }
454  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
455  }
456  if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
457  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
458  } else {
459  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
460  }
461  }
462 #endif
463 #if CONFIG_DXVA2
464  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
465  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
466  for (i = 0; i < ctx->initial_pool_size; i++) {
467  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
468  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
469  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
470  }
471  if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
472  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
473  else
474  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
475  }
476 #endif
477 
478  s->child_frames_ref = child_frames_ref;
479  child_frames_ref = NULL;
480 
481 fail:
482  av_buffer_unref(&child_device_ref);
483  av_buffer_unref(&child_frames_ref);
484  return ret;
485 }
486 
487 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
488 {
489  const AVPixFmtDescriptor *desc;
490  uint32_t fourcc;
491 
492  desc = av_pix_fmt_desc_get(ctx->sw_format);
493  if (!desc)
494  return AVERROR(EINVAL);
495 
496  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
497  if (!fourcc)
498  return AVERROR(EINVAL);
499 
500  surf->Info.BitDepthLuma = desc->comp[0].depth;
501  surf->Info.BitDepthChroma = desc->comp[0].depth;
502  surf->Info.Shift = desc->comp[0].depth > 8;
503 
504  if (desc->log2_chroma_w && desc->log2_chroma_h)
505  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
506  else if (desc->log2_chroma_w)
507  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
508  else
509  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
510 
511  surf->Info.FourCC = fourcc;
512  surf->Info.Width = FFALIGN(ctx->width, 16);
513  surf->Info.CropW = ctx->width;
514  surf->Info.Height = FFALIGN(ctx->height, 16);
515  surf->Info.CropH = ctx->height;
516  surf->Info.FrameRateExtN = 25;
517  surf->Info.FrameRateExtD = 1;
518  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
519 
520  return 0;
521 }
522 
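/* Set up the fixed-size internal frame pool: initialize one mfxFrameSurface1
 * per pool entry and, for non-opaque memory, back the surfaces with a child
 * frames context. */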
523 static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
524 {
525  QSVFramesContext *s = ctx->internal->priv;
526  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
527 
528  int i, ret = 0;
529 
530  if (ctx->initial_pool_size <= 0) {
531  av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
532  return AVERROR(EINVAL);
533  }
534 
535  s->handle_pairs_internal = av_calloc(ctx->initial_pool_size,
536  sizeof(*s->handle_pairs_internal));
537  if (!s->handle_pairs_internal)
538  return AVERROR(ENOMEM);
539 
540  s->surfaces_internal = av_calloc(ctx->initial_pool_size,
541  sizeof(*s->surfaces_internal));
542  if (!s->surfaces_internal)
543  return AVERROR(ENOMEM);
544 
545  for (i = 0; i < ctx->initial_pool_size; i++) {
546  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
547  if (ret < 0)
548  return ret;
549  }
550 
551 #if QSV_HAVE_OPAQUE
552  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
553  ret = qsv_init_child_ctx(ctx);
554  if (ret < 0)
555  return ret;
556  }
557 #else
558  ret = qsv_init_child_ctx(ctx);
559  if (ret < 0)
560  return ret;
561 #endif
562 
563  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
564  ctx, qsv_pool_alloc, NULL);
565  if (!ctx->internal->pool_internal)
566  return AVERROR(ENOMEM);
567 
568  frames_hwctx->surfaces = s->surfaces_internal;
569  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
570 
571  return 0;
572 }
573 
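/* mfxFrameAllocator callbacks handed to libmfx for the internal sessions:
 * "allocation" only hands back the MemIds of the already existing surfaces,
 * locking is unsupported (video memory only), and GetHDL returns the stored
 * mfxHDLPair. */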
574 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
575  mfxFrameAllocResponse *resp)
576 {
577  AVHWFramesContext *ctx = pthis;
578  QSVFramesContext *s = ctx->internal->priv;
579  AVQSVFramesContext *hwctx = ctx->hwctx;
580  mfxFrameInfo *i = &req->Info;
581  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
582 
583  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
584  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
585  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
586  return MFX_ERR_UNSUPPORTED;
587  if (i->Width > i1->Width || i->Height > i1->Height ||
588  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
589  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
590  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
591  i->Width, i->Height, i->FourCC, i->ChromaFormat,
592  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
593  return MFX_ERR_UNSUPPORTED;
594  }
595 
596  resp->mids = s->mem_ids;
597  resp->NumFrameActual = hwctx->nb_surfaces;
598 
599  return MFX_ERR_NONE;
600 }
601 
602 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
603 {
604  return MFX_ERR_NONE;
605 }
606 
607 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
608 {
609  return MFX_ERR_UNSUPPORTED;
610 }
611 
612 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
613 {
614  return MFX_ERR_UNSUPPORTED;
615 }
616 
617 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
618 {
619  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
620  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
621 
622  pair_dst->first = pair_src->first;
623 
624  if (pair_src->second != (mfxMemId)MFX_INFINITE)
625  pair_dst->second = pair_src->second;
626  return MFX_ERR_NONE;
627 }
628 
629 #if QSV_ONEVPL
630 
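/* oneVPL dispatcher helpers: narrow the implementation search down to the
 * device backing the supplied native handle by filtering on DeviceID/LUID
 * (D3D11, D3D9) or on the VA-API PCI device ID. */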
631 static int qsv_d3d11_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
632 {
633 #if CONFIG_D3D11VA
634  mfxStatus sts;
635  IDXGIAdapter *pDXGIAdapter;
636  DXGI_ADAPTER_DESC adapterDesc;
637  IDXGIDevice *pDXGIDevice = NULL;
638  HRESULT hr;
639  ID3D11Device *device = handle;
640  mfxVariant impl_value;
641 
642  hr = ID3D11Device_QueryInterface(device, &IID_IDXGIDevice, (void**)&pDXGIDevice);
643  if (SUCCEEDED(hr)) {
644  hr = IDXGIDevice_GetAdapter(pDXGIDevice, &pDXGIAdapter);
645  if (FAILED(hr)) {
646  av_log(ctx, AV_LOG_ERROR, "Error IDXGIDevice_GetAdapter %d\n", hr);
647  goto fail;
648  }
649 
650  hr = IDXGIAdapter_GetDesc(pDXGIAdapter, &adapterDesc);
651  if (FAILED(hr)) {
652  av_log(ctx, AV_LOG_ERROR, "Error IDXGIAdapter_GetDesc %d\n", hr);
653  goto fail;
654  }
655  } else {
656  av_log(ctx, AV_LOG_ERROR, "Error ID3D11Device_QueryInterface %d\n", hr);
657  goto fail;
658  }
659 
660  impl_value.Type = MFX_VARIANT_TYPE_U16;
661  impl_value.Data.U16 = adapterDesc.DeviceId;
662  sts = MFXSetConfigFilterProperty(cfg,
663  (const mfxU8 *)"mfxExtendedDeviceId.DeviceID", impl_value);
664  if (sts != MFX_ERR_NONE) {
665  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
666  "DeviceID property: %d.\n", sts);
667  goto fail;
668  }
669 
670  impl_value.Type = MFX_VARIANT_TYPE_PTR;
671  impl_value.Data.Ptr = &adapterDesc.AdapterLuid;
672  sts = MFXSetConfigFilterProperty(cfg,
673  (const mfxU8 *)"mfxExtendedDeviceId.DeviceLUID", impl_value);
674  if (sts != MFX_ERR_NONE) {
675  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
676  "DeviceLUID property: %d.\n", sts);
677  goto fail;
678  }
679 
680  impl_value.Type = MFX_VARIANT_TYPE_U32;
681  impl_value.Data.U32 = 0x0001;
682  sts = MFXSetConfigFilterProperty(cfg,
683  (const mfxU8 *)"mfxExtendedDeviceId.LUIDDeviceNodeMask", impl_value);
684  if (sts != MFX_ERR_NONE) {
685  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
686  "LUIDDeviceNodeMask property: %d.\n", sts);
687  goto fail;
688  }
689 
690  return 0;
691 
692 fail:
693 #endif
694  return AVERROR_UNKNOWN;
695 }
696 
697 static int qsv_d3d9_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
698 {
699  int ret = AVERROR_UNKNOWN;
700 #if CONFIG_DXVA2
701  mfxStatus sts;
702  IDirect3DDeviceManager9* devmgr = handle;
703  IDirect3DDevice9Ex *device = NULL;
704  HANDLE device_handle = 0;
705  IDirect3D9Ex *d3d9ex = NULL;
706  LUID luid;
707  D3DDEVICE_CREATION_PARAMETERS params;
708  HRESULT hr;
709  mfxVariant impl_value;
710 
711  hr = IDirect3DDeviceManager9_OpenDeviceHandle(devmgr, &device_handle);
712  if (FAILED(hr)) {
713  av_log(ctx, AV_LOG_ERROR, "Error OpenDeviceHandle %d\n", hr);
714  goto fail;
715  }
716 
717  hr = IDirect3DDeviceManager9_LockDevice(devmgr, device_handle, &device, TRUE);
718  if (FAILED(hr)) {
719  av_log(ctx, AV_LOG_ERROR, "Error LockDevice %d\n", hr);
720  goto fail;
721  }
722 
723  hr = IDirect3DDevice9Ex_GetCreationParameters(device, &params);
724  if (FAILED(hr)) {
725  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9_GetCreationParameters %d\n", hr);
726  goto unlock;
727  }
728 
729  hr = IDirect3DDevice9Ex_GetDirect3D(device, &d3d9ex);
730  if (FAILED(hr)) {
731  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9Ex_GetDirect3D %d\n", hr);
732  goto unlock;
733  }
734 
735  hr = IDirect3D9Ex_GetAdapterLUID(d3d9ex, params.AdapterOrdinal, &luid);
736  if (FAILED(hr)) {
737  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9Ex_GetAdapterLUID %d\n", hr);
738  goto unlock;
739  }
740 
741  impl_value.Type = MFX_VARIANT_TYPE_PTR;
742  impl_value.Data.Ptr = &luid;
743  sts = MFXSetConfigFilterProperty(cfg,
744  (const mfxU8 *)"mfxExtendedDeviceId.DeviceLUID", impl_value);
745  if (sts != MFX_ERR_NONE) {
746  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
747  "DeviceLUID property: %d.\n", sts);
748  goto unlock;
749  }
750 
751  ret = 0;
752 
753 unlock:
754  IDirect3DDeviceManager9_UnlockDevice(devmgr, device_handle, FALSE);
755 fail:
756 #endif
757  return ret;
758 }
759 
760 static int qsv_va_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
761 {
762 #if CONFIG_VAAPI
763 #if VA_CHECK_VERSION(1, 15, 0)
764  mfxStatus sts;
765  VADisplay dpy = handle;
766  VAStatus vas;
767  VADisplayAttribute attr = {
768  .type = VADisplayPCIID,
769  };
770  mfxVariant impl_value;
771 
772  vas = vaGetDisplayAttributes(dpy, &attr, 1);
773  if (vas == VA_STATUS_SUCCESS && attr.flags != VA_DISPLAY_ATTRIB_NOT_SUPPORTED) {
774  impl_value.Type = MFX_VARIANT_TYPE_U16;
775  impl_value.Data.U16 = (attr.value & 0xFFFF);
776  sts = MFXSetConfigFilterProperty(cfg,
777  (const mfxU8 *)"mfxExtendedDeviceId.DeviceID", impl_value);
778  if (sts != MFX_ERR_NONE) {
779  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
780  "DeviceID property: %d.\n", sts);
781  goto fail;
782  }
783  } else {
784  av_log(ctx, AV_LOG_ERROR, "libva: Failed to get device id from the driver. Please "
785  "consider upgrading the driver to support VA-API 1.15.0\n");
786  goto fail;
787  }
788 
789  return 0;
790 
791 fail:
792 #else
793  av_log(ctx, AV_LOG_ERROR, "libva: This version of libva doesn't support retrieving "
794  "the device information from the driver. Please consider upgrading libva to "
795  "support VA-API 1.15.0\n");
796 #endif
797 #endif
798  return AVERROR_UNKNOWN;
799 }
800 
801 static int qsv_new_mfx_loader(void *ctx,
802  mfxHDL handle,
803  mfxHandleType handle_type,
804  mfxIMPL implementation,
805  mfxVersion *pver,
806  void **ploader)
807 {
808  mfxStatus sts;
809  mfxLoader loader = NULL;
810  mfxConfig cfg;
811  mfxVariant impl_value;
812 
813  *ploader = NULL;
814  loader = MFXLoad();
815  if (!loader) {
816  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX loader\n");
817  goto fail;
818  }
819 
820  /* Create configurations for implementation */
821  cfg = MFXCreateConfig(loader);
822  if (!cfg) {
823  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX configuration\n");
824  goto fail;
825  }
826 
827  impl_value.Type = MFX_VARIANT_TYPE_U32;
828  impl_value.Data.U32 = (implementation == MFX_IMPL_SOFTWARE) ?
829  MFX_IMPL_TYPE_SOFTWARE : MFX_IMPL_TYPE_HARDWARE;
830  sts = MFXSetConfigFilterProperty(cfg,
831  (const mfxU8 *)"mfxImplDescription.Impl", impl_value);
832  if (sts != MFX_ERR_NONE) {
833  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
834  "property: %d.\n", sts);
835  goto fail;
836  }
837 
838  impl_value.Type = MFX_VARIANT_TYPE_U32;
839  impl_value.Data.U32 = pver->Version;
840  sts = MFXSetConfigFilterProperty(cfg,
841  (const mfxU8 *)"mfxImplDescription.ApiVersion.Version",
842  impl_value);
843  if (sts != MFX_ERR_NONE) {
844  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
845  "property: %d.\n", sts);
846  goto fail;
847  }
848 
849  impl_value.Type = MFX_VARIANT_TYPE_U16;
850  impl_value.Data.U16 = 0x8086; // Intel device only
851  sts = MFXSetConfigFilterProperty(cfg,
852  (const mfxU8 *)"mfxExtendedDeviceId.VendorID", impl_value);
853  if (sts != MFX_ERR_NONE) {
854  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
855  "VendorID property: %d.\n", sts);
856  goto fail;
857  }
858 
859  if (MFX_HANDLE_VA_DISPLAY == handle_type) {
860  if (handle && qsv_va_update_config(ctx, handle, cfg))
861  goto fail;
862 
863  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_VAAPI;
864  } else if (MFX_HANDLE_D3D9_DEVICE_MANAGER == handle_type) {
865  if (handle && qsv_d3d9_update_config(ctx, handle, cfg))
866  goto fail;
867 
868  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_D3D9;
869  } else {
870  if (handle && qsv_d3d11_update_config(ctx, handle, cfg))
871  goto fail;
872 
873  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_D3D11;
874  }
875 
876  impl_value.Type = MFX_VARIANT_TYPE_U32;
877  sts = MFXSetConfigFilterProperty(cfg,
878  (const mfxU8 *)"mfxImplDescription.AccelerationMode", impl_value);
879  if (sts != MFX_ERR_NONE) {
880  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
881  "AccelerationMode property: %d.\n", sts);
882  goto fail;
883  }
884 
885  *ploader = loader;
886 
887  return 0;
888 
889 fail:
890  if (loader)
891  MFXUnload(loader);
892 
893  return AVERROR_UNKNOWN;
894 }
895 
896 static int qsv_create_mfx_session_from_loader(void *ctx, mfxLoader loader, mfxSession *psession)
897 {
898  mfxStatus sts;
899  mfxSession session = NULL;
900  uint32_t impl_idx = 0;
901  mfxVersion ver;
902 
903  while (1) {
904  /* Enumerate all implementations */
905  mfxImplDescription *impl_desc;
906 
907  sts = MFXEnumImplementations(loader, impl_idx,
908  MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
909  (mfxHDL *)&impl_desc);
910  /* Failed to find an available implementation */
911  if (sts == MFX_ERR_NOT_FOUND)
912  break;
913  else if (sts != MFX_ERR_NONE) {
914  impl_idx++;
915  continue;
916  }
917 
918  sts = MFXCreateSession(loader, impl_idx, &session);
919  MFXDispReleaseImplDescription(loader, impl_desc);
920  if (sts == MFX_ERR_NONE)
921  break;
922 
923  impl_idx++;
924  }
925 
926  if (sts != MFX_ERR_NONE) {
927  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX session: %d.\n", sts);
928  goto fail;
929  }
930 
931  sts = MFXQueryVersion(session, &ver);
932  if (sts != MFX_ERR_NONE) {
933  av_log(ctx, AV_LOG_ERROR, "Error querying a MFX session: %d.\n", sts);
934  goto fail;
935  }
936 
937  av_log(ctx, AV_LOG_VERBOSE, "Initialize MFX session: implementation "
938  "version is %d.%d\n", ver.Major, ver.Minor);
939 
940  *psession = session;
941 
942  return 0;
943 
944 fail:
945  if (session)
946  MFXClose(session);
947 
948  return AVERROR_UNKNOWN;
949 }
950 
951 static int qsv_create_mfx_session(void *ctx,
952  mfxHDL handle,
953  mfxHandleType handle_type,
954  mfxIMPL implementation,
955  mfxVersion *pver,
956  mfxSession *psession,
957  void **ploader)
958 {
959  mfxLoader loader = NULL;
960 
961  av_log(ctx, AV_LOG_VERBOSE,
962  "Use Intel(R) oneVPL to create MFX session, API version is "
963  "%d.%d, the required implementation version is %d.%d\n",
964  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
965 
966  if (handle_type != MFX_HANDLE_VA_DISPLAY &&
967  handle_type != MFX_HANDLE_D3D9_DEVICE_MANAGER &&
968  handle_type != MFX_HANDLE_D3D11_DEVICE) {
969  av_log(ctx, AV_LOG_ERROR,
970  "Invalid MFX device handle type\n");
971  return AVERROR(EXDEV);
972  }
973 
974  *psession = NULL;
975 
976  if (!*ploader) {
977  if (qsv_new_mfx_loader(ctx, handle, handle_type, implementation, pver, (void **)&loader))
978  goto fail;
979 
980  av_assert0(loader);
981  } else
982  loader = *ploader; // Use the input mfxLoader to create mfx session
983 
984  if (qsv_create_mfx_session_from_loader(ctx, loader, psession))
985  goto fail;
986 
987  if (!*ploader)
988  *ploader = loader;
989 
990  return 0;
991 
992 fail:
993  if (!*ploader && loader)
994  MFXUnload(loader);
995 
996  return AVERROR_UNKNOWN;
997 }
998 
999 #else
1000 
1001 static int qsv_create_mfx_session(void *ctx,
1002  mfxHDL handle,
1003  mfxHandleType handle_type,
1004  mfxIMPL implementation,
1005  mfxVersion *pver,
1006  mfxSession *psession,
1007  void **ploader)
1008 {
1009  mfxVersion ver;
1010  mfxStatus sts;
1011  mfxSession session = NULL;
1012 
1013  av_log(ctx, AV_LOG_VERBOSE,
1014  "Use Intel(R) Media SDK to create MFX session, API version is "
1015  "%d.%d, the required implementation version is %d.%d\n",
1016  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
1017 
1018  *ploader = NULL;
1019  *psession = NULL;
1020  ver = *pver;
1021  sts = MFXInit(implementation, &ver, &session);
1022  if (sts != MFX_ERR_NONE) {
1023  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1024  "%d.\n", sts);
1025  goto fail;
1026  }
1027 
1028  sts = MFXQueryVersion(session, &ver);
1029  if (sts != MFX_ERR_NONE) {
1030  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: "
1031  "%d.\n", sts);
1032  goto fail;
1033  }
1034 
1035  av_log(ctx, AV_LOG_VERBOSE, "Initialize MFX session: implementation "
1036  "version is %d.%d\n", ver.Major, ver.Minor);
1037 
1038  MFXClose(session);
1039 
1040  sts = MFXInit(implementation, &ver, &session);
1041  if (sts != MFX_ERR_NONE) {
1042  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1043  "%d.\n", sts);
1044  goto fail;
1045  }
1046 
1047  *psession = session;
1048 
1049  return 0;
1050 
1051 fail:
1052  if (session)
1053  MFXClose(session);
1054 
1055  return AVERROR_UNKNOWN;
1056 }
1057 
1058 #endif
1059 
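/* Open a throwaway MFX session with a pass-through VPP pipeline; it is used
 * only to copy frames between system memory and the hardware surfaces
 * (upload/download). */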
1060 static int qsv_init_internal_session(AVHWFramesContext *ctx,
1061  mfxSession *session, int upload)
1062 {
1063  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
1064  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
1065  int opaque = 0;
1066 
1067  mfxFrameAllocator frame_allocator = {
1068  .pthis = ctx,
1069  .Alloc = frame_alloc,
1070  .Lock = frame_lock,
1071  .Unlock = frame_unlock,
1072  .GetHDL = frame_get_hdl,
1073  .Free = frame_free,
1074  };
1075 
1076  mfxVideoParam par;
1077  mfxStatus err;
1078  int ret = AVERROR_UNKNOWN;
1079  AVQSVDeviceContext *hwctx = ctx->device_ctx->hwctx;
1080  /* hwctx->loader is non-NULL for oneVPL user and NULL for non-oneVPL user */
1081  void **loader = &hwctx->loader;
1082 
1083 #if QSV_HAVE_OPAQUE
1084  QSVFramesContext *s = ctx->internal->priv;
1085  opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
1086 #endif
1087 
1088  ret = qsv_create_mfx_session(ctx, device_priv->handle, device_priv->handle_type,
1089  device_priv->impl, &device_priv->ver, session, loader);
1090  if (ret)
1091  goto fail;
1092 
1093  if (device_priv->handle) {
1094  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
1095  device_priv->handle);
1096  if (err != MFX_ERR_NONE) {
1097  ret = AVERROR_UNKNOWN;
1098  goto fail;
1099  }
1100  }
1101 
1102  if (!opaque) {
1103  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
1104  if (err != MFX_ERR_NONE) {
1105  ret = AVERROR_UNKNOWN;
1106  goto fail;
1107  }
1108  }
1109 
1110  memset(&par, 0, sizeof(par));
1111 
1112  if (!opaque) {
1113  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
1114  MFX_IOPATTERN_IN_VIDEO_MEMORY;
1115  }
1116 #if QSV_HAVE_OPAQUE
1117  else {
1118  par.ExtParam = s->ext_buffers;
1119  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
1120  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
1121  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
1122  }
1123 #endif
1124 
1125  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
1126  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
1127  par.AsyncDepth = 1;
1128 
1129  par.vpp.In = frames_hwctx->surfaces[0].Info;
1130 
1131  /* Apparently VPP requires the frame rate to be set to some value, otherwise
1132  * init will fail (probably for the framerate conversion filter). Since we
1133  * are only doing data upload/download here, we just invent an arbitrary
1134  * value */
1135  par.vpp.In.FrameRateExtN = 25;
1136  par.vpp.In.FrameRateExtD = 1;
1137  par.vpp.Out = par.vpp.In;
1138 
1139  err = MFXVideoVPP_Init(*session, &par);
1140  if (err != MFX_ERR_NONE) {
1141  av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session. "
1142  "Surface upload/download will not be possible\n");
1143 
1144  ret = AVERROR_UNKNOWN;
1145  goto fail;
1146  }
1147 
1148  return 0;
1149 
1150 fail:
1151  if (*session)
1152  MFXClose(*session);
1153 
1154  *session = NULL;
1155 
1156  return ret;
1157 }
1158 
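/* Frames-context init: validate the software format, create the internal
 * pool if the caller did not supply one, and prepare either the MemId table
 * (non-opaque) or the opaque-allocation extension buffer. */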
1159 static int qsv_frames_init(AVHWFramesContext *ctx)
1160 {
1161  QSVFramesContext *s = ctx->internal->priv;
1162  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
1163 
1164  int opaque = 0;
1165 
1166  uint32_t fourcc;
1167  int i, ret;
1168 
1169 #if QSV_HAVE_OPAQUE
1170  opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
1171 #endif
1172 
1173  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
1174  if (!fourcc) {
1175  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
1176  return AVERROR(ENOSYS);
1177  }
1178 
1179  if (!ctx->pool) {
1180  ret = qsv_init_pool(ctx, fourcc);
1181  if (ret < 0) {
1182  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
1183  return ret;
1184  }
1185  }
1186 
1187  if (!opaque) {
1188  s->mem_ids = av_calloc(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
1189  if (!s->mem_ids)
1190  return AVERROR(ENOMEM);
1191 
1192  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
1193  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
1194  }
1195 #if QSV_HAVE_OPAQUE
1196  else {
1197  s->surface_ptrs = av_calloc(frames_hwctx->nb_surfaces,
1198  sizeof(*s->surface_ptrs));
1199  if (!s->surface_ptrs)
1200  return AVERROR(ENOMEM);
1201 
1202  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
1203  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
1204 
1205  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
1206  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
1207  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
1208 
1209  s->opaque_alloc.Out = s->opaque_alloc.In;
1210 
1211  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
1212  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
1213 
1214  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
1215  }
1216 #endif
1217 
1218  s->session_download = NULL;
1219  s->session_upload = NULL;
1220 
1221  s->session_download_init = 0;
1222  s->session_upload_init = 0;
1223 
1224 #if HAVE_PTHREADS
1225  pthread_mutex_init(&s->session_lock, NULL);
1226 #endif
1227 
1228  return 0;
1229 }
1230 
1231 static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
1232 {
1233  frame->buf[0] = av_buffer_pool_get(ctx->pool);
1234  if (!frame->buf[0])
1235  return AVERROR(ENOMEM);
1236 
1237  frame->data[3] = frame->buf[0]->data;
1238  frame->format = AV_PIX_FMT_QSV;
1239  frame->width = ctx->width;
1240  frame->height = ctx->height;
1241 
1242  return 0;
1243 }
1244 
1245 static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
1246  enum AVHWFrameTransferDirection dir,
1247  enum AVPixelFormat **formats)
1248 {
1249  enum AVPixelFormat *fmts;
1250 
1251  fmts = av_malloc_array(2, sizeof(*fmts));
1252  if (!fmts)
1253  return AVERROR(ENOMEM);
1254 
1255  fmts[0] = ctx->sw_format;
1256  fmts[1] = AV_PIX_FMT_NONE;
1257 
1258  *formats = fmts;
1259 
1260  return 0;
1261 }
1262 
1263 static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
1264  AVHWFramesContext *src_ctx, int flags)
1265 {
1266  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
1267  int i;
1268 
1269  switch (dst_ctx->device_ctx->type) {
1270 #if CONFIG_VAAPI
1271  case AV_HWDEVICE_TYPE_VAAPI:
1272  {
1273  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
1274  dst_hwctx->surface_ids = av_calloc(src_hwctx->nb_surfaces,
1275  sizeof(*dst_hwctx->surface_ids));
1276  if (!dst_hwctx->surface_ids)
1277  return AVERROR(ENOMEM);
1278  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1279  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1280  dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
1281  }
1282  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1283  }
1284  break;
1285 #endif
1286 #if CONFIG_D3D11VA
1287  case AV_HWDEVICE_TYPE_D3D11VA:
1288  {
1289  D3D11_TEXTURE2D_DESC texDesc;
1290  dst_ctx->initial_pool_size = src_ctx->initial_pool_size;
1291  AVD3D11VAFramesContext *dst_hwctx = dst_ctx->hwctx;
1292  dst_hwctx->texture_infos = av_calloc(src_hwctx->nb_surfaces,
1293  sizeof(*dst_hwctx->texture_infos));
1294  if (!dst_hwctx->texture_infos)
1295  return AVERROR(ENOMEM);
1296  if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
1297  dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
1298  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1299  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1300  dst_hwctx->texture_infos[i].texture = (ID3D11Texture2D*)pair->first;
1301  dst_hwctx->texture_infos[i].index = pair->second == (mfxMemId)MFX_INFINITE ? (intptr_t)0 : (intptr_t)pair->second;
1302  }
1303  ID3D11Texture2D_GetDesc(dst_hwctx->texture_infos[0].texture, &texDesc);
1304  dst_hwctx->BindFlags = texDesc.BindFlags;
1305  }
1306  break;
1307 #endif
1308 #if CONFIG_DXVA2
1309  case AV_HWDEVICE_TYPE_DXVA2:
1310  {
1311  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
1312  dst_hwctx->surfaces = av_calloc(src_hwctx->nb_surfaces,
1313  sizeof(*dst_hwctx->surfaces));
1314  if (!dst_hwctx->surfaces)
1315  return AVERROR(ENOMEM);
1316  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1317  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1318  dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
1319  }
1320  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1321  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
1322  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
1323  else
1324  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
1325  }
1326  break;
1327 #endif
1328  default:
1329  return AVERROR(ENOSYS);
1330  }
1331 
1332  return 0;
1333 }
1334 
1335 static int qsv_map_from(AVHWFramesContext *ctx,
1336  AVFrame *dst, const AVFrame *src, int flags)
1337 {
1338  QSVFramesContext *s = ctx->internal->priv;
1339  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
1340  AVHWFramesContext *child_frames_ctx;
1341  const AVPixFmtDescriptor *desc;
1342  uint8_t *child_data;
1343  AVFrame *dummy;
1344  int ret = 0;
1345 
1346  if (!s->child_frames_ref)
1347  return AVERROR(ENOSYS);
1348  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
1349 
1350  switch (child_frames_ctx->device_ctx->type) {
1351 #if CONFIG_VAAPI
1352  case AV_HWDEVICE_TYPE_VAAPI:
1353  {
1354  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1355  /* pair->first is *VASurfaceID while data[3] in vaapi frame is VASurfaceID, so
1356  * we need this casting for vaapi.
1357  * Add intptr_t to force cast from VASurfaceID(uint) type to pointer(long) type
1358  * to avoid compile warning */
1359  child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)pair->first;
1360  break;
1361  }
1362 #endif
1363 #if CONFIG_D3D11VA
1364  case AV_HWDEVICE_TYPE_D3D11VA:
1365  {
1366  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1367  child_data = pair->first;
1368  break;
1369  }
1370 #endif
1371 #if CONFIG_DXVA2
1372  case AV_HWDEVICE_TYPE_DXVA2:
1373  {
1374  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1375  child_data = pair->first;
1376  break;
1377  }
1378 #endif
1379  default:
1380  return AVERROR(ENOSYS);
1381  }
1382 
1383  if (dst->format == child_frames_ctx->format) {
1384  ret = ff_hwframe_map_create(s->child_frames_ref,
1385  dst, src, NULL, NULL);
1386  if (ret < 0)
1387  return ret;
1388 
1389  dst->width = src->width;
1390  dst->height = src->height;
1391 
1392  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
1393  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1394  dst->data[0] = pair->first;
1395  dst->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
1396  } else {
1397  dst->data[3] = child_data;
1398  }
1399 
1400  return 0;
1401  }
1402 
1403  desc = av_pix_fmt_desc_get(dst->format);
1404  if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
1405  // This only supports mapping to software.
1406  return AVERROR(ENOSYS);
1407  }
1408 
1409  dummy = av_frame_alloc();
1410  if (!dummy)
1411  return AVERROR(ENOMEM);
1412 
1413  dummy->buf[0] = av_buffer_ref(src->buf[0]);
1414  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
1415  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
1416  goto fail;
1417 
1418  dummy->format = child_frames_ctx->format;
1419  dummy->width = src->width;
1420  dummy->height = src->height;
1421 
1422  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
1423  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1424  dummy->data[0] = pair->first;
1425  dummy->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
1426  } else {
1427  dummy->data[3] = child_data;
1428  }
1429 
1430  ret = av_hwframe_map(dst, dummy, flags);
1431 
1432 fail:
1433  av_frame_free(&dummy);
1434 
1435  return ret;
1436 }
1437 
1438 static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
1439  const AVFrame *src)
1440 {
1441  QSVFramesContext *s = ctx->internal->priv;
1442  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
1443  int download = !!src->hw_frames_ctx;
1444  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
1445 
1446  AVFrame *dummy;
1447  int ret;
1448 
1449  dummy = av_frame_alloc();
1450  if (!dummy)
1451  return AVERROR(ENOMEM);
1452 
1453  dummy->format = child_frames_ctx->format;
1454  dummy->width = src->width;
1455  dummy->height = src->height;
1456  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
1457  dummy->data[3] = surf->Data.MemId;
1458  dummy->hw_frames_ctx = s->child_frames_ref;
1459 
1460  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
1461  av_hwframe_transfer_data(dummy, src, 0);
1462 
1463  dummy->buf[0] = NULL;
1464  dummy->data[3] = NULL;
1465  dummy->hw_frames_ctx = NULL;
1466 
1467  av_frame_free(&dummy);
1468 
1469  return ret;
1470 }
1471 
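/* Point the mfxFrameSurface1 plane pointers at the AVFrame's data for the
 * supported packed/planar layouts; only the first plane's linesize is used
 * as the surface pitch. */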
1472 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
1473 {
1474  switch (frame->format) {
1475  case AV_PIX_FMT_NV12:
1476  case AV_PIX_FMT_P010:
1477  surface->Data.Y = frame->data[0];
1478  surface->Data.UV = frame->data[1];
1479  break;
1480 
1481  case AV_PIX_FMT_YUV420P:
1482  surface->Data.Y = frame->data[0];
1483  surface->Data.U = frame->data[1];
1484  surface->Data.V = frame->data[2];
1485  break;
1486 
1487  case AV_PIX_FMT_BGRA:
1488  surface->Data.B = frame->data[0];
1489  surface->Data.G = frame->data[0] + 1;
1490  surface->Data.R = frame->data[0] + 2;
1491  surface->Data.A = frame->data[0] + 3;
1492  break;
1493 #if CONFIG_VAAPI
1494  case AV_PIX_FMT_YUYV422:
1495  surface->Data.Y = frame->data[0];
1496  surface->Data.U = frame->data[0] + 1;
1497  surface->Data.V = frame->data[0] + 3;
1498  break;
1499 
1500  case AV_PIX_FMT_Y210:
1501  surface->Data.Y16 = (mfxU16 *)frame->data[0];
1502  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
1503  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
1504  break;
1505 #endif
1506  default:
1507  return MFX_ERR_UNSUPPORTED;
1508  }
1509  surface->Data.Pitch = frame->linesize[0];
1510  surface->Data.TimeStamp = frame->pts;
1511 
1512  return 0;
1513 }
1514 
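/* Lazily create the upload or download VPP session the first time it is
 * needed, guarded by session_lock when pthreads are available. */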
1515 static int qsv_internal_session_check_init(AVHWFramesContext *ctx, int upload)
1516 {
1517  QSVFramesContext *s = ctx->internal->priv;
1518  atomic_int *inited = upload ? &s->session_upload_init : &s->session_download_init;
1519  mfxSession *session = upload ? &s->session_upload : &s->session_download;
1520  int ret = 0;
1521 
1522  if (atomic_load(inited))
1523  return 0;
1524 
1525 #if HAVE_PTHREADS
1526  pthread_mutex_lock(&s->session_lock);
1527 #endif
1528 
1529  if (!atomic_load(inited)) {
1530  ret = qsv_init_internal_session(ctx, session, upload);
1531  atomic_store(inited, 1);
1532  }
1533 
1534 #if HAVE_PTHREADS
1535  pthread_mutex_unlock(&s->session_lock);
1536 #endif
1537 
1538  return ret;
1539 }
1540 
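/* Download a hardware surface to system memory through the internal VPP
 * session, going through a 16-aligned temporary frame when the destination
 * does not meet libmfx's alignment requirements. */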
1541 static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
1542  const AVFrame *src)
1543 {
1544  QSVFramesContext *s = ctx->internal->priv;
1545  mfxFrameSurface1 out = {{ 0 }};
1546  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
1547 
1548  mfxSyncPoint sync = NULL;
1549  mfxStatus err;
1550  int ret = 0;
1551  /* download to temp frame if the output is not padded as libmfx requires */
1552  AVFrame *tmp_frame = &s->realigned_download_frame;
1553  AVFrame *dst_frame;
1554  int realigned = 0;
1555 
1556  ret = qsv_internal_session_check_init(ctx, 0);
1557  if (ret < 0)
1558  return ret;
1559 
1560  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1561  * Height must be a multiple of 16 for progressive frame sequence and a
1562  * multiple of 32 otherwise.", so align all frames to 16 before downloading. */
1563  if (dst->height & 15 || dst->linesize[0] & 15) {
1564  realigned = 1;
1565  if (tmp_frame->format != dst->format ||
1566  tmp_frame->width != FFALIGN(dst->linesize[0], 16) ||
1567  tmp_frame->height != FFALIGN(dst->height, 16)) {
1568  av_frame_unref(tmp_frame);
1569 
1570  tmp_frame->format = dst->format;
1571  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1572  tmp_frame->height = FFALIGN(dst->height, 16);
1573  ret = av_frame_get_buffer(tmp_frame, 0);
1574  if (ret < 0)
1575  return ret;
1576  }
1577  }
1578 
1579  dst_frame = realigned ? tmp_frame : dst;
1580 
1581  if (!s->session_download) {
1582  if (s->child_frames_ref)
1583  return qsv_transfer_data_child(ctx, dst_frame, src);
1584 
1585  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
1586  return AVERROR(ENOSYS);
1587  }
1588 
1589  out.Info = in->Info;
1590  map_frame_to_surface(dst_frame, &out);
1591 
1592  do {
1593  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
1594  if (err == MFX_WRN_DEVICE_BUSY)
1595  av_usleep(1);
1596  } while (err == MFX_WRN_DEVICE_BUSY);
1597 
1598  if (err < 0 || !sync) {
1599  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
1600  return AVERROR_UNKNOWN;
1601  }
1602 
1603  do {
1604  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
1605  } while (err == MFX_WRN_IN_EXECUTION);
1606  if (err < 0) {
1607  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
1608  return AVERROR_UNKNOWN;
1609  }
1610 
1611  if (realigned) {
1612  tmp_frame->width = dst->width;
1613  tmp_frame->height = dst->height;
1614  ret = av_frame_copy(dst, tmp_frame);
1615  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1616  tmp_frame->height = FFALIGN(dst->height, 16);
1617  if (ret < 0)
1618  return ret;
1619  }
1620 
1621  return 0;
1622 }
1623 
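/* Upload a system-memory frame to a hardware surface through the internal
 * VPP session, copying into a 16-aligned temporary frame (with replicated
 * borders) when the source is not suitably padded. */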
1624 static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
1625  const AVFrame *src)
1626 {
1627  QSVFramesContext *s = ctx->internal->priv;
1628  mfxFrameSurface1 in = {{ 0 }};
1629  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
1630  mfxFrameInfo tmp_info;
1631 
1632  mfxSyncPoint sync = NULL;
1633  mfxStatus err;
1634  int ret = 0;
1635  /* make a copy if the input is not padded as libmfx requires */
1636  AVFrame *tmp_frame = &s->realigned_upload_frame;
1637  const AVFrame *src_frame;
1638  int realigned = 0;
1639 
1640  ret = qsv_internal_session_check_init(ctx, 1);
1641  if (ret < 0)
1642  return ret;
1643 
1644  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1645  * Height must be a multiple of 16 for progressive frame sequence and a
1646  * multiple of 32 otherwise.", so align all frames to 16 before uploading. */
1647  if (src->height & 15 || src->linesize[0] & 15) {
1648  realigned = 1;
1649  if (tmp_frame->format != src->format ||
1650  tmp_frame->width != FFALIGN(src->width, 16) ||
1651  tmp_frame->height != FFALIGN(src->height, 16)) {
1652  av_frame_unref(tmp_frame);
1653 
1654  tmp_frame->format = src->format;
1655  tmp_frame->width = FFALIGN(src->width, 16);
1656  tmp_frame->height = FFALIGN(src->height, 16);
1657  ret = av_frame_get_buffer(tmp_frame, 0);
1658  if (ret < 0)
1659  return ret;
1660  }
1661  ret = av_frame_copy(tmp_frame, src);
1662  if (ret < 0) {
1663  av_frame_unref(tmp_frame);
1664  return ret;
1665  }
1666  ret = qsv_fill_border(tmp_frame, src);
1667  if (ret < 0) {
1668  av_frame_unref(tmp_frame);
1669  return ret;
1670  }
1671 
1672  tmp_info = out->Info;
1673  out->Info.CropW = FFMIN(out->Info.Width, tmp_frame->width);
1674  out->Info.CropH = FFMIN(out->Info.Height, tmp_frame->height);
1675  }
1676 
1677  src_frame = realigned ? tmp_frame : src;
1678 
1679  if (!s->session_upload) {
1680  if (s->child_frames_ref)
1681  return qsv_transfer_data_child(ctx, dst, src_frame);
1682 
1683  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
1684  return AVERROR(ENOSYS);
1685  }
1686 
1687  in.Info = out->Info;
1688  map_frame_to_surface(src_frame, &in);
1689 
1690  do {
1691  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
1692  if (err == MFX_WRN_DEVICE_BUSY)
1693  av_usleep(1);
1694  } while (err == MFX_WRN_DEVICE_BUSY);
1695 
1696  if (err < 0 || !sync) {
1697  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
1698  return AVERROR_UNKNOWN;
1699  }
1700 
1701  do {
1702  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
1703  } while (err == MFX_WRN_IN_EXECUTION);
1704  if (err < 0) {
1705  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
1706  return AVERROR_UNKNOWN;
1707  }
1708 
1709  if (realigned) {
1710  out->Info.CropW = tmp_info.CropW;
1711  out->Info.CropH = tmp_info.CropH;
1712  }
1713 
1714  return 0;
1715 }
1716 
1717 static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
1718  AVHWFramesContext *src_ctx, int flags)
1719 {
1720  QSVFramesContext *s = dst_ctx->internal->priv;
1721  AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
1722  int i;
1723 
1724  if (src_ctx->initial_pool_size == 0) {
1725  av_log(dst_ctx, AV_LOG_ERROR, "Only fixed-size pools can be "
1726  "mapped to QSV frames.\n");
1727  return AVERROR(EINVAL);
1728  }
1729 
1730  switch (src_ctx->device_ctx->type) {
1731 #if CONFIG_VAAPI
1732  case AV_HWDEVICE_TYPE_VAAPI:
1733  {
1734  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
1735  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1736  sizeof(*s->handle_pairs_internal));
1737  if (!s->handle_pairs_internal)
1738  return AVERROR(ENOMEM);
1739  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1740  sizeof(*s->surfaces_internal));
1741  if (!s->surfaces_internal)
1742  return AVERROR(ENOMEM);
1743  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1744  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1745  s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
1746  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1747  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1748  }
1749  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1750  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1751  }
1752  break;
1753 #endif
1754 #if CONFIG_D3D11VA
1755  case AV_HWDEVICE_TYPE_D3D11VA:
1756  {
1757  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
1758  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1759  sizeof(*s->handle_pairs_internal));
1760  if (!s->handle_pairs_internal)
1761  return AVERROR(ENOMEM);
1762  s->surfaces_internal = av_calloc(src_ctx->initial_pool_size,
1763  sizeof(*s->surfaces_internal));
1764  if (!s->surfaces_internal)
1765  return AVERROR(ENOMEM);
1766  for (i = 0; i < src_ctx->initial_pool_size; i++) {
1767  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1768  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
1769  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1770  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1771  } else {
1772  s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
1773  }
1774  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1775  }
1776  dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
1777  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1778  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1779  } else {
1780  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1781  }
1782  }
1783  break;
1784 #endif
1785 #if CONFIG_DXVA2
1786  case AV_HWDEVICE_TYPE_DXVA2:
1787  {
1788  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1789  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1790  sizeof(*s->handle_pairs_internal));
1791  if (!s->handle_pairs_internal)
1792  return AVERROR(ENOMEM);
1793  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1794  sizeof(*s->surfaces_internal));
1795  if (!s->surfaces_internal)
1796  return AVERROR(ENOMEM);
1797  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1798  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1799  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
1800  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1801  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1802  }
1803  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1804  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1805  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1806  else
1807  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1808  }
1809  break;
1810 #endif
1811  default:
1812  return AVERROR(ENOSYS);
1813  }
1814 
1815  dst_hwctx->surfaces = s->surfaces_internal;
1816 
1817  return 0;
1818 }
1819 
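/* Map a child-API frame (VAAPI/D3D11/DXVA2) to the QSV surface that wraps
 * the same native handle; the surface must already belong to this mapped
 * frames context, otherwise the mapping is rejected. */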
1820 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1821  AVFrame *dst, const AVFrame *src, int flags)
1822 {
1823  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1824  int i, err, index = -1;
1825 
1826  for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
1827  switch(src->format) {
1828 #if CONFIG_VAAPI
1829  case AV_PIX_FMT_VAAPI:
1830  {
1831  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1832  if (*(VASurfaceID*)pair->first == (VASurfaceID)src->data[3]) {
1833  index = i;
1834  break;
1835  }
1836  }
1837 #endif
1838 #if CONFIG_D3D11VA
1839  case AV_PIX_FMT_D3D11:
1840  {
1841  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1842  if (pair->first == src->data[0]
1843  && (pair->second == src->data[1]
1844  || (pair->second == (mfxMemId)MFX_INFINITE && src->data[1] == (uint8_t *)0))) {
1845  index = i;
1846  break;
1847  }
1848  }
1849 #endif
1850 #if CONFIG_DXVA2
1851  case AV_PIX_FMT_DXVA2_VLD:
1852  {
1853  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1854  if (pair->first == src->data[3]) {
1855  index = i;
1856  break;
1857  }
1858  }
1859 #endif
1860  }
1861  }
1862  if (index < 0) {
1863  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1864  "is not in the mapped frames context.\n");
1865  return AVERROR(EINVAL);
1866  }
1867 
1868  err = ff_hwframe_map_create(dst->hw_frames_ctx,
1869  dst, src, NULL, NULL);
1870  if (err)
1871  return err;
1872 
1873  dst->width = src->width;
1874  dst->height = src->height;
1875  dst->data[3] = (uint8_t*)&hwctx->surfaces[index];
1876 
1877  return 0;
1878 }
1879 
1880 static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
1881  const void *hwconfig,
1882  AVHWFramesConstraints *constraints)
1883 {
1884  int i;
1885 
1886  constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
1887  sizeof(*constraints->valid_sw_formats));
1888  if (!constraints->valid_sw_formats)
1889  return AVERROR(ENOMEM);
1890 
1891  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1892  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
1893  constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
1894 
1895  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
1896  if (!constraints->valid_hw_formats)
1897  return AVERROR(ENOMEM);
1898 
1899  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
1900  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1901 
1902  return 0;
1903 }
1904 
1905 static void qsv_device_free(AVHWDeviceContext *ctx)
1906 {
1907  AVQSVDeviceContext *hwctx = ctx->hwctx;
1908  QSVDevicePriv *priv = ctx->user_opaque;
1909 
1910  if (hwctx->session)
1911  MFXClose(hwctx->session);
1912 
1913  if (hwctx->loader)
1914  MFXUnload(hwctx->loader);
1915  av_buffer_unref(&priv->child_device_ctx);
1916  av_freep(&priv);
1917 }
1918 
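/* Translate the user-supplied device string ("auto", "hw", "hw2", a raw
 * mfxIMPL number, ...) into an mfxIMPL value, adding the VIA_D3D11/VIA_D3D9
 * hint for the corresponding child device types. */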
1919 static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
1920 {
1921  static const struct {
1922  const char *name;
1923  mfxIMPL impl;
1924  } impl_map[] = {
1925  { "auto", MFX_IMPL_AUTO },
1926  { "sw", MFX_IMPL_SOFTWARE },
1927  { "hw", MFX_IMPL_HARDWARE },
1928  { "auto_any", MFX_IMPL_AUTO_ANY },
1929  { "hw_any", MFX_IMPL_HARDWARE_ANY },
1930  { "hw2", MFX_IMPL_HARDWARE2 },
1931  { "hw3", MFX_IMPL_HARDWARE3 },
1932  { "hw4", MFX_IMPL_HARDWARE4 },
1933  };
1934 
1935  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
1936  int i;
1937 
1938  if (device) {
1939  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
1940  if (!strcmp(device, impl_map[i].name)) {
1941  impl = impl_map[i].impl;
1942  break;
1943  }
1944  if (i == FF_ARRAY_ELEMS(impl_map))
1945  impl = strtol(device, NULL, 0);
1946  }
1947 
1948  if (impl != MFX_IMPL_SOFTWARE) {
1949  if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
1950  impl |= MFX_IMPL_VIA_D3D11;
1951  else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
1952  impl |= MFX_IMPL_VIA_D3D9;
1953  }
1954 
1955  return impl;
1956 }
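
The device string parsed here is the one passed to av_hwdevice_ctx_create() for AV_HWDEVICE_TYPE_QSV (the part after the colon in -init_hw_device qsv:hw); strings not in impl_map are treated as a numeric MFX_IMPL_* value via strtol(). A minimal sketch with an illustrative wrapper name:

#include <libavutil/hwcontext.h>

/* Create a QSV device with the plain hardware implementation; the "hw"
 * string below is handled by choose_implementation() (illustrative helper). */
static int create_qsv_device_hw(AVBufferRef **device_ref)
{
    return av_hwdevice_ctx_create(device_ref, AV_HWDEVICE_TYPE_QSV,
                                  "hw", NULL, 0);
}
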
1957 
1958 static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
1959  mfxIMPL implementation,
1960  AVHWDeviceContext *child_device_ctx,
1961  int flags)
1962 {
1963  AVQSVDeviceContext *hwctx = ctx->hwctx;
1964 
1965  mfxVersion ver = { { 3, 1 } };
1966  mfxHDL handle;
1967  mfxHandleType handle_type;
1968  mfxStatus err;
1969  int ret;
1970 
1971  switch (child_device_ctx->type) {
1972 #if CONFIG_VAAPI
1973  case AV_HWDEVICE_TYPE_VAAPI:
1974  {
1975  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1976  handle_type = MFX_HANDLE_VA_DISPLAY;
1977  handle = (mfxHDL)child_device_hwctx->display;
1978  }
1979  break;
1980 #endif
1981 #if CONFIG_D3D11VA
1982  case AV_HWDEVICE_TYPE_D3D11VA:
1983  {
1984  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1985  handle_type = MFX_HANDLE_D3D11_DEVICE;
1986  handle = (mfxHDL)child_device_hwctx->device;
1987  }
1988  break;
1989 #endif
1990 #if CONFIG_DXVA2
1991  case AV_HWDEVICE_TYPE_DXVA2:
1992  {
1993  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1994  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1995  handle = (mfxHDL)child_device_hwctx->devmgr;
1996  }
1997  break;
1998 #endif
1999  default:
2000  ret = AVERROR(ENOSYS);
2001  goto fail;
2002  }
2003 
2004  ret = qsv_create_mfx_session(ctx, handle, handle_type, implementation, &ver,
2005  &hwctx->session, &hwctx->loader);
2006  if (ret)
2007  goto fail;
2008 
2009  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
2010  if (err != MFX_ERR_NONE) {
2011  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
2012  "%d\n", err);
2013  ret = AVERROR_UNKNOWN;
2014  goto fail;
2015  }
2016 
2017  return 0;
2018 
2019 fail:
2020  if (hwctx->session)
2021  MFXClose(hwctx->session);
2022 
2023  if (hwctx->loader)
2024  MFXUnload(hwctx->loader);
2025 
2026  hwctx->session = NULL;
2027  hwctx->loader = NULL;
2028  return ret;
2029 }
2030 
2031 static int qsv_device_derive(AVHWDeviceContext *ctx,
2032  AVHWDeviceContext *child_device_ctx,
2033  AVDictionary *opts, int flags)
2034 {
2035  mfxIMPL impl;
2036  impl = choose_implementation("hw_any", child_device_ctx->type);
2037  return qsv_device_derive_from_child(ctx, impl,
2038  child_device_ctx, flags);
2039 }
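
qsv_device_derive() is reached through av_hwdevice_ctx_create_derived() on an existing child device. A minimal sketch deriving QSV from an already opened VAAPI device (wrapper name is illustrative):

#include <libavutil/hwcontext.h>

/* Derive a QSV device from an already opened VAAPI device; this calls into
 * qsv_device_derive() above (illustrative helper). */
static int derive_qsv_from_vaapi(AVBufferRef *vaapi_device_ref,
                                 AVBufferRef **qsv_device_ref)
{
    return av_hwdevice_ctx_create_derived(qsv_device_ref, AV_HWDEVICE_TYPE_QSV,
                                          vaapi_device_ref, 0);
}
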
2040 
2041 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
2042  AVDictionary *opts, int flags)
2043 {
2044  QSVDevicePriv *priv;
2045  enum AVHWDeviceType child_device_type;
2046  AVHWDeviceContext *child_device;
2047  AVDictionary *child_device_opts;
2048  AVDictionaryEntry *e;
2049 
2050  mfxIMPL impl;
2051  int ret;
2052 
2053  priv = av_mallocz(sizeof(*priv));
2054  if (!priv)
2055  return AVERROR(ENOMEM);
2056 
2057  ctx->user_opaque = priv;
2058  ctx->free = qsv_device_free;
2059 
2060  e = av_dict_get(opts, "child_device_type", NULL, 0);
2061  if (e) {
2062  child_device_type = av_hwdevice_find_type_by_name(e->value);
2063  if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
2064  av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
2065  "\"%s\".\n", e->value);
2066  return AVERROR(EINVAL);
2067  }
2068  } else if (CONFIG_VAAPI) {
2069  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
2070 #if QSV_ONEVPL
2071  } else if (CONFIG_D3D11VA) { // Use D3D11 by default if d3d11va is enabled
2072  av_log(ctx, AV_LOG_VERBOSE,
2073  "Defaulting child_device_type to AV_HWDEVICE_TYPE_D3D11VA for oneVPL."
2074  "Please explicitly set child device type via \"-init_hw_device\" "
2075  "option if needed.\n");
2076  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
2077  } else if (CONFIG_DXVA2) {
2078  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
2079 #else
2080  } else if (CONFIG_DXVA2) {
2081  av_log(ctx, AV_LOG_WARNING,
2082  "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
2083  "with old commandlines. This behaviour will be removed "
2084  "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
2085  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
2086  } else if (CONFIG_D3D11VA) {
2087  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
2088 #endif
2089  } else {
2090  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
2091  return AVERROR(ENOSYS);
2092  }
2093 
2094  child_device_opts = NULL;
2095  switch (child_device_type) {
2096 #if CONFIG_VAAPI
2097  case AV_HWDEVICE_TYPE_VAAPI:
2098  {
2099  // libmfx does not actually implement VAAPI properly, rather it
2100  // depends on the specific behaviour of a matching iHD driver when
2101  // used on recent Intel hardware. Set options to the VAAPI device
2102  // creation so that we should pick a usable setup by default if
2103  // possible, even when multiple devices and drivers are available.
2104  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
2105  av_dict_set(&child_device_opts, "driver", "iHD", 0);
2106  }
2107  break;
2108 #endif
2109 #if CONFIG_D3D11VA
2110  case AV_HWDEVICE_TYPE_D3D11VA:
2111  break;
2112 #endif
2113 #if CONFIG_DXVA2
2114  case AV_HWDEVICE_TYPE_DXVA2:
2115 #if QSV_ONEVPL
2116  {
2117  av_log(ctx, AV_LOG_WARNING,
2118  "d3d11va is not available or child device type is set to dxva2 "
2119  "explicitly for oneVPL.\n");
2120  }
2121 #endif
2122  break;
2123 #endif
2124  default:
2125  {
2126  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
2127  return AVERROR(ENOSYS);
2128  }
2129  break;
2130  }
2131 
2132  e = av_dict_get(opts, "child_device", NULL, 0);
2133  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
2134  e ? e->value : NULL, child_device_opts, 0);
2135 
2136  av_dict_free(&child_device_opts);
2137  if (ret < 0)
2138  return ret;
2139 
2140  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
2141 
2142  impl = choose_implementation(device, child_device_type);
2143 
2144  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
2145 }
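
qsv_device_create() consumes the "child_device_type" and "child_device" options from the dictionary passed to av_hwdevice_ctx_create(). A minimal sketch forcing a VAAPI child on a specific DRM render node; the node path is only an example and the wrapper name is illustrative:

#include <libavutil/dict.h>
#include <libavutil/hwcontext.h>

/* Create a QSV device on a VAAPI child, pinning the child device node;
 * the option names match those parsed by qsv_device_create() above
 * (illustrative helper). */
static int create_qsv_over_vaapi(AVBufferRef **device_ref)
{
    AVDictionary *opts = NULL;
    int ret;

    av_dict_set(&opts, "child_device_type", "vaapi", 0);
    av_dict_set(&opts, "child_device", "/dev/dri/renderD128", 0);

    ret = av_hwdevice_ctx_create(device_ref, AV_HWDEVICE_TYPE_QSV,
                                 NULL, opts, 0);
    av_dict_free(&opts);
    return ret;
}
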
2146 
2147 const HWContextType ff_hwcontext_type_qsv = {
2148  .type = AV_HWDEVICE_TYPE_QSV,
2149  .name = "QSV",
2150 
2151  .device_hwctx_size = sizeof(AVQSVDeviceContext),
2152  .device_priv_size = sizeof(QSVDeviceContext),
2153  .frames_hwctx_size = sizeof(AVQSVFramesContext),
2154  .frames_priv_size = sizeof(QSVFramesContext),
2155 
2156  .device_create = qsv_device_create,
2157  .device_derive = qsv_device_derive,
2158  .device_init = qsv_device_init,
2159  .frames_get_constraints = qsv_frames_get_constraints,
2160  .frames_init = qsv_frames_init,
2161  .frames_uninit = qsv_frames_uninit,
2162  .frames_get_buffer = qsv_get_buffer,
2163  .transfer_get_formats = qsv_transfer_get_formats,
2164  .transfer_data_to = qsv_transfer_data_to,
2165  .transfer_data_from = qsv_transfer_data_from,
2166  .map_to = qsv_map_to,
2167  .map_from = qsv_map_from,
2168  .frames_derive_to = qsv_frames_derive_to,
2169  .frames_derive_from = qsv_frames_derive_from,
2170 
2171  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
2172 };
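
A minimal end-to-end sketch of the frame-pool side of this table: allocate an AVHWFramesContext on a QSV device and pull one surface from the pool, which exercises qsv_frames_init() and qsv_get_buffer(). The helper name, dimensions, sw_format and pool size are example values:

#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Allocate a QSV frame pool and take one surface from it
 * (illustrative helper; sizes are example values). */
static int alloc_one_qsv_surface(AVBufferRef *qsv_device_ref, AVFrame **out)
{
    AVBufferRef *frames_ref = av_hwframe_ctx_alloc(qsv_device_ref);
    AVHWFramesContext *frames_ctx;
    AVFrame *frame;
    int ret;

    if (!frames_ref)
        return AVERROR(ENOMEM);

    frames_ctx                    = (AVHWFramesContext*)frames_ref->data;
    frames_ctx->format            = AV_PIX_FMT_QSV;
    frames_ctx->sw_format         = AV_PIX_FMT_NV12;
    frames_ctx->width             = 1920;
    frames_ctx->height            = 1080;
    frames_ctx->initial_pool_size = 32;

    ret = av_hwframe_ctx_init(frames_ref);
    if (ret < 0)
        goto done;

    frame = av_frame_alloc();
    if (!frame) {
        ret = AVERROR(ENOMEM);
        goto done;
    }

    ret = av_hwframe_get_buffer(frames_ref, frame, 0);
    if (ret < 0) {
        av_frame_free(&frame);
        goto done;
    }

    *out = frame;

done:
    av_buffer_unref(&frames_ref);
    return ret;
}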