FFmpeg
hwcontext_qsv.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdint.h>
20 #include <string.h>
21 
22 #include <mfx/mfxvideo.h>
23 
24 #include "config.h"
25 
26 #if HAVE_PTHREADS
27 #include <pthread.h>
28 #endif
29 
30 #define COBJMACROS
31 #if CONFIG_VAAPI
32 #include "hwcontext_vaapi.h"
33 #endif
34 #if CONFIG_D3D11VA
35 #include "hwcontext_d3d11va.h"
36 #endif
37 #if CONFIG_DXVA2
38 #include "hwcontext_dxva2.h"
39 #endif
40 
41 #include "buffer.h"
42 #include "common.h"
43 #include "hwcontext.h"
44 #include "hwcontext_internal.h"
45 #include "hwcontext_qsv.h"
46 #include "mem.h"
47 #include "pixfmt.h"
48 #include "pixdesc.h"
49 #include "time.h"
50 
/* True if the libmfx headers we build against are at least MAJOR.MINOR.
 * The && clause is explicitly parenthesized: the original relied on
 * precedence (a || b && c), which is fragile and trips -Wparentheses. */
#define QSV_VERSION_ATLEAST(MAJOR, MINOR)                   \
    (MFX_VERSION_MAJOR > (MAJOR) ||                         \
     (MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR)))

/* Extract the "via" bits (0x0f00) of an mfxIMPL value, i.e. which child
 * device API (VAAPI/D3D9/D3D11) the implementation is accessed through. */
#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
56 
/* Private state for a QSV device created by device_create (truncated in this
 * extraction: members and closing brace were elided by the doc generator). */
57 typedef struct QSVDevicePriv {
60
/* Per-device data derived from the mfx session: the native handle (VADisplay /
 * D3D device), its type, and the implementation/version reported by libmfx. */
61 typedef struct QSVDeviceContext {
62  mfxHDL handle;
63  mfxHandleType handle_type;
64  mfxVersion ver;
65  mfxIMPL impl;
66
70
/* Per-frames-context state: lazily created upload/download VPP sessions,
 * the internal surface pool, and the buffers handed to the mfx allocator. */
71 typedef struct QSVFramesContext {
72  mfxSession session_download;
74  mfxSession session_upload;
76 #if HAVE_PTHREADS
/* Guard lazy init of the upload/download sessions across threads. */
77  pthread_mutex_t session_lock;
78  pthread_cond_t session_cond;
79 #endif
80
82  mfxFrameSurface1 *surfaces_internal;
83  mfxHDLPair *handle_pairs_internal;
85
86  // used in the frame allocator for non-opaque surfaces
87  mfxMemId *mem_ids;
88  // used in the opaque alloc request for opaque surfaces
89  mfxFrameSurface1 **surface_ptrs;
90
91  mfxExtOpaqueSurfaceAlloc opaque_alloc;
92  mfxExtBuffer *ext_buffers[1];
94
/* AVPixelFormat <-> MFX fourcc mapping table (the pix_fmt struct member and
 * some entry fragments were elided by the doc extractor). */
95 static const struct {
97  uint32_t fourcc;
99  { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
100  { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
101  { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
102  { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
103 #if CONFIG_VAAPI
105  MFX_FOURCC_YUY2 },
106 #if QSV_VERSION_ATLEAST(1, 27)
107  { AV_PIX_FMT_Y210,
108  MFX_FOURCC_Y210 },
109 #endif
110 #endif
111 };
112 
/* Look up the MFX fourcc for a pixel format in supported_pixel_formats;
 * returns 0 when the format is unsupported. (The signature line and the
 * per-entry comparison were elided by the doc extractor.) */
114 {
115  int i;
116  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
118  return supported_pixel_formats[i].fourcc;
119  }
/* 0 is not a valid MFX fourcc, so it doubles as the "not found" value. */
120  return 0;
121 }
122 
123 #if CONFIG_D3D11VA
124 static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
125 {
126  uint32_t bind_flags = 0;
127 
128  if ((mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) && (mem_type & MFX_MEMTYPE_INTERNAL_FRAME))
129  bind_flags = D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER;
130  else
131  bind_flags = D3D11_BIND_DECODER;
132 
133  if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
134  bind_flags = D3D11_BIND_RENDER_TARGET;
135 
136  return bind_flags;
137 }
138 #endif
139 
/* Query the session's implementation/version and retrieve the native child
 * device handle (VADisplay, ID3D11Device or IDirect3DDeviceManager9) so
 * child device/frames contexts can be derived later. Failure to get a
 * handle is not fatal. (The signature line and the pix_fmt assignments
 * inside each #if branch were elided by the doc extractor.) */
141 {
142  AVQSVDeviceContext *hwctx = ctx->hwctx;
143  QSVDeviceContext *s = ctx->internal->priv;
144  int hw_handle_supported = 0;
145  mfxHandleType handle_type;
146  enum AVHWDeviceType device_type;
147  enum AVPixelFormat pix_fmt;
148  mfxStatus err;
149
150  err = MFXQueryIMPL(hwctx->session, &s->impl);
151  if (err == MFX_ERR_NONE)
152  err = MFXQueryVersion(hwctx->session, &s->ver);
153  if (err != MFX_ERR_NONE) {
154  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
155  return AVERROR_UNKNOWN;
156  }
157
/* Pick the handle/device type matching how libmfx accesses the hardware. */
158  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
159 #if CONFIG_VAAPI
160  handle_type = MFX_HANDLE_VA_DISPLAY;
161  device_type = AV_HWDEVICE_TYPE_VAAPI;
163  hw_handle_supported = 1;
164 #endif
165  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
166 #if CONFIG_D3D11VA
167  handle_type = MFX_HANDLE_D3D11_DEVICE;
168  device_type = AV_HWDEVICE_TYPE_D3D11VA;
170  hw_handle_supported = 1;
171 #endif
172  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
173 #if CONFIG_DXVA2
174  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
175  device_type = AV_HWDEVICE_TYPE_DXVA2;
177  hw_handle_supported = 1;
178 #endif
179  }
180
181  if (hw_handle_supported) {
182  err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
183  if (err == MFX_ERR_NONE) {
184  s->handle_type = handle_type;
185  s->child_device_type = device_type;
/* NOTE(review): pix_fmt is stored here but the lines assigning it (elided
 * orig lines 162/169/176) are not visible in this extraction — confirm
 * against the upstream file. */
186  s->child_pix_fmt = pix_fmt;
187  }
188  }
/* Not having a handle only disables child-context features, not the device. */
189  if (!s->handle) {
190  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
191  "from the session\n");
192  }
193  return 0;
194 }
195 
/* Tear down everything qsv_frames_init/qsv_init_pool created: the internal
 * upload/download VPP sessions, the pthread synchronization objects, the
 * allocator arrays and the child frames reference. (The signature line was
 * elided by the doc extractor.) */
197 {
198  QSVFramesContext *s = ctx->internal->priv;
199
200  if (s->session_download) {
201  MFXVideoVPP_Close(s->session_download);
202  MFXClose(s->session_download);
203  }
204  s->session_download = NULL;
205  s->session_download_init = 0;
206
207  if (s->session_upload) {
208  MFXVideoVPP_Close(s->session_upload);
209  MFXClose(s->session_upload);
210  }
211  s->session_upload = NULL;
212  s->session_upload_init = 0;
213
214 #if HAVE_PTHREADS
215  pthread_mutex_destroy(&s->session_lock);
216  pthread_cond_destroy(&s->session_cond);
217 #endif
218
219  av_freep(&s->mem_ids);
220  av_freep(&s->surface_ptrs);
221  av_freep(&s->surfaces_internal);
222  av_freep(&s->handle_pairs_internal);
223  av_buffer_unref(&s->child_frames_ref);
224 }
225 
/* No-op AVBuffer free callback: the buffers handed out by qsv_pool_alloc
 * point into arrays owned by QSVFramesContext, which are freed in
 * qsv_frames_uninit, so there is nothing to release per-buffer. */
226 static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
227 {
228 }
229 
/* Buffer-pool allocator: hand out the next unused entry of the fixed
 * internal surface array, or NULL once the pool is exhausted (QSV pools
 * are fixed-size). (The line binding 'ctx' from 'opaque', orig line 232,
 * was elided by the doc extractor.) */
230 static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
231 {
233  QSVFramesContext *s = ctx->internal->priv;
234  AVQSVFramesContext *hwctx = ctx->hwctx;
235
236  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
237  s->nb_surfaces_used++;
/* Wrap the matching handle pair first; its AVBufferRef is intentionally
 * discarded — the dummy release callback makes ownership a no-op. */
238  av_buffer_create((uint8_t*)(s->handle_pairs_internal + s->nb_surfaces_used - 1),
239  sizeof(*s->handle_pairs_internal), qsv_pool_release_dummy, NULL, 0);
240  return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
241  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
242  }
243
244  return NULL;
245 }
246 
/* Create a child (VAAPI/D3D11/DXVA2) device + frames context backed by the
 * session's native handle, allocate the real surfaces there, and wire each
 * child surface into our mfxFrameSurface1 array via mfxHDLPair MemIds.
 * (The signature and the av_log call at orig line 262 were elided by the
 * doc extractor.) */
248 {
249  AVQSVFramesContext *hwctx = ctx->hwctx;
250  QSVFramesContext *s = ctx->internal->priv;
251  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
252
253  AVBufferRef *child_device_ref = NULL;
254  AVBufferRef *child_frames_ref = NULL;
255
256  AVHWDeviceContext *child_device_ctx;
257  AVHWFramesContext *child_frames_ctx;
258
259  int i, ret = 0;
260
261  if (!device_priv->handle) {
263  "Cannot create a non-opaque internal surface pool without "
264  "a hardware handle\n");
265  return AVERROR(EINVAL);
266  }
267
268  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
269  if (!child_device_ref)
270  return AVERROR(ENOMEM);
271  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
272
/* Hand the session's native handle to the child device context. */
273 #if CONFIG_VAAPI
274  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
275  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
276  child_device_hwctx->display = (VADisplay)device_priv->handle;
277  }
278 #endif
279 #if CONFIG_D3D11VA
280  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
281  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
/* Extra reference: the child device context will release it on free. */
282  ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
283  child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
284  }
285 #endif
286 #if CONFIG_DXVA2
287  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
288  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
289  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
290  }
291 #endif
292
293  ret = av_hwdevice_ctx_init(child_device_ref);
294  if (ret < 0) {
295  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
296  goto fail;
297  }
298
299  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
300  if (!child_frames_ref) {
301  ret = AVERROR(ENOMEM);
302  goto fail;
303  }
304  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
305
/* Mirror our geometry into the child pool; 16-alignment matches the
 * surface Info set up in qsv_init_surface. */
306  child_frames_ctx->format = device_priv->child_pix_fmt;
307  child_frames_ctx->sw_format = ctx->sw_format;
308  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
309  child_frames_ctx->width = FFALIGN(ctx->width, 16);
310  child_frames_ctx->height = FFALIGN(ctx->height, 16);
311
312 #if CONFIG_D3D11VA
313  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
314  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
315  if (hwctx->frame_type == 0)
316  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
317  if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
318  child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
319  child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
320  }
321 #endif
322 #if CONFIG_DXVA2
323  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
324  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
325  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
326  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
327  else
328  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
329  }
330 #endif
331
332  ret = av_hwframe_ctx_init(child_frames_ref);
333  if (ret < 0) {
334  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
335  goto fail;
336  }
337
/* Point each internal mfx surface at the corresponding child surface via
 * an mfxHDLPair; MFX_INFINITE in .second marks "no secondary handle". */
338 #if CONFIG_VAAPI
339  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
340  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
341  for (i = 0; i < ctx->initial_pool_size; i++) {
342  s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
343  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
344  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
345  }
346  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
347  }
348 #endif
349 #if CONFIG_D3D11VA
350  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
351  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
352  for (i = 0; i < ctx->initial_pool_size; i++) {
353  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
354  if(child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
355  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
356  } else {
/* Array-texture case: the pair's second element is the slice index. */
357  s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
358  }
359  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
360  }
361  if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
362  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
363  } else {
364  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
365  }
366  }
367 #endif
368 #if CONFIG_DXVA2
369  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
370  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
371  for (i = 0; i < ctx->initial_pool_size; i++) {
372  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
373  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
374  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
375  }
376  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
391 
392 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
393 {
394  const AVPixFmtDescriptor *desc;
395  uint32_t fourcc;
396 
397  desc = av_pix_fmt_desc_get(ctx->sw_format);
398  if (!desc)
399  return AVERROR(EINVAL);
400 
401  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
402  if (!fourcc)
403  return AVERROR(EINVAL);
404 
405  surf->Info.BitDepthLuma = desc->comp[0].depth;
406  surf->Info.BitDepthChroma = desc->comp[0].depth;
407  surf->Info.Shift = desc->comp[0].depth > 8;
408 
409  if (desc->log2_chroma_w && desc->log2_chroma_h)
410  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
411  else if (desc->log2_chroma_w)
412  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
413  else
414  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
415 
416  surf->Info.FourCC = fourcc;
417  surf->Info.Width = FFALIGN(ctx->width, 16);
418  surf->Info.CropW = ctx->width;
419  surf->Info.Height = FFALIGN(ctx->height, 16);
420  surf->Info.CropH = ctx->height;
421  surf->Info.FrameRateExtN = 25;
422  surf->Info.FrameRateExtD = 1;
423  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
424 
425  return 0;
426 }
427 
/* Build the internal fixed-size surface pool: allocate the surface and
 * handle-pair arrays, initialize each surface's Info, create the child
 * context for non-opaque memory, and install the custom buffer pool.
 * (The signature, the qsv_init_child_ctx call at orig line 457 and the
 * remaining av_buffer_pool_init2 arguments at orig 463 were elided by
 * the doc extractor.) */
429 {
430  QSVFramesContext *s = ctx->internal->priv;
431  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
432
433  int i, ret = 0;
434
/* QSV cannot grow a pool on demand — a fixed size is mandatory. */
435  if (ctx->initial_pool_size <= 0) {
436  av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
437  return AVERROR(EINVAL);
438  }
439
440  s->handle_pairs_internal = av_mallocz_array(ctx->initial_pool_size,
441  sizeof(*s->handle_pairs_internal));
442  if (!s->handle_pairs_internal)
443  return AVERROR(ENOMEM);
444
445  s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
446  sizeof(*s->surfaces_internal));
447  if (!s->surfaces_internal)
448  return AVERROR(ENOMEM);
449
450  for (i = 0; i < ctx->initial_pool_size; i++) {
451  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
452  if (ret < 0)
453  return ret;
454  }
455
/* Non-opaque surfaces need real child-device memory behind them. */
456  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
458  if (ret < 0)
459  return ret;
460  }
461
462  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
464  if (!ctx->internal->pool_internal)
465  return AVERROR(ENOMEM);
466
467  frames_hwctx->surfaces = s->surfaces_internal;
468  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
469
470  return 0;
471 }
472 
473 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
474  mfxFrameAllocResponse *resp)
475 {
476  AVHWFramesContext *ctx = pthis;
477  QSVFramesContext *s = ctx->internal->priv;
478  AVQSVFramesContext *hwctx = ctx->hwctx;
479  mfxFrameInfo *i = &req->Info;
480  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
481 
482  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
483  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
484  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
485  return MFX_ERR_UNSUPPORTED;
486  if (i->Width > i1->Width || i->Height > i1->Height ||
487  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
488  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
489  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
490  i->Width, i->Height, i->FourCC, i->ChromaFormat,
491  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
492  return MFX_ERR_UNSUPPORTED;
493  }
494 
495  resp->mids = s->mem_ids;
496  resp->NumFrameActual = hwctx->nb_surfaces;
497 
498  return MFX_ERR_NONE;
499 }
500 
/* mfxFrameAllocator.Free callback: nothing to do — the mem ids returned by
 * frame_alloc are owned by QSVFramesContext, not by the session. */
501 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
502 {
503  return MFX_ERR_NONE;
504 }
505 
/* mfxFrameAllocator.Lock callback: mapping video memory to system memory
 * is not supported by this allocator. */
506 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
507 {
508  return MFX_ERR_UNSUPPORTED;
509 }
510 
/* mfxFrameAllocator.Unlock callback: unsupported, matching frame_lock. */
511 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
512 {
513  return MFX_ERR_UNSUPPORTED;
514 }
515 
516 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
517 {
518  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
519  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
520 
521  pair_dst->first = pair_src->first;
522 
523  if (pair_src->second != (mfxMemId)MFX_INFINITE)
524  pair_dst->second = pair_src->second;
525  return MFX_ERR_NONE;
526 }
527 
/* Create a private mfx session running a pass-through VPP, used purely for
 * copying data between system and video/opaque memory (upload != 0 means
 * system -> video). On VPP init failure the session is closed and *session
 * set to NULL — the caller treats that as "transfer unavailable", not an
 * error. (The function-name line, orig 528, was elided by the extractor.) */
529  mfxSession *session, int upload)
530 {
531  QSVFramesContext *s = ctx->internal->priv;
532  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
533  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
534  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
535
536  mfxFrameAllocator frame_allocator = {
537  .pthis = ctx,
538  .Alloc = frame_alloc,
539  .Lock = frame_lock,
540  .Unlock = frame_unlock,
541  .GetHDL = frame_get_hdl,
542  .Free = frame_free,
543  };
544
545  mfxVideoParam par;
546  mfxStatus err;
547
548  err = MFXInit(device_priv->impl, &device_priv->ver, session);
549  if (err != MFX_ERR_NONE) {
550  av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
551  return AVERROR_UNKNOWN;
552  }
553
/* Share the parent device handle with the new session, if we have one. */
554  if (device_priv->handle) {
555  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
556  device_priv->handle);
557  if (err != MFX_ERR_NONE)
558  return AVERROR_UNKNOWN;
559  }
560
/* Non-opaque video memory goes through our custom allocator above. */
561  if (!opaque) {
562  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
563  if (err != MFX_ERR_NONE)
564  return AVERROR_UNKNOWN;
565  }
566
567  memset(&par, 0, sizeof(par));
568
569  if (opaque) {
570  par.ExtParam = s->ext_buffers;
571  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
572  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
573  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
574  } else {
575  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
576  MFX_IOPATTERN_IN_VIDEO_MEMORY;
577  }
578
/* The other side of the copy is always system memory. */
579  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
580  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
581  par.AsyncDepth = 1;
582
583  par.vpp.In = frames_hwctx->surfaces[0].Info;
584
585  /* Apparently VPP requires the frame rate to be set to some value, otherwise
586  * init will fail (probably for the framerate conversion filter). Since we
587  * are only doing data upload/download here, we just invent an arbitrary
588  * value */
589  par.vpp.In.FrameRateExtN = 25;
590  par.vpp.In.FrameRateExtD = 1;
591  par.vpp.Out = par.vpp.In;
592
593  err = MFXVideoVPP_Init(*session, &par);
594  if (err != MFX_ERR_NONE) {
595  av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
596  "Surface upload/download will not be possible\n");
597  MFXClose(*session);
598  *session = NULL;
599  }
600
601  return 0;
602 }
603 
/* Frames-context init: validate the software format, create the internal
 * pool if the caller did not supply one, and prepare either the opaque-alloc
 * ext buffer or the mem-id array for the frame allocator. The upload and
 * download sessions themselves are created lazily on first transfer.
 * (The signature line and the qsv_init_pool call at orig line 621 were
 * elided by the doc extractor.) */
605 {
606  QSVFramesContext *s = ctx->internal->priv;
607  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
608
609  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
610
611  uint32_t fourcc;
612  int i, ret;
613
614  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
615  if (!fourcc) {
616  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
617  return AVERROR(ENOSYS);
618  }
619
620  if (!ctx->pool) {
622  if (ret < 0) {
623  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
624  return ret;
625  }
626  }
627
628  if (opaque) {
/* Opaque memory: the session needs the surface pointers themselves,
 * packaged in an mfxExtOpaqueSurfaceAlloc ext buffer. */
629  s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
630  sizeof(*s->surface_ptrs));
631  if (!s->surface_ptrs)
632  return AVERROR(ENOMEM);
633
634  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
635  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
636
637  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
638  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
639  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
640
641  s->opaque_alloc.Out = s->opaque_alloc.In;
642
643  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
644  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
645
646  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
647  } else {
/* Non-opaque: the allocator answers requests with these mem ids. */
648  s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
649  if (!s->mem_ids)
650  return AVERROR(ENOMEM);
651
652  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
653  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
654  }
655
656  s->session_download = NULL;
657  s->session_upload = NULL;
658
659  s->session_download_init = 0;
660  s->session_upload_init = 0;
661
662 #if HAVE_PTHREADS
663  pthread_mutex_init(&s->session_lock, NULL);
664  pthread_cond_init(&s->session_cond, NULL);
665 #endif
666
667  return 0;
668 }
669 
/* get_buffer callback: take a surface from the pool and expose it in the
 * QSV convention — data[3] points at the mfxFrameSurface1. (The signature
 * line was elided by the doc extractor.) */
671 {
672  frame->buf[0] = av_buffer_pool_get(ctx->pool);
673  if (!frame->buf[0])
674  return AVERROR(ENOMEM);
675
676  frame->data[3] = frame->buf[0]->data;
677  frame->format = AV_PIX_FMT_QSV;
678  frame->width = ctx->width;
679  frame->height = ctx->height;
680
681  return 0;
682 }
683 
/* Report the formats usable for transfer: only the context's own software
 * format, as a NONE-terminated, caller-owned list. (The first part of the
 * signature was elided by the doc extractor.) */
686  enum AVPixelFormat **formats)
687 {
688  enum AVPixelFormat *fmts;
689
690  fmts = av_malloc_array(2, sizeof(*fmts));
691  if (!fmts)
692  return AVERROR(ENOMEM);
693
694  fmts[0] = ctx->sw_format;
695  fmts[1] = AV_PIX_FMT_NONE;
696
697  *formats = fmts;
698
699  return 0;
700 }
701 
/* Derive a child (VAAPI/D3D11/DXVA2) frames context from a QSV one by
 * unpacking the native handles stored in each surface's mfxHDLPair MemId.
 * (The function-name line, orig 702, was elided by the doc extractor.) */
703  AVHWFramesContext *src_ctx, int flags)
704 {
705  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
706  int i;
707
708  switch (dst_ctx->device_ctx->type) {
709 #if CONFIG_VAAPI
711  {
712  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
713  dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces,
714  sizeof(*dst_hwctx->surface_ids));
715  if (!dst_hwctx->surface_ids)
716  return AVERROR(ENOMEM);
717  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
718  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
719  dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
720  }
721  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
722  }
723  break;
724 #endif
725 #if CONFIG_D3D11VA
727  {
728  AVD3D11VAFramesContext *dst_hwctx = dst_ctx->hwctx;
/* NOTE(review): 'i' is read here without having been assigned in this
 * branch (no loop precedes it) — this looks like it should index surface
 * 0; confirm against the upstream file before relying on it. */
729  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
730  dst_hwctx->texture = (ID3D11Texture2D*)pair->first;
731  if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
732  dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
733  dst_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(src_hwctx->frame_type);
734  }
735  break;
736 #endif
737 #if CONFIG_DXVA2
739  {
740  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
741  dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces,
742  sizeof(*dst_hwctx->surfaces));
743  if (!dst_hwctx->surfaces)
744  return AVERROR(ENOMEM);
745  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
746  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
747  dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
748  }
749  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
750  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
751  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
752  else
753  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
754  }
755  break;
756 #endif
757  default:
758  return AVERROR(ENOSYS);
759  }
760
761  return 0;
762 }
763 
/* Map a QSV frame to its child-device representation (zero-copy when dst
 * already has the child format), or map onwards to software memory via a
 * temporary child frame. (The function-name line, the desc assignment at
 * orig 828 and the av_frame_free at orig 858 were elided by the doc
 * extractor.) */
765  AVFrame *dst, const AVFrame *src, int flags)
766 {
767  QSVFramesContext *s = ctx->internal->priv;
768  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
769  AVHWFramesContext *child_frames_ctx;
770  const AVPixFmtDescriptor *desc;
771  uint8_t *child_data;
772  AVFrame *dummy;
773  int ret = 0;
774
/* Mapping only works through the child frames context created at init. */
775  if (!s->child_frames_ref)
776  return AVERROR(ENOSYS);
777  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
778
/* Pull the native handle out of the surface's mfxHDLPair MemId. */
779  switch (child_frames_ctx->device_ctx->type) {
780 #if CONFIG_VAAPI
782  {
783  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
784  child_data = pair->first;
785  break;
786  }
787 #endif
788 #if CONFIG_D3D11VA
790  {
791  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
792  child_data = pair->first;
793  break;
794  }
795 #endif
796 #if CONFIG_DXVA2
798  {
799  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
800  child_data = pair->first;
801  break;
802  }
803 #endif
804  default:
805  return AVERROR(ENOSYS);
806  }
807
/* Direct map: dst takes the child hw format; no data copy involved. */
808  if (dst->format == child_frames_ctx->format) {
809  ret = ff_hwframe_map_create(s->child_frames_ref,
810  dst, src, NULL, NULL);
811  if (ret < 0)
812  return ret;
813
814  dst->width = src->width;
815  dst->height = src->height;
816
817  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
818  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
819  dst->data[0] = pair->first;
820  dst->data[1] = pair->second;
821  } else {
822  dst->data[3] = child_data;
823  }
824
825  return 0;
826  }
827
829  if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
830  // This only supports mapping to software.
831  return AVERROR(ENOSYS);
832  }
833
/* Otherwise build a temporary frame in the child format and let the child
 * context perform the map to software memory. */
834  dummy = av_frame_alloc();
835  if (!dummy)
836  return AVERROR(ENOMEM);
837
838  dummy->buf[0] = av_buffer_ref(src->buf[0]);
839  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
840  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
841  goto fail;
842
843  dummy->format = child_frames_ctx->format;
844  dummy->width = src->width;
845  dummy->height = src->height;
846
847  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
848  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
849  dummy->data[0] = pair->first;
850  dummy->data[1] = pair->second;
851  } else {
852  dummy->data[3] = child_data;
853  }
854
855  ret = av_hwframe_map(dst, dummy, flags);
856
857 fail:
859
860  return ret;
861 }
862 
/* Fallback transfer path when no internal VPP session exists: wrap the
 * surface in a temporary frame belonging to the child context and delegate
 * to its transfer implementation. The dummy's borrowed pointers are nulled
 * before it is released so nothing is double-freed. (The function-name
 * line and the orig-886/892 lines were elided by the doc extractor.) */
864  const AVFrame *src)
865 {
866  QSVFramesContext *s = ctx->internal->priv;
867  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
/* A hw_frames_ctx on src means hw -> system (download); else upload. */
868  int download = !!src->hw_frames_ctx;
869  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
870
871  AVFrame *dummy;
872  int ret;
873
874  dummy = av_frame_alloc();
875  if (!dummy)
876  return AVERROR(ENOMEM);
877
878  dummy->format = child_frames_ctx->format;
879  dummy->width = src->width;
880  dummy->height = src->height;
881  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
882  dummy->data[3] = surf->Data.MemId;
883  dummy->hw_frames_ctx = s->child_frames_ref;
884
885  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
887
/* Clear the borrowed references before freeing the dummy frame. */
888  dummy->buf[0] = NULL;
889  dummy->data[3] = NULL;
890  dummy->hw_frames_ctx = NULL;
891
893
894  return ret;
895 }
896 
897 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
898 {
899  switch (frame->format) {
900  case AV_PIX_FMT_NV12:
901  case AV_PIX_FMT_P010:
902  surface->Data.Y = frame->data[0];
903  surface->Data.UV = frame->data[1];
904  break;
905 
906  case AV_PIX_FMT_YUV420P:
907  surface->Data.Y = frame->data[0];
908  surface->Data.U = frame->data[1];
909  surface->Data.V = frame->data[2];
910  break;
911 
912  case AV_PIX_FMT_BGRA:
913  surface->Data.B = frame->data[0];
914  surface->Data.G = frame->data[0] + 1;
915  surface->Data.R = frame->data[0] + 2;
916  surface->Data.A = frame->data[0] + 3;
917  break;
918 #if CONFIG_VAAPI
919  case AV_PIX_FMT_YUYV422:
920  surface->Data.Y = frame->data[0];
921  surface->Data.U = frame->data[0] + 1;
922  surface->Data.V = frame->data[0] + 3;
923  break;
924 
925  case AV_PIX_FMT_Y210:
926  surface->Data.Y16 = (mfxU16 *)frame->data[0];
927  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
928  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
929  break;
930 #endif
931  default:
932  return MFX_ERR_UNSUPPORTED;
933  }
934  surface->Data.Pitch = frame->linesize[0];
935  surface->Data.TimeStamp = frame->pts;
936 
937  return 0;
938 }
939 
/* Download a QSV surface to system memory: lazily create the download VPP
 * session (thread-safe via trylock/cond when pthreads are available), then
 * run a pass-through VPP into the destination frame and synchronize.
 * (The function-name line, orig 940, was elided by the doc extractor.) */
941  const AVFrame *src)
942 {
943  QSVFramesContext *s = ctx->internal->priv;
944  mfxFrameSurface1 out = {{ 0 }};
945  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
946
947  mfxSyncPoint sync = NULL;
948  mfxStatus err;
949  int ret = 0;
950
/* One thread wins the trylock and creates the session; the others wait
 * on the condition variable until initialization has been attempted. */
951  while (!s->session_download_init && !s->session_download && !ret) {
952 #if HAVE_PTHREADS
953  if (pthread_mutex_trylock(&s->session_lock) == 0) {
954 #endif
955  if (!s->session_download_init) {
956  ret = qsv_init_internal_session(ctx, &s->session_download, 0);
957  if (s->session_download)
958  s->session_download_init = 1;
959  }
960 #if HAVE_PTHREADS
961  pthread_mutex_unlock(&s->session_lock);
962  pthread_cond_signal(&s->session_cond);
963  } else {
964  pthread_mutex_lock(&s->session_lock);
965  while (!s->session_download_init && !s->session_download) {
966  pthread_cond_wait(&s->session_cond, &s->session_lock);
967  }
968  pthread_mutex_unlock(&s->session_lock);
969  }
970 #endif
971  }
972
973  if (ret < 0)
974  return ret;
975
/* No VPP session: fall back to the child context's transfer if possible. */
976  if (!s->session_download) {
977  if (s->child_frames_ref)
978  return qsv_transfer_data_child(ctx, dst, src);
979
980  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
981  return AVERROR(ENOSYS);
982  }
983
984  out.Info = in->Info;
985  map_frame_to_surface(dst, &out);
986
987  do {
988  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
989  if (err == MFX_WRN_DEVICE_BUSY)
990  av_usleep(1);
991  } while (err == MFX_WRN_DEVICE_BUSY);
992
993  if (err < 0 || !sync) {
994  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
995  return AVERROR_UNKNOWN;
996  }
997
998  do {
999  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
1000  } while (err == MFX_WRN_IN_EXECUTION);
1001  if (err < 0) {
1002  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
1003  return AVERROR_UNKNOWN;
1004  }
1005
1006  return 0;
1007 }
1008 
/* Upload a system-memory frame into a QSV surface: lazily create the
 * upload VPP session, re-copy the source into a 16-aligned buffer when its
 * height/pitch do not meet libmfx's padding requirements, then run a
 * pass-through VPP and synchronize. (The function-name line, orig 1009,
 * was elided by the doc extractor.) */
1010  const AVFrame *src)
1011 {
1012  QSVFramesContext *s = ctx->internal->priv;
1013  mfxFrameSurface1 in = {{ 0 }};
1014  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
1015
1016  mfxSyncPoint sync = NULL;
1017  mfxStatus err;
1018  int ret = 0;
1019  /* make a copy if the input is not padded as libmfx requires */
1020  AVFrame tmp_frame;
1021  const AVFrame *src_frame;
1022  int realigned = 0;
1023
1024
/* Same lazy, trylock-guarded session creation as the download path. */
1025  while (!s->session_upload_init && !s->session_upload && !ret) {
1026 #if HAVE_PTHREADS
1027  if (pthread_mutex_trylock(&s->session_lock) == 0) {
1028 #endif
1029  if (!s->session_upload_init) {
1030  ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
1031  if (s->session_upload)
1032  s->session_upload_init = 1;
1033  }
1034 #if HAVE_PTHREADS
1035  pthread_mutex_unlock(&s->session_lock);
1036  pthread_cond_signal(&s->session_cond);
1037  } else {
1038  pthread_mutex_lock(&s->session_lock);
1039  while (!s->session_upload_init && !s->session_upload) {
1040  pthread_cond_wait(&s->session_cond, &s->session_lock);
1041  }
1042  pthread_mutex_unlock(&s->session_lock);
1043  }
1044 #endif
1045  }
1046  if (ret < 0)
1047  return ret;
1048
/* libmfx needs 16-aligned height and pitch; copy into a padded frame. */
1049  if (src->height & 15 || src->linesize[0] & 15) {
1050  realigned = 1;
1051  memset(&tmp_frame, 0, sizeof(tmp_frame));
1052  tmp_frame.format = src->format;
1053  tmp_frame.width = FFALIGN(src->width, 16);
1054  tmp_frame.height = FFALIGN(src->height, 16);
1055  ret = av_frame_get_buffer(&tmp_frame, 0);
1056  if (ret < 0)
1057  return ret;
1058
1059  ret = av_frame_copy(&tmp_frame, src);
1060  if (ret < 0) {
1061  av_frame_unref(&tmp_frame);
1062  return ret;
1063  }
1064  }
1065
1066  src_frame = realigned ? &tmp_frame : src;
1067
1068  if (!s->session_upload) {
1069  if (s->child_frames_ref)
1070  return qsv_transfer_data_child(ctx, dst, src_frame);
1071
1072  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
1073  return AVERROR(ENOSYS);
1074  }
1075
1076  in.Info = out->Info;
1077  map_frame_to_surface(src_frame, &in);
1078
1079  do {
1080  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
1081  if (err == MFX_WRN_DEVICE_BUSY)
1082  av_usleep(1);
1083  } while (err == MFX_WRN_DEVICE_BUSY);
1084
1085  if (err < 0 || !sync) {
1086  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
1087  return AVERROR_UNKNOWN;
1088  }
1089
1090  do {
1091  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
1092  } while (err == MFX_WRN_IN_EXECUTION);
1093  if (err < 0) {
1094  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
1095  return AVERROR_UNKNOWN;
1096  }
1097
1098  if (realigned)
1099  av_frame_unref(&tmp_frame);
1100
1101  return 0;
1102 }
1103 
1105  AVHWFramesContext *src_ctx, int flags)
1106 {
1107  QSVFramesContext *s = dst_ctx->internal->priv;
1108  AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
1109  int i;
1110 
1111  if (src_ctx->initial_pool_size == 0) {
1112  av_log(dst_ctx, AV_LOG_ERROR, "Only fixed-size pools can be "
1113  "mapped to QSV frames.\n");
1114  return AVERROR(EINVAL);
1115  }
1116 
1117  switch (src_ctx->device_ctx->type) {
1118 #if CONFIG_VAAPI
1120  {
1121  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
1122  s->handle_pairs_internal = av_mallocz_array(src_ctx->initial_pool_size, sizeof(*s->handle_pairs_internal));
1123  if (!s->handle_pairs_internal)
1124  return AVERROR(ENOMEM);
1125  s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
1126  sizeof(*s->surfaces_internal));
1127  if (!s->surfaces_internal)
1128  return AVERROR(ENOMEM);
1129  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1130  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1131  s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
1132  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1133  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1134  }
1135  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1136  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1137  }
1138  break;
1139 #endif
1140 #if CONFIG_D3D11VA
1142  {
1143  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
1144  s->handle_pairs_internal = av_mallocz_array(src_ctx->initial_pool_size, sizeof(*s->handle_pairs_internal));
1145  if (!s->handle_pairs_internal)
1146  return AVERROR(ENOMEM);
1147  s->surfaces_internal = av_mallocz_array(src_ctx->initial_pool_size,
1148  sizeof(*s->surfaces_internal));
1149  if (!s->surfaces_internal)
1150  return AVERROR(ENOMEM);
1151  for (i = 0; i < src_ctx->initial_pool_size; i++) {
1152  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1153  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
1154  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1155  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1156  } else {
1157  s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
1158  }
1159  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1160  }
1161  dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
1162  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1163  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1164  } else {
1165  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1166  }
1167  }
1168  break;
1169 #endif
1170 #if CONFIG_DXVA2
1172  {
1173  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1174  s->handle_pairs_internal = av_mallocz_array(src_ctx->initial_pool_size, sizeof(*s->handle_pairs_internal));
1175  if (!s->handle_pairs_internal)
1176  return AVERROR(ENOMEM);
1177  s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
1178  sizeof(*s->surfaces_internal));
1179  if (!s->surfaces_internal)
1180  return AVERROR(ENOMEM);
1181  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1182  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1183  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
1184  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1185  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1186  }
1187  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1188  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1189  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1190  else
1191  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1192  }
1193  break;
1194 #endif
1195  default:
1196  return AVERROR(ENOSYS);
1197  }
1198 
1199  dst_hwctx->surfaces = s->surfaces_internal;
1200 
1201  return 0;
1202 }
1203 
1204 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1205  AVFrame *dst, const AVFrame *src, int flags)
1206 {
1207  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1208  int i, err, index = -1;
1209 
1210  for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
1211  switch(src->format) {
1212 #if CONFIG_VAAPI
1213  case AV_PIX_FMT_VAAPI:
1214  {
1215  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1216  if (pair->first == src->data[3]) {
1217  index = i;
1218  break;
1219  }
1220  }
1221 #endif
1222 #if CONFIG_D3D11VA
1223  case AV_PIX_FMT_D3D11:
1224  {
1225  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1226  if (pair->first == src->data[0]
1227  && pair->second == src->data[1]) {
1228  index = i;
1229  break;
1230  }
1231  }
1232 #endif
1233 #if CONFIG_DXVA2
1234  case AV_PIX_FMT_DXVA2_VLD:
1235  {
1236  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1237  if (pair->first == src->data[3]) {
1238  index = i;
1239  break;
1240  }
1241  }
1242 #endif
1243  }
1244  }
1245  if (index < 0) {
1246  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1247  "is not in the mapped frames context.\n");
1248  return AVERROR(EINVAL);
1249  }
1250 
1252  dst, src, NULL, NULL);
1253  if (err)
1254  return err;
1255 
1256  dst->width = src->width;
1257  dst->height = src->height;
1258  dst->data[3] = (uint8_t*)&hwctx->surfaces[index];
1259 
1260  return 0;
1261 }
1262 
1264  const void *hwconfig,
1265  AVHWFramesConstraints *constraints)
1266 {
1267  int i;
1268 
1270  sizeof(*constraints->valid_sw_formats));
1271  if (!constraints->valid_sw_formats)
1272  return AVERROR(ENOMEM);
1273 
1274  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1275  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
1277 
1278  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
1279  if (!constraints->valid_hw_formats)
1280  return AVERROR(ENOMEM);
1281 
1282  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
1283  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1284 
1285  return 0;
1286 }
1287 
1289 {
1290  AVQSVDeviceContext *hwctx = ctx->hwctx;
1291  QSVDevicePriv *priv = ctx->user_opaque;
1292 
1293  if (hwctx->session)
1294  MFXClose(hwctx->session);
1295 
1297  av_freep(&priv);
1298 }
1299 
1300 static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
1301 {
1302  static const struct {
1303  const char *name;
1304  mfxIMPL impl;
1305  } impl_map[] = {
1306  { "auto", MFX_IMPL_AUTO },
1307  { "sw", MFX_IMPL_SOFTWARE },
1308  { "hw", MFX_IMPL_HARDWARE },
1309  { "auto_any", MFX_IMPL_AUTO_ANY },
1310  { "hw_any", MFX_IMPL_HARDWARE_ANY },
1311  { "hw2", MFX_IMPL_HARDWARE2 },
1312  { "hw3", MFX_IMPL_HARDWARE3 },
1313  { "hw4", MFX_IMPL_HARDWARE4 },
1314  };
1315 
1316  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
1317  int i;
1318 
1319  if (device) {
1320  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
1321  if (!strcmp(device, impl_map[i].name)) {
1322  impl = impl_map[i].impl;
1323  break;
1324  }
1325  if (i == FF_ARRAY_ELEMS(impl_map))
1326  impl = strtol(device, NULL, 0);
1327  }
1328 
1329  if (impl != MFX_IMPL_SOFTWARE) {
1330  if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
1331  impl |= MFX_IMPL_VIA_D3D11;
1332  else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
1333  impl |= MFX_IMPL_VIA_D3D9;
1334  }
1335 
1336  return impl;
1337 }
1338 
1340  mfxIMPL implementation,
1341  AVHWDeviceContext *child_device_ctx,
1342  int flags)
1343 {
1344  AVQSVDeviceContext *hwctx = ctx->hwctx;
1345 
1346  mfxVersion ver = { { 3, 1 } };
1347  mfxHDL handle;
1348  mfxHandleType handle_type;
1349  mfxStatus err;
1350  int ret;
1351 
1352  switch (child_device_ctx->type) {
1353 #if CONFIG_VAAPI
1355  {
1356  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1357  handle_type = MFX_HANDLE_VA_DISPLAY;
1358  handle = (mfxHDL)child_device_hwctx->display;
1359  }
1360  break;
1361 #endif
1362 #if CONFIG_D3D11VA
1364  {
1365  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1366  handle_type = MFX_HANDLE_D3D11_DEVICE;
1367  handle = (mfxHDL)child_device_hwctx->device;
1368  }
1369  break;
1370 #endif
1371 #if CONFIG_DXVA2
1373  {
1374  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1375  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1376  handle = (mfxHDL)child_device_hwctx->devmgr;
1377  }
1378  break;
1379 #endif
1380  default:
1381  ret = AVERROR(ENOSYS);
1382  goto fail;
1383  }
1384 
1385  err = MFXInit(implementation, &ver, &hwctx->session);
1386  if (err != MFX_ERR_NONE) {
1387  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1388  "%d.\n", err);
1389  ret = AVERROR_UNKNOWN;
1390  goto fail;
1391  }
1392 
1393  err = MFXQueryVersion(hwctx->session, &ver);
1394  if (err != MFX_ERR_NONE) {
1395  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
1396  ret = AVERROR_UNKNOWN;
1397  goto fail;
1398  }
1399 
1401  "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
1402  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
1403 
1404  MFXClose(hwctx->session);
1405 
1406  err = MFXInit(implementation, &ver, &hwctx->session);
1407  if (err != MFX_ERR_NONE) {
1409  "Error initializing an MFX session: %d.\n", err);
1410  ret = AVERROR_UNKNOWN;
1411  goto fail;
1412  }
1413 
1414  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
1415  if (err != MFX_ERR_NONE) {
1416  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
1417  "%d\n", err);
1418  ret = AVERROR_UNKNOWN;
1419  goto fail;
1420  }
1421 
1422  return 0;
1423 
1424 fail:
1425  if (hwctx->session)
1426  MFXClose(hwctx->session);
1427  return ret;
1428 }
1429 
1431  AVHWDeviceContext *child_device_ctx,
1432  AVDictionary *opts, int flags)
1433 {
1434  mfxIMPL impl;
1435  impl = choose_implementation("hw_any", child_device_ctx->type);
1436  return qsv_device_derive_from_child(ctx, impl,
1437  child_device_ctx, flags);
1438 }
1439 
1440 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
1441  AVDictionary *opts, int flags)
1442 {
1443  QSVDevicePriv *priv;
1444  enum AVHWDeviceType child_device_type;
1445  AVHWDeviceContext *child_device;
1446  AVDictionary *child_device_opts;
1447  AVDictionaryEntry *e;
1448 
1449  mfxIMPL impl;
1450  int ret;
1451 
1452  priv = av_mallocz(sizeof(*priv));
1453  if (!priv)
1454  return AVERROR(ENOMEM);
1455 
1456  ctx->user_opaque = priv;
1457  ctx->free = qsv_device_free;
1458 
1459  e = av_dict_get(opts, "child_device_type", NULL, 0);
1460  if (e) {
1461  child_device_type = av_hwdevice_find_type_by_name(e ? e->value : NULL);
1462  if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
1463  av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
1464  "\"%s\".\n", e ? e->value : NULL);
1465  return AVERROR(EINVAL);
1466  }
1467  } else if (CONFIG_VAAPI) {
1468  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
1469  } else if (CONFIG_DXVA2) {
1471  "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
1472  "with old commandlines. This behaviour will be removed "
1473  "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
1474  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
1475  } else if (CONFIG_D3D11VA) {
1476  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
1477  } else {
1478  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1479  return AVERROR(ENOSYS);
1480  }
1481 
1482  child_device_opts = NULL;
1483  switch (child_device_type) {
1484 #if CONFIG_VAAPI
1486  {
1487  // libmfx does not actually implement VAAPI properly, rather it
1488  // depends on the specific behaviour of a matching iHD driver when
1489  // used on recent Intel hardware. Set options to the VAAPI device
1490  // creation so that we should pick a usable setup by default if
1491  // possible, even when multiple devices and drivers are available.
1492  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
1493  av_dict_set(&child_device_opts, "driver", "iHD", 0);
1494  }
1495  break;
1496 #endif
1497 #if CONFIG_D3D11VA
1499  break;
1500 #endif
1501 #if CONFIG_DXVA2
1503  break;
1504 #endif
1505  default:
1506  {
1507  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1508  return AVERROR(ENOSYS);
1509  }
1510  break;
1511  }
1512 
1513  e = av_dict_get(opts, "child_device", NULL, 0);
1514  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
1515  e ? e->value : NULL, child_device_opts, 0);
1516 
1517  av_dict_free(&child_device_opts);
1518  if (ret < 0)
1519  return ret;
1520 
1521  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
1522 
1523  impl = choose_implementation(device, child_device_type);
1524 
1525  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
1526 }
1527 
1530  .name = "QSV",
1531 
1532  .device_hwctx_size = sizeof(AVQSVDeviceContext),
1533  .device_priv_size = sizeof(QSVDeviceContext),
1534  .frames_hwctx_size = sizeof(AVQSVFramesContext),
1535  .frames_priv_size = sizeof(QSVFramesContext),
1536 
1537  .device_create = qsv_device_create,
1538  .device_derive = qsv_device_derive,
1539  .device_init = qsv_device_init,
1540  .frames_get_constraints = qsv_frames_get_constraints,
1541  .frames_init = qsv_frames_init,
1542  .frames_uninit = qsv_frames_uninit,
1543  .frames_get_buffer = qsv_get_buffer,
1544  .transfer_get_formats = qsv_transfer_get_formats,
1545  .transfer_data_to = qsv_transfer_data_to,
1546  .transfer_data_from = qsv_transfer_data_from,
1547  .map_to = qsv_map_to,
1548  .map_from = qsv_map_from,
1549  .frames_derive_to = qsv_frames_derive_to,
1550  .frames_derive_from = qsv_frames_derive_from,
1551 
1552  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
1553 };
formats
formats
Definition: signature.h:48
pthread_mutex_t
_fmutex pthread_mutex_t
Definition: os2threads.h:53
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
qsv_transfer_data_child
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:863
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:49
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
out
FILE * out
Definition: movenc.c:54
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:246
QSVFramesContext::child_frames_ref
AVBufferRef * child_frames_ref
Definition: hwcontext_qsv.c:81
qsv_transfer_data_to
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1009
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2540
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
pthread_mutex_init
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:104
qsv_map_from
static int qsv_map_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:764
qsv_fourcc_from_pix_fmt
static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
Definition: hwcontext_qsv.c:113
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:209
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:112
QSVDeviceContext::ver
mfxVersion ver
Definition: hwcontext_qsv.c:64
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:333
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:303
pixdesc.h
index
fg index
Definition: ffmpeg_filter.c:168
AVFrame::width
int width
Definition: frame.h:361
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
AVD3D11VAFramesContext::MiscFlags
UINT MiscFlags
D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
Definition: hwcontext_d3d11va.h:166
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:247
qsv_device_derive
static int qsv_device_derive(AVHWDeviceContext *ctx, AVHWDeviceContext *child_device_ctx, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:1430
AVDXVA2FramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_dxva2.h:46
qsv_frames_derive_from
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:702
AV_HWDEVICE_TYPE_NONE
@ AV_HWDEVICE_TYPE_NONE
Definition: hwcontext.h:28
av_hwframe_map
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:789
qsv_init_surface
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
Definition: hwcontext_qsv.c:392
data
const char data[16]
Definition: mxf.c:143
choose_implementation
static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
Definition: hwcontext_qsv.c:1300
QSVDeviceContext
Definition: hwcontext_qsv.c:61
av_hwdevice_find_type_by_name
enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
Look up an AVHWDeviceType by name.
Definition: hwcontext.c:82
av_mallocz_array
void * av_mallocz_array(size_t nmemb, size_t size)
Definition: mem.c:214
AVDXVA2DeviceContext::devmgr
IDirect3DDeviceManager9 * devmgr
Definition: hwcontext_dxva2.h:40
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVVAAPIDeviceContext::display
VADisplay display
The VADisplay handle, to be filled by the user.
Definition: hwcontext_vaapi.h:72
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
AVHWFramesContext::internal
AVHWFramesInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:134
AVDictionary
Definition: dict.c:30
ff_hwframe_map_create
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
Definition: hwcontext.c:737
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:458
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
supported_pixel_formats
static const struct @301 supported_pixel_formats[]
fourcc
uint32_t fourcc
Definition: hwcontext_qsv.c:97
av_hwdevice_ctx_init
int av_hwdevice_ctx_init(AVBufferRef *ref)
Finalize the device context before use.
Definition: hwcontext.c:200
AVFrame::buf
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:477
QSVDeviceContext::handle_type
mfxHandleType handle_type
Definition: hwcontext_qsv.c:63
qsv_transfer_data_from
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:940
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:317
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:453
QSVDevicePriv
Definition: hwcontext_qsv.c:57
AVD3D11VAFramesContext::BindFlags
UINT BindFlags
D3D11_TEXTURE2D_DESC.BindFlags used for texture creation.
Definition: hwcontext_d3d11va.h:160
AVVAAPIFramesContext::surface_ids
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
Definition: hwcontext_vaapi.h:101
AVHWFramesInternal::priv
void * priv
Definition: hwcontext_internal.h:116
AVD3D11FrameDescriptor::texture
ID3D11Texture2D * texture
The texture in which the frame is located.
Definition: hwcontext_d3d11va.h:117
QSVDeviceContext::child_device_type
enum AVHWDeviceType child_device_type
Definition: hwcontext_qsv.c:67
qsv_init_child_ctx
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:247
fail
#define fail()
Definition: checkasm.h:127
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:128
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
qsv_frames_get_constraints
static int qsv_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_qsv.c:1263
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
qsv_frames_derive_to
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:1104
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:99
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:501
AV_PIX_FMT_Y210
#define AV_PIX_FMT_Y210
Definition: pixfmt.h:441
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:229
av_hwdevice_ctx_alloc
AVBufferRef * av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
Allocate an AVHWDeviceContext for a given hardware type.
Definition: hwcontext.c:142
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:465
QSVFramesContext::ext_buffers
mfxExtBuffer * ext_buffers[1]
Definition: hwcontext_qsv.c:92
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:473
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:40
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:387
QSVDevicePriv::child_device_ctx
AVBufferRef * child_device_ctx
Definition: hwcontext_qsv.c:58
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:127
s
#define s(width, name)
Definition: cbs_vp9.c:257
AVD3D11VADeviceContext::device
ID3D11Device * device
Device used for texture creation and access.
Definition: hwcontext_d3d11va.h:56
AVHWDeviceType
AVHWDeviceType
Definition: hwcontext.h:27
QSVDeviceContext::handle
mfxHDL handle
Definition: hwcontext_qsv.c:62
QSVFramesContext::mem_ids
mfxMemId * mem_ids
Definition: hwcontext_qsv.c:87
ctx
AVFormatContext * ctx
Definition: movenc.c:48
AVDXVA2FramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_dxva2.h:59
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
if
if(ret)
Definition: filter_design.txt:179
ff_hwcontext_type_qsv
const HWContextType ff_hwcontext_type_qsv
Definition: hwcontext_qsv.c:1528
opts
AVDictionary * opts
Definition: movenc.c:50
AVD3D11VAFramesContext::texture_infos
AVD3D11FrameDescriptor * texture_infos
In case if texture structure member above is not NULL contains the same texture pointer for all eleme...
Definition: hwcontext_d3d11va.h:175
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:43
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
qsv_frames_uninit
static void qsv_frames_uninit(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:196
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: hwcontext_qsv.c:55
src
#define src
Definition: vp8dsp.c:255
qsv_frames_init
static int qsv_frames_init(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:604
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:212
map_frame_to_surface
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
Definition: hwcontext_qsv.c:897
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:511
pthread_mutex_unlock
#define pthread_mutex_unlock(a)
Definition: ffprobe.c:68
AVD3D11VAFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_d3d11va.h:131
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
qsv_init_internal_session
static int qsv_init_internal_session(AVHWFramesContext *ctx, mfxSession *session, int upload)
Definition: hwcontext_qsv.c:528
hwcontext_dxva2.h
QSVFramesContext::opaque_alloc
mfxExtOpaqueSurfaceAlloc opaque_alloc
Definition: hwcontext_qsv.c:91
QSVFramesContext::session_upload_init
int session_upload_init
Definition: hwcontext_qsv.c:75
qsv_get_buffer
static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
Definition: hwcontext_qsv.c:670
AVDXVA2FramesContext::surface_type
DWORD surface_type
The surface type (e.g.
Definition: hwcontext_dxva2.h:51
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:678
QSVFramesContext::session_download_init
int session_download_init
Definition: hwcontext_qsv.c:73
size
int size
Definition: twinvq_data.h:10344
QSVFramesContext::nb_surfaces_used
int nb_surfaces_used
Definition: hwcontext_qsv.c:84
qsv_device_free
static void qsv_device_free(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:1288
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:376
qsv_transfer_get_formats
static int qsv_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_qsv.c:684
buffer.h
AVD3D11VAFramesContext::texture
ID3D11Texture2D * texture
The canonical texture used for pool allocation.
Definition: hwcontext_d3d11va.h:152
qsv_device_derive_from_child
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx, mfxIMPL implementation, AVHWDeviceContext *child_device_ctx, int flags)
Definition: hwcontext_qsv.c:1339
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:303
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:203
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_qsv.h:44
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: hwcontext_qsv.c:516
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:119
pthread_cond_destroy
static av_always_inline int pthread_cond_destroy(pthread_cond_t *cond)
Definition: os2threads.h:144
AV_HWDEVICE_TYPE_VAAPI
@ AV_HWDEVICE_TYPE_VAAPI
Definition: hwcontext.h:31
pthread_mutex_destroy
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:112
hwcontext_qsv.h
i
int i
Definition: input.c:406
qsv_device_init
static int qsv_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:140
AVDXVA2DeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_dxva2.h:39
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:32
common.h
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
QSVFramesContext::handle_pairs_internal
mfxHDLPair * handle_pairs_internal
Definition: hwcontext_qsv.c:83
AVD3D11FrameDescriptor::index
intptr_t index
The index into the array texture element representing the frame, or 0 if the texture is not an array ...
Definition: hwcontext_d3d11va.h:125
QSVFramesContext::surface_ptrs
mfxFrameSurface1 ** surface_ptrs
Definition: hwcontext_qsv.c:89
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:437
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:261
QSVFramesContext::session_download
mfxSession session_download
Definition: hwcontext_qsv.c:72
AVDXVA2FramesContext::surfaces
IDirect3DSurface9 ** surfaces
The surface pool.
Definition: hwcontext_dxva2.h:58
pthread_cond_t
Definition: os2threads.h:58
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:415
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
hwcontext_vaapi.h
qsv_map_to
static int qsv_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:1204
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
ret
ret
Definition: filter_design.txt:187
pix_fmt
enum AVPixelFormat pix_fmt
Definition: hwcontext_qsv.c:96
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:79
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
AVHWFramesContext::device_ctx
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:149
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:162
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:610
QSVDeviceContext::impl
mfxIMPL impl
Definition: hwcontext_qsv.c:65
av_hwframe_transfer_data
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
Copy data to or from a hw surface.
Definition: hwcontext.c:443
pthread_cond_signal
static av_always_inline int pthread_cond_signal(pthread_cond_t *cond)
Definition: os2threads.h:152
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:506
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:607
AV_HWDEVICE_TYPE_QSV
@ AV_HWDEVICE_TYPE_QSV
Definition: hwcontext.h:33
qsv_pool_release_dummy
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
Definition: hwcontext_qsv.c:226
AVFrame::height
int height
Definition: frame.h:361
QSVDeviceContext::child_pix_fmt
enum AVPixelFormat child_pix_fmt
Definition: hwcontext_qsv.c:68
AVVAAPIFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_vaapi.h:102
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
dummy
int dummy
Definition: motion.c:65
QSVFramesContext::session_upload
mfxSession session_upload
Definition: hwcontext_qsv.c:74
qsv_device_create
static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:1440
pthread_cond_wait
static av_always_inline int pthread_cond_wait(pthread_cond_t *cond, pthread_mutex_t *mutex)
Definition: os2threads.h:192
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:438
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:42
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:199
desc
const char * desc
Definition: libsvtav1.c:79
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
hwcontext_internal.h
AVVAAPIFramesContext
VAAPI-specific data associated with a frame pool.
Definition: hwcontext_vaapi.h:88
QSVFramesContext::surfaces_internal
mfxFrameSurface1 * surfaces_internal
Definition: hwcontext_qsv.c:82
AVDictionaryEntry
Definition: dict.h:79
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
QSVFramesContext
Definition: qsv_internal.h:93
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:561
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
HWContextType
Definition: hwcontext_internal.h:29
qsv_pool_alloc
static AVBufferRef * qsv_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_qsv.c:230
pthread_cond_init
static av_always_inline int pthread_cond_init(pthread_cond_t *cond, const pthread_condattr_t *attr)
Definition: os2threads.h:133
ID3D11Device
void ID3D11Device
Definition: nvenc.h:28
AVVAAPIDeviceContext
VAAPI connection details.
Definition: hwcontext_vaapi.h:68
AVDictionaryEntry::value
char * value
Definition: dict.h:81
hwcontext_d3d11va.h
qsv_init_pool
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
Definition: hwcontext_qsv.c:428
pthread_mutex_lock
#define pthread_mutex_lock(a)
Definition: ffprobe.c:64