FFmpeg
hwcontext_qsv.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdint.h>
20 #include <string.h>
21 
22 #include <mfx/mfxvideo.h>
23 
24 #include "config.h"
25 
26 #if HAVE_PTHREADS
27 #include <pthread.h>
28 #endif
29 
30 #define COBJMACROS
31 #if CONFIG_VAAPI
32 #include "hwcontext_vaapi.h"
33 #endif
34 #if CONFIG_D3D11VA
35 #include "hwcontext_d3d11va.h"
36 #endif
37 #if CONFIG_DXVA2
38 #include "hwcontext_dxva2.h"
39 #endif
40 
41 #include "buffer.h"
42 #include "common.h"
43 #include "hwcontext.h"
44 #include "hwcontext_internal.h"
45 #include "hwcontext_qsv.h"
46 #include "mem.h"
47 #include "pixfmt.h"
48 #include "pixdesc.h"
49 #include "time.h"
50 #include "imgutils.h"
51 
52 #define QSV_VERSION_ATLEAST(MAJOR, MINOR) \
53  (MFX_VERSION_MAJOR > (MAJOR) || \
54  MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR))
55 
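/* Extracts the MFX_IMPL_VIA_* bits from an mfxIMPL value, i.e. which child
 * API (D3D9, D3D11 or VA-API) the session is running through. */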
56 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
57 
58 typedef struct QSVDevicePriv {
59     AVBufferRef *child_device_ctx;
60 } QSVDevicePriv;
61 
62 typedef struct QSVDeviceContext {
63     mfxHDL handle;
64     mfxHandleType handle_type;
65     mfxVersion ver;
66     mfxIMPL impl;
67 
68     enum AVHWDeviceType child_device_type;
69     enum AVPixelFormat child_pix_fmt;
70 } QSVDeviceContext;
71 
72 typedef struct QSVFramesContext {
73     mfxSession session_download;
74     int session_download_init;
75     mfxSession session_upload;
76     int session_upload_init;
77 #if HAVE_PTHREADS
78     pthread_mutex_t session_lock;
79     pthread_cond_t session_cond;
80 #endif
81 
82     AVBufferRef *child_frames_ref;
83     mfxFrameSurface1 *surfaces_internal;
84     mfxHDLPair *handle_pairs_internal;
85     int nb_surfaces_used;
86 
87     // used in the frame allocator for non-opaque surfaces
88     mfxMemId *mem_ids;
89     // used in the opaque alloc request for opaque surfaces
90     mfxFrameSurface1 **surface_ptrs;
91 
92     mfxExtOpaqueSurfaceAlloc opaque_alloc;
93     mfxExtBuffer *ext_buffers[1];
94     AVFrame realigned_tmp_frame;
95 } QSVFramesContext;
96 
97 static const struct {
98     enum AVPixelFormat pix_fmt;
99     uint32_t fourcc;
100 } supported_pixel_formats[] = {
101     { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
102     { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
103     { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
104     { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
105 #if CONFIG_VAAPI
106     { AV_PIX_FMT_YUYV422,
107                        MFX_FOURCC_YUY2 },
108 #if QSV_VERSION_ATLEAST(1, 27)
109     { AV_PIX_FMT_Y210,
110                        MFX_FOURCC_Y210 },
111 #endif
112 #endif
113 };
114 
115 static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
116 {
117     int i;
118     for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
119         if (supported_pixel_formats[i].pix_fmt == pix_fmt)
120             return supported_pixel_formats[i].fourcc;
121     }
122     return 0;
123 }
124 
125 #if CONFIG_D3D11VA
126 static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
127 {
128  uint32_t bind_flags = 0;
129 
130  if ((mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) && (mem_type & MFX_MEMTYPE_INTERNAL_FRAME))
131  bind_flags = D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER;
132  else
133  bind_flags = D3D11_BIND_DECODER;
134 
135  if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
136  bind_flags = D3D11_BIND_RENDER_TARGET;
137 
138  return bind_flags;
139 }
140 #endif
141 
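/* Replicate the rightmost valid column and the bottom valid row of src into
 * the padding area of dst, so that surfaces padded up to 16-pixel alignment
 * do not feed uninitialized data to libmfx. */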
142 static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
143 {
144  const AVPixFmtDescriptor *desc;
145  int i, planes_nb = 0;
146  if (dst->format != src->format)
147  return AVERROR(EINVAL);
148 
149     desc = av_pix_fmt_desc_get(dst->format);
150 
151  for (i = 0; i < desc->nb_components; i++)
152  planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);
153 
154  for (i = 0; i < planes_nb; i++) {
155  int sheight, dheight, y;
156  ptrdiff_t swidth = av_image_get_linesize(src->format,
157  src->width,
158  i);
159  ptrdiff_t dwidth = av_image_get_linesize(dst->format,
160  dst->width,
161  i);
162  const AVComponentDescriptor comp = desc->comp[i];
163  if (swidth < 0 || dwidth < 0) {
164  av_log(NULL, AV_LOG_ERROR, "av_image_get_linesize failed\n");
165  return AVERROR(EINVAL);
166  }
167  sheight = src->height;
168  dheight = dst->height;
169  if (i) {
170  sheight = AV_CEIL_RSHIFT(src->height, desc->log2_chroma_h);
171  dheight = AV_CEIL_RSHIFT(dst->height, desc->log2_chroma_h);
172  }
173  //fill right padding
174  for (y = 0; y < sheight; y++) {
175  void *line_ptr = dst->data[i] + y*dst->linesize[i] + swidth;
176  av_memcpy_backptr(line_ptr,
177  comp.depth > 8 ? 2 : 1,
178  dwidth - swidth);
179  }
180  //fill bottom padding
181  for (y = sheight; y < dheight; y++) {
182  memcpy(dst->data[i]+y*dst->linesize[i],
183  dst->data[i]+(sheight-1)*dst->linesize[i],
184  dwidth);
185  }
186  }
187  return 0;
188 }
189 
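/* Query the implementation and API version of the user-supplied session and
 * try to retrieve the native device handle (VADisplay, ID3D11Device or
 * IDirect3DDeviceManager9) behind it. A missing handle is not fatal; it only
 * prevents creating child device/frames contexts later. */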
190 static int qsv_device_init(AVHWDeviceContext *ctx)
191 {
192  AVQSVDeviceContext *hwctx = ctx->hwctx;
193  QSVDeviceContext *s = ctx->internal->priv;
194  int hw_handle_supported = 0;
195  mfxHandleType handle_type;
196  enum AVHWDeviceType device_type;
197  enum AVPixelFormat pix_fmt;
198  mfxStatus err;
199 
200  err = MFXQueryIMPL(hwctx->session, &s->impl);
201  if (err == MFX_ERR_NONE)
202  err = MFXQueryVersion(hwctx->session, &s->ver);
203  if (err != MFX_ERR_NONE) {
204  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
205  return AVERROR_UNKNOWN;
206  }
207 
208  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
209 #if CONFIG_VAAPI
210  handle_type = MFX_HANDLE_VA_DISPLAY;
211  device_type = AV_HWDEVICE_TYPE_VAAPI;
212         pix_fmt     = AV_PIX_FMT_VAAPI;
213         hw_handle_supported = 1;
214 #endif
215  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
216 #if CONFIG_D3D11VA
217  handle_type = MFX_HANDLE_D3D11_DEVICE;
218  device_type = AV_HWDEVICE_TYPE_D3D11VA;
219         pix_fmt     = AV_PIX_FMT_D3D11;
220         hw_handle_supported = 1;
221 #endif
222  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
223 #if CONFIG_DXVA2
224  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
225  device_type = AV_HWDEVICE_TYPE_DXVA2;
226         pix_fmt     = AV_PIX_FMT_DXVA2_VLD;
227         hw_handle_supported = 1;
228 #endif
229  }
230 
231  if (hw_handle_supported) {
232  err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
233  if (err == MFX_ERR_NONE) {
234  s->handle_type = handle_type;
235  s->child_device_type = device_type;
236  s->child_pix_fmt = pix_fmt;
237  }
238  }
239  if (!s->handle) {
240  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
241  "from the session\n");
242  }
243  return 0;
244 }
245 
246 static void qsv_frames_uninit(AVHWFramesContext *ctx)
247 {
248  QSVFramesContext *s = ctx->internal->priv;
249 
250  if (s->session_download) {
251  MFXVideoVPP_Close(s->session_download);
252  MFXClose(s->session_download);
253  }
254  s->session_download = NULL;
255  s->session_download_init = 0;
256 
257  if (s->session_upload) {
258  MFXVideoVPP_Close(s->session_upload);
259  MFXClose(s->session_upload);
260  }
261  s->session_upload = NULL;
262  s->session_upload_init = 0;
263 
264 #if HAVE_PTHREADS
265  pthread_mutex_destroy(&s->session_lock);
266  pthread_cond_destroy(&s->session_cond);
267 #endif
268 
269  av_freep(&s->mem_ids);
270  av_freep(&s->surface_ptrs);
271  av_freep(&s->surfaces_internal);
272  av_freep(&s->handle_pairs_internal);
273  av_frame_unref(&s->realigned_tmp_frame);
274  av_buffer_unref(&s->child_frames_ref);
275 }
276 
277 static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
278 {
279 }
280 
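/* Buffer-pool allocator that hands out entries of the preallocated
 * surfaces_internal array; the dummy release callback above is used because
 * the surfaces themselves are owned by the frames context and are freed in
 * qsv_frames_uninit(). */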
281 static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
282 {
283     AVHWFramesContext    *ctx = (AVHWFramesContext*)opaque;
284     QSVFramesContext       *s = ctx->internal->priv;
285  AVQSVFramesContext *hwctx = ctx->hwctx;
286 
287  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
288  s->nb_surfaces_used++;
289  return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
290  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
291  }
292 
293  return NULL;
294 }
295 
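/* Create a "child" device and frames context in the native API (VAAPI, D3D11
 * or DXVA2) on top of the session's device handle, let that API allocate the
 * actual surfaces, and wrap each of them in an mfxHDLPair so libmfx can use
 * it as a MemId. */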
296 static int qsv_init_child_ctx(AVHWFramesContext *ctx)
297 {
298  AVQSVFramesContext *hwctx = ctx->hwctx;
299  QSVFramesContext *s = ctx->internal->priv;
300  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
301 
302  AVBufferRef *child_device_ref = NULL;
303  AVBufferRef *child_frames_ref = NULL;
304 
305  AVHWDeviceContext *child_device_ctx;
306  AVHWFramesContext *child_frames_ctx;
307 
308  int i, ret = 0;
309 
310  if (!device_priv->handle) {
311         av_log(ctx, AV_LOG_ERROR,
312                "Cannot create a non-opaque internal surface pool without "
313  "a hardware handle\n");
314  return AVERROR(EINVAL);
315  }
316 
317  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
318  if (!child_device_ref)
319  return AVERROR(ENOMEM);
320  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
321 
322 #if CONFIG_VAAPI
323  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
324  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
325  child_device_hwctx->display = (VADisplay)device_priv->handle;
326  }
327 #endif
328 #if CONFIG_D3D11VA
329  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
330  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
331  ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
332  child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
333  }
334 #endif
335 #if CONFIG_DXVA2
336  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
337  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
338  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
339  }
340 #endif
341 
342  ret = av_hwdevice_ctx_init(child_device_ref);
343  if (ret < 0) {
344  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
345  goto fail;
346  }
347 
348  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
349  if (!child_frames_ref) {
350  ret = AVERROR(ENOMEM);
351  goto fail;
352  }
353  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
354 
355  child_frames_ctx->format = device_priv->child_pix_fmt;
356  child_frames_ctx->sw_format = ctx->sw_format;
357  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
358  child_frames_ctx->width = FFALIGN(ctx->width, 16);
359  child_frames_ctx->height = FFALIGN(ctx->height, 16);
360 
361 #if CONFIG_D3D11VA
362  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
363  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
364  if (hwctx->frame_type == 0)
365  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
366  if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
367  child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
368  child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
369  }
370 #endif
371 #if CONFIG_DXVA2
372  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
373  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
374  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
375  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
376  else
377  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
378  }
379 #endif
380 
381  ret = av_hwframe_ctx_init(child_frames_ref);
382  if (ret < 0) {
383  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
384  goto fail;
385  }
386 
387 #if CONFIG_VAAPI
388  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
389  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
390  for (i = 0; i < ctx->initial_pool_size; i++) {
391  s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
392  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
393  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
394  }
395  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
396  }
397 #endif
398 #if CONFIG_D3D11VA
399  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
400  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
401  for (i = 0; i < ctx->initial_pool_size; i++) {
402  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
403  if(child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
404  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
405  } else {
406  s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
407  }
408  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
409  }
410  if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
411  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
412  } else {
413  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
414  }
415  }
416 #endif
417 #if CONFIG_DXVA2
418  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
419  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
420  for (i = 0; i < ctx->initial_pool_size; i++) {
421  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
422  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
423  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
424  }
425  if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
426  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
427  else
428  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
429  }
430 #endif
431 
432  s->child_frames_ref = child_frames_ref;
433  child_frames_ref = NULL;
434 
435 fail:
436  av_buffer_unref(&child_device_ref);
437  av_buffer_unref(&child_frames_ref);
438  return ret;
439 }
440 
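/* Fill a mfxFrameInfo from the context's software pixel format: bit depth,
 * chroma format and FourCC, with Width/Height aligned up to 16 and
 * CropW/CropH holding the real dimensions. The frame rate is a placeholder. */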
441 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
442 {
443  const AVPixFmtDescriptor *desc;
444  uint32_t fourcc;
445 
446  desc = av_pix_fmt_desc_get(ctx->sw_format);
447  if (!desc)
448  return AVERROR(EINVAL);
449 
450  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
451  if (!fourcc)
452  return AVERROR(EINVAL);
453 
454  surf->Info.BitDepthLuma = desc->comp[0].depth;
455  surf->Info.BitDepthChroma = desc->comp[0].depth;
456  surf->Info.Shift = desc->comp[0].depth > 8;
457 
458  if (desc->log2_chroma_w && desc->log2_chroma_h)
459  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
460  else if (desc->log2_chroma_w)
461  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
462  else
463  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
464 
465  surf->Info.FourCC = fourcc;
466  surf->Info.Width = FFALIGN(ctx->width, 16);
467  surf->Info.CropW = ctx->width;
468  surf->Info.Height = FFALIGN(ctx->height, 16);
469  surf->Info.CropH = ctx->height;
470  surf->Info.FrameRateExtN = 25;
471  surf->Info.FrameRateExtD = 1;
472  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
473 
474  return 0;
475 }
476 
477 static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
478 {
479  QSVFramesContext *s = ctx->internal->priv;
480  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
481 
482  int i, ret = 0;
483 
484  if (ctx->initial_pool_size <= 0) {
485  av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
486  return AVERROR(EINVAL);
487  }
488 
489  s->handle_pairs_internal = av_calloc(ctx->initial_pool_size,
490  sizeof(*s->handle_pairs_internal));
491  if (!s->handle_pairs_internal)
492  return AVERROR(ENOMEM);
493 
494  s->surfaces_internal = av_calloc(ctx->initial_pool_size,
495  sizeof(*s->surfaces_internal));
496  if (!s->surfaces_internal)
497  return AVERROR(ENOMEM);
498 
499  for (i = 0; i < ctx->initial_pool_size; i++) {
500  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
501  if (ret < 0)
502  return ret;
503  }
504 
505  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
506         ret = qsv_init_child_ctx(ctx);
507         if (ret < 0)
508  return ret;
509  }
510 
511  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
512                                                         ctx, qsv_pool_alloc, NULL);
513     if (!ctx->internal->pool_internal)
514  return AVERROR(ENOMEM);
515 
516  frames_hwctx->surfaces = s->surfaces_internal;
517  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
518 
519  return 0;
520 }
521 
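/* The following callbacks implement the libmfx external frame allocator
 * (mfxFrameAllocator) on top of the fixed surface pool: Alloc only validates
 * the request against the existing surfaces and returns the precomputed
 * MemIds, Free is a no-op, and Lock/Unlock return MFX_ERR_UNSUPPORTED since
 * the surfaces live in video memory and are accessed through GetHDL. */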
522 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
523  mfxFrameAllocResponse *resp)
524 {
525  AVHWFramesContext *ctx = pthis;
526  QSVFramesContext *s = ctx->internal->priv;
527  AVQSVFramesContext *hwctx = ctx->hwctx;
528  mfxFrameInfo *i = &req->Info;
529  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
530 
531  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
532  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
533  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
534  return MFX_ERR_UNSUPPORTED;
535  if (i->Width > i1->Width || i->Height > i1->Height ||
536  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
537  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
538  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
539  i->Width, i->Height, i->FourCC, i->ChromaFormat,
540  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
541  return MFX_ERR_UNSUPPORTED;
542  }
543 
544  resp->mids = s->mem_ids;
545  resp->NumFrameActual = hwctx->nb_surfaces;
546 
547  return MFX_ERR_NONE;
548 }
549 
550 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
551 {
552  return MFX_ERR_NONE;
553 }
554 
555 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
556 {
557  return MFX_ERR_UNSUPPORTED;
558 }
559 
560 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
561 {
562  return MFX_ERR_UNSUPPORTED;
563 }
564 
565 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
566 {
567  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
568  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
569 
570  pair_dst->first = pair_src->first;
571 
572  if (pair_src->second != (mfxMemId)MFX_INFINITE)
573  pair_dst->second = pair_src->second;
574  return MFX_ERR_NONE;
575 }
576 
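/* Create a dedicated VPP session bound to the device handle and, for
 * non-opaque surfaces, to the frame allocator above. It is used purely to
 * copy frames between system and video memory; failure is not fatal, it only
 * disables direct upload/download through VPP. */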
577 static int qsv_init_internal_session(AVHWFramesContext *ctx,
578                                      mfxSession *session, int upload)
579 {
580  QSVFramesContext *s = ctx->internal->priv;
581  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
582  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
583  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
584 
585  mfxFrameAllocator frame_allocator = {
586  .pthis = ctx,
587  .Alloc = frame_alloc,
588  .Lock = frame_lock,
589  .Unlock = frame_unlock,
590  .GetHDL = frame_get_hdl,
591  .Free = frame_free,
592  };
593 
594  mfxVideoParam par;
595  mfxStatus err;
596 
597  err = MFXInit(device_priv->impl, &device_priv->ver, session);
598  if (err != MFX_ERR_NONE) {
599  av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
600  return AVERROR_UNKNOWN;
601  }
602 
603  if (device_priv->handle) {
604  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
605  device_priv->handle);
606  if (err != MFX_ERR_NONE)
607  return AVERROR_UNKNOWN;
608  }
609 
610  if (!opaque) {
611  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
612  if (err != MFX_ERR_NONE)
613  return AVERROR_UNKNOWN;
614  }
615 
616  memset(&par, 0, sizeof(par));
617 
618  if (opaque) {
619  par.ExtParam = s->ext_buffers;
620  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
621  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
622  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
623  } else {
624  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
625  MFX_IOPATTERN_IN_VIDEO_MEMORY;
626  }
627 
628  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
629  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
630  par.AsyncDepth = 1;
631 
632  par.vpp.In = frames_hwctx->surfaces[0].Info;
633 
634  /* Apparently VPP requires the frame rate to be set to some value, otherwise
635  * init will fail (probably for the framerate conversion filter). Since we
636  * are only doing data upload/download here, we just invent an arbitrary
637  * value */
638  par.vpp.In.FrameRateExtN = 25;
639  par.vpp.In.FrameRateExtD = 1;
640  par.vpp.Out = par.vpp.In;
641 
642  err = MFXVideoVPP_Init(*session, &par);
643  if (err != MFX_ERR_NONE) {
644         av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session. "
645  "Surface upload/download will not be possible\n");
646  MFXClose(*session);
647  *session = NULL;
648  }
649 
650  return 0;
651 }
652 
653 static int qsv_frames_init(AVHWFramesContext *ctx)
654 {
655  QSVFramesContext *s = ctx->internal->priv;
656  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
657 
658  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
659 
660  uint32_t fourcc;
661  int i, ret;
662 
663  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
664  if (!fourcc) {
665  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
666  return AVERROR(ENOSYS);
667  }
668 
669  if (!ctx->pool) {
670         ret = qsv_init_pool(ctx, fourcc);
671         if (ret < 0) {
672  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
673  return ret;
674  }
675  }
676 
677  if (opaque) {
678  s->surface_ptrs = av_calloc(frames_hwctx->nb_surfaces,
679  sizeof(*s->surface_ptrs));
680  if (!s->surface_ptrs)
681  return AVERROR(ENOMEM);
682 
683  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
684  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
685 
686  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
687  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
688  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
689 
690  s->opaque_alloc.Out = s->opaque_alloc.In;
691 
692  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
693  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
694 
695  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
696  } else {
697  s->mem_ids = av_calloc(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
698  if (!s->mem_ids)
699  return AVERROR(ENOMEM);
700 
701  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
702  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
703  }
704 
705  s->session_download = NULL;
706  s->session_upload = NULL;
707 
708  s->session_download_init = 0;
709  s->session_upload_init = 0;
710 
711 #if HAVE_PTHREADS
712  pthread_mutex_init(&s->session_lock, NULL);
713  pthread_cond_init(&s->session_cond, NULL);
714 #endif
715 
716  return 0;
717 }
718 
719 static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
720 {
721  frame->buf[0] = av_buffer_pool_get(ctx->pool);
722  if (!frame->buf[0])
723  return AVERROR(ENOMEM);
724 
725  frame->data[3] = frame->buf[0]->data;
726  frame->format = AV_PIX_FMT_QSV;
727  frame->width = ctx->width;
728  frame->height = ctx->height;
729 
730  return 0;
731 }
732 
733 static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
734                                     enum AVHWFrameTransferDirection dir,
735                                     enum AVPixelFormat **formats)
736 {
737  enum AVPixelFormat *fmts;
738 
739  fmts = av_malloc_array(2, sizeof(*fmts));
740  if (!fmts)
741  return AVERROR(ENOMEM);
742 
743  fmts[0] = ctx->sw_format;
744  fmts[1] = AV_PIX_FMT_NONE;
745 
746  *formats = fmts;
747 
748  return 0;
749 }
750 
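/* Expose the native surfaces backing a QSV frames context as a frames
 * context of the child API, so the same memory can be used directly as
 * VAAPI, D3D11 or DXVA2 frames. */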
751 static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
752                                   AVHWFramesContext *src_ctx, int flags)
753 {
754  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
755  int i;
756 
757  switch (dst_ctx->device_ctx->type) {
758 #if CONFIG_VAAPI
759     case AV_HWDEVICE_TYPE_VAAPI:
760         {
761  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
762  dst_hwctx->surface_ids = av_calloc(src_hwctx->nb_surfaces,
763  sizeof(*dst_hwctx->surface_ids));
764  if (!dst_hwctx->surface_ids)
765  return AVERROR(ENOMEM);
766  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
767  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
768  dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
769  }
770  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
771  }
772  break;
773 #endif
774 #if CONFIG_D3D11VA
775     case AV_HWDEVICE_TYPE_D3D11VA:
776         {
777  AVD3D11VAFramesContext *dst_hwctx = dst_ctx->hwctx;
778  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
779  dst_hwctx->texture = (ID3D11Texture2D*)pair->first;
780  if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
781  dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
782  dst_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(src_hwctx->frame_type);
783  }
784  break;
785 #endif
786 #if CONFIG_DXVA2
787     case AV_HWDEVICE_TYPE_DXVA2:
788         {
789  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
790  dst_hwctx->surfaces = av_calloc(src_hwctx->nb_surfaces,
791  sizeof(*dst_hwctx->surfaces));
792  if (!dst_hwctx->surfaces)
793  return AVERROR(ENOMEM);
794  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
795  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
796  dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
797  }
798  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
799  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
800  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
801  else
802  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
803  }
804  break;
805 #endif
806  default:
807  return AVERROR(ENOSYS);
808  }
809 
810  return 0;
811 }
812 
813 static int qsv_map_from(AVHWFramesContext *ctx,
814                         AVFrame *dst, const AVFrame *src, int flags)
815 {
816  QSVFramesContext *s = ctx->internal->priv;
817  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
818  AVHWFramesContext *child_frames_ctx;
819  const AVPixFmtDescriptor *desc;
820  uint8_t *child_data;
821  AVFrame *dummy;
822  int ret = 0;
823 
824  if (!s->child_frames_ref)
825  return AVERROR(ENOSYS);
826  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
827 
828  switch (child_frames_ctx->device_ctx->type) {
829 #if CONFIG_VAAPI
830     case AV_HWDEVICE_TYPE_VAAPI:
831         {
832  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
833  /* pair->first is *VASurfaceID while data[3] in vaapi frame is VASurfaceID, so
834  * we need this casting for vaapi.
835  * Add intptr_t to force cast from VASurfaceID(uint) type to pointer(long) type
836  * to avoid compile warning */
837  child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)pair->first;
838  break;
839  }
840 #endif
841 #if CONFIG_D3D11VA
842     case AV_HWDEVICE_TYPE_D3D11VA:
843         {
844  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
845  child_data = pair->first;
846  break;
847  }
848 #endif
849 #if CONFIG_DXVA2
850     case AV_HWDEVICE_TYPE_DXVA2:
851         {
852  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
853  child_data = pair->first;
854  break;
855  }
856 #endif
857  default:
858  return AVERROR(ENOSYS);
859  }
860 
861  if (dst->format == child_frames_ctx->format) {
862  ret = ff_hwframe_map_create(s->child_frames_ref,
863  dst, src, NULL, NULL);
864  if (ret < 0)
865  return ret;
866 
867  dst->width = src->width;
868  dst->height = src->height;
869 
870  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
871  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
872  dst->data[0] = pair->first;
873  dst->data[1] = pair->second;
874  } else {
875  dst->data[3] = child_data;
876  }
877 
878  return 0;
879  }
880 
881     desc = av_pix_fmt_desc_get(dst->format);
882     if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
883  // This only supports mapping to software.
884  return AVERROR(ENOSYS);
885  }
886 
887  dummy = av_frame_alloc();
888  if (!dummy)
889  return AVERROR(ENOMEM);
890 
891  dummy->buf[0] = av_buffer_ref(src->buf[0]);
892  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
893  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
894  goto fail;
895 
896  dummy->format = child_frames_ctx->format;
897  dummy->width = src->width;
898  dummy->height = src->height;
899 
900  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
901  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
902  dummy->data[0] = pair->first;
903  dummy->data[1] = pair->second;
904  } else {
905  dummy->data[3] = child_data;
906  }
907 
908  ret = av_hwframe_map(dst, dummy, flags);
909 
910 fail:
911     av_frame_free(&dummy);
912 
913  return ret;
914 }
915 
916 static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
917                                    const AVFrame *src)
918 {
919  QSVFramesContext *s = ctx->internal->priv;
920  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
921  int download = !!src->hw_frames_ctx;
922  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
923 
924  AVFrame *dummy;
925  int ret;
926 
927  dummy = av_frame_alloc();
928  if (!dummy)
929  return AVERROR(ENOMEM);
930 
931  dummy->format = child_frames_ctx->format;
932  dummy->width = src->width;
933  dummy->height = src->height;
934  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
935  dummy->data[3] = surf->Data.MemId;
936  dummy->hw_frames_ctx = s->child_frames_ref;
937 
938  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
939                      av_hwframe_transfer_data(dummy, src, 0);
940 
941  dummy->buf[0] = NULL;
942  dummy->data[3] = NULL;
943  dummy->hw_frames_ctx = NULL;
944 
945     av_frame_free(&dummy);
946 
947  return ret;
948 }
949 
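/* Point the mfxFrameData plane pointers of a system-memory surface at the
 * data planes of the given AVFrame; pitch and timestamp are taken from the
 * frame as well. */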
950 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
951 {
952  switch (frame->format) {
953  case AV_PIX_FMT_NV12:
954  case AV_PIX_FMT_P010:
955  surface->Data.Y = frame->data[0];
956  surface->Data.UV = frame->data[1];
957  break;
958 
959  case AV_PIX_FMT_YUV420P:
960  surface->Data.Y = frame->data[0];
961  surface->Data.U = frame->data[1];
962  surface->Data.V = frame->data[2];
963  break;
964 
965  case AV_PIX_FMT_BGRA:
966  surface->Data.B = frame->data[0];
967  surface->Data.G = frame->data[0] + 1;
968  surface->Data.R = frame->data[0] + 2;
969  surface->Data.A = frame->data[0] + 3;
970  break;
971 #if CONFIG_VAAPI
972  case AV_PIX_FMT_YUYV422:
973  surface->Data.Y = frame->data[0];
974  surface->Data.U = frame->data[0] + 1;
975  surface->Data.V = frame->data[0] + 3;
976  break;
977 
978  case AV_PIX_FMT_Y210:
979  surface->Data.Y16 = (mfxU16 *)frame->data[0];
980  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
981  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
982  break;
983 #endif
984  default:
985  return MFX_ERR_UNSUPPORTED;
986  }
987  surface->Data.Pitch = frame->linesize[0];
988  surface->Data.TimeStamp = frame->pts;
989 
990  return 0;
991 }
992 
993 static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
994                                   const AVFrame *src)
995 {
996  QSVFramesContext *s = ctx->internal->priv;
997  mfxFrameSurface1 out = {{ 0 }};
998  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
999 
1000  mfxSyncPoint sync = NULL;
1001  mfxStatus err;
1002  int ret = 0;
1003 
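    /* Lazily create the download session: the first thread to take the lock
     * initializes it, while other threads wait on the condition variable
     * until the initialization flag is set. */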
1004  while (!s->session_download_init && !s->session_download && !ret) {
1005 #if HAVE_PTHREADS
1006  if (pthread_mutex_trylock(&s->session_lock) == 0) {
1007 #endif
1008  if (!s->session_download_init) {
1009  ret = qsv_init_internal_session(ctx, &s->session_download, 0);
1010  if (s->session_download)
1011  s->session_download_init = 1;
1012  }
1013 #if HAVE_PTHREADS
1014  pthread_mutex_unlock(&s->session_lock);
1015  pthread_cond_signal(&s->session_cond);
1016  } else {
1017  pthread_mutex_lock(&s->session_lock);
1018  while (!s->session_download_init && !s->session_download) {
1019  pthread_cond_wait(&s->session_cond, &s->session_lock);
1020  }
1021  pthread_mutex_unlock(&s->session_lock);
1022  }
1023 #endif
1024  }
1025 
1026  if (ret < 0)
1027  return ret;
1028 
1029  if (!s->session_download) {
1030  if (s->child_frames_ref)
1031  return qsv_transfer_data_child(ctx, dst, src);
1032 
1033  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
1034  return AVERROR(ENOSYS);
1035  }
1036 
1037  out.Info = in->Info;
1038  map_frame_to_surface(dst, &out);
1039 
1040  do {
1041  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
1042  if (err == MFX_WRN_DEVICE_BUSY)
1043  av_usleep(1);
1044  } while (err == MFX_WRN_DEVICE_BUSY);
1045 
1046  if (err < 0 || !sync) {
1047  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
1048  return AVERROR_UNKNOWN;
1049  }
1050 
1051  do {
1052  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
1053  } while (err == MFX_WRN_IN_EXECUTION);
1054  if (err < 0) {
1055  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
1056  return AVERROR_UNKNOWN;
1057  }
1058 
1059  return 0;
1060 }
1061 
1062 static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
1063                                 const AVFrame *src)
1064 {
1065  QSVFramesContext *s = ctx->internal->priv;
1066  mfxFrameSurface1 in = {{ 0 }};
1067  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
1068  mfxFrameInfo tmp_info;
1069 
1070  mfxSyncPoint sync = NULL;
1071  mfxStatus err;
1072  int ret = 0;
1073  /* make a copy if the input is not padded as libmfx requires */
1074  AVFrame *tmp_frame = &s->realigned_tmp_frame;
1075  const AVFrame *src_frame;
1076  int realigned = 0;
1077 
1078 
1079  while (!s->session_upload_init && !s->session_upload && !ret) {
1080 #if HAVE_PTHREADS
1081  if (pthread_mutex_trylock(&s->session_lock) == 0) {
1082 #endif
1083  if (!s->session_upload_init) {
1084  ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
1085  if (s->session_upload)
1086  s->session_upload_init = 1;
1087  }
1088 #if HAVE_PTHREADS
1089  pthread_mutex_unlock(&s->session_lock);
1090  pthread_cond_signal(&s->session_cond);
1091  } else {
1092  pthread_mutex_lock(&s->session_lock);
1093  while (!s->session_upload_init && !s->session_upload) {
1094  pthread_cond_wait(&s->session_cond, &s->session_lock);
1095  }
1096  pthread_mutex_unlock(&s->session_lock);
1097  }
1098 #endif
1099  }
1100  if (ret < 0)
1101  return ret;
1102 
1103     /* According to the MSDK specification for mfxFrameInfo, "Width must be a multiple of 16.
1104      * Height must be a multiple of 16 for progressive frame sequence and a
1105      * multiple of 32 otherwise.", so align all frames to 16 before uploading. */
1106  if (src->height & 15 || src->linesize[0] & 15) {
1107  realigned = 1;
1108  if (tmp_frame->format != src->format ||
1109  tmp_frame->width != FFALIGN(src->width, 16) ||
1110  tmp_frame->height != FFALIGN(src->height, 16)) {
1111  av_frame_unref(tmp_frame);
1112 
1113  tmp_frame->format = src->format;
1114  tmp_frame->width = FFALIGN(src->width, 16);
1115  tmp_frame->height = FFALIGN(src->height, 16);
1116  ret = av_frame_get_buffer(tmp_frame, 0);
1117  if (ret < 0)
1118  return ret;
1119  }
1120  ret = av_frame_copy(tmp_frame, src);
1121  if (ret < 0) {
1122  av_frame_unref(tmp_frame);
1123  return ret;
1124  }
1125  ret = qsv_fill_border(tmp_frame, src);
1126  if (ret < 0) {
1127  av_frame_unref(tmp_frame);
1128  return ret;
1129  }
1130 
1131  tmp_info = out->Info;
1132  out->Info.CropW = FFMIN(out->Info.Width, tmp_frame->width);
1133  out->Info.CropH = FFMIN(out->Info.Height, tmp_frame->height);
1134  }
1135 
1136  src_frame = realigned ? tmp_frame : src;
1137 
1138  if (!s->session_upload) {
1139  if (s->child_frames_ref)
1140  return qsv_transfer_data_child(ctx, dst, src_frame);
1141 
1142  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
1143  return AVERROR(ENOSYS);
1144  }
1145 
1146  in.Info = out->Info;
1147  map_frame_to_surface(src_frame, &in);
1148 
1149  do {
1150  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
1151  if (err == MFX_WRN_DEVICE_BUSY)
1152  av_usleep(1);
1153  } while (err == MFX_WRN_DEVICE_BUSY);
1154 
1155  if (err < 0 || !sync) {
1156  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
1157  return AVERROR_UNKNOWN;
1158  }
1159 
1160  do {
1161  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
1162  } while (err == MFX_WRN_IN_EXECUTION);
1163  if (err < 0) {
1164  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
1165  return AVERROR_UNKNOWN;
1166  }
1167 
1168  if (realigned) {
1169  out->Info.CropW = tmp_info.CropW;
1170  out->Info.CropH = tmp_info.CropH;
1171  }
1172 
1173  return 0;
1174 }
1175 
1176 static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
1177                                 AVHWFramesContext *src_ctx, int flags)
1178 {
1179  QSVFramesContext *s = dst_ctx->internal->priv;
1180  AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
1181  int i;
1182 
1183  if (src_ctx->initial_pool_size == 0) {
1184  av_log(dst_ctx, AV_LOG_ERROR, "Only fixed-size pools can be "
1185  "mapped to QSV frames.\n");
1186  return AVERROR(EINVAL);
1187  }
1188 
1189  switch (src_ctx->device_ctx->type) {
1190 #if CONFIG_VAAPI
1191     case AV_HWDEVICE_TYPE_VAAPI:
1192         {
1193  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
1194  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1195  sizeof(*s->handle_pairs_internal));
1196  if (!s->handle_pairs_internal)
1197  return AVERROR(ENOMEM);
1198  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1199  sizeof(*s->surfaces_internal));
1200  if (!s->surfaces_internal)
1201  return AVERROR(ENOMEM);
1202  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1203  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1204  s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
1205  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1206  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1207  }
1208  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1209  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1210  }
1211  break;
1212 #endif
1213 #if CONFIG_D3D11VA
1214     case AV_HWDEVICE_TYPE_D3D11VA:
1215         {
1216  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
1217  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1218  sizeof(*s->handle_pairs_internal));
1219  if (!s->handle_pairs_internal)
1220  return AVERROR(ENOMEM);
1221  s->surfaces_internal = av_calloc(src_ctx->initial_pool_size,
1222  sizeof(*s->surfaces_internal));
1223  if (!s->surfaces_internal)
1224  return AVERROR(ENOMEM);
1225  for (i = 0; i < src_ctx->initial_pool_size; i++) {
1226  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1227  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
1228  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1229  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1230  } else {
1231  s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
1232  }
1233  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1234  }
1235  dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
1236  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1237  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1238  } else {
1239  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1240  }
1241  }
1242  break;
1243 #endif
1244 #if CONFIG_DXVA2
1245     case AV_HWDEVICE_TYPE_DXVA2:
1246         {
1247  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1248  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1249  sizeof(*s->handle_pairs_internal));
1250  if (!s->handle_pairs_internal)
1251  return AVERROR(ENOMEM);
1252  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1253  sizeof(*s->surfaces_internal));
1254  if (!s->surfaces_internal)
1255  return AVERROR(ENOMEM);
1256  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1257  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1258  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
1259  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1260  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1261  }
1262  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1263  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1264  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1265  else
1266  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1267  }
1268  break;
1269 #endif
1270  default:
1271  return AVERROR(ENOSYS);
1272  }
1273 
1274  dst_hwctx->surfaces = s->surfaces_internal;
1275 
1276  return 0;
1277 }
1278 
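/* Map a VAAPI/D3D11/DXVA2 frame into an existing QSV frames context by
 * locating the pool surface whose native handle matches the source frame and
 * creating a hwframe mapping onto it. */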
1279 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1280  AVFrame *dst, const AVFrame *src, int flags)
1281 {
1282  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1283  int i, err, index = -1;
1284 
1285  for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
1286  switch(src->format) {
1287 #if CONFIG_VAAPI
1288  case AV_PIX_FMT_VAAPI:
1289  {
1290  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1291  if (*(VASurfaceID*)pair->first == (VASurfaceID)src->data[3]) {
1292  index = i;
1293  break;
1294  }
1295  }
1296 #endif
1297 #if CONFIG_D3D11VA
1298  case AV_PIX_FMT_D3D11:
1299  {
1300  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1301  if (pair->first == src->data[0]
1302  && pair->second == src->data[1]) {
1303  index = i;
1304  break;
1305  }
1306  }
1307 #endif
1308 #if CONFIG_DXVA2
1309  case AV_PIX_FMT_DXVA2_VLD:
1310  {
1311  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1312  if (pair->first == src->data[3]) {
1313  index = i;
1314  break;
1315  }
1316  }
1317 #endif
1318  }
1319  }
1320  if (index < 0) {
1321  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1322  "is not in the mapped frames context.\n");
1323  return AVERROR(EINVAL);
1324  }
1325 
1326     err = ff_hwframe_map_create(dst->hw_frames_ctx,
1327                                 dst, src, NULL, NULL);
1328  if (err)
1329  return err;
1330 
1331  dst->width = src->width;
1332  dst->height = src->height;
1333  dst->data[3] = (uint8_t*)&hwctx->surfaces[index];
1334 
1335  return 0;
1336 }
1337 
1338 static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
1339                                       const void *hwconfig,
1340  AVHWFramesConstraints *constraints)
1341 {
1342  int i;
1343 
1344     constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
1345                                                     sizeof(*constraints->valid_sw_formats));
1346  if (!constraints->valid_sw_formats)
1347  return AVERROR(ENOMEM);
1348 
1349  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1350  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
1351     constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
1352 
1353  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
1354  if (!constraints->valid_hw_formats)
1355  return AVERROR(ENOMEM);
1356 
1357  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
1358  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1359 
1360  return 0;
1361 }
1362 
1363 static void qsv_device_free(AVHWDeviceContext *ctx)
1364 {
1365  AVQSVDeviceContext *hwctx = ctx->hwctx;
1366  QSVDevicePriv *priv = ctx->user_opaque;
1367 
1368  if (hwctx->session)
1369  MFXClose(hwctx->session);
1370 
1371     av_buffer_unref(&priv->child_device_ctx);
1372     av_freep(&priv);
1373 }
1374 
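/* Translate the device string ("auto", "sw", "hw", "hw2", ... or a plain
 * number) into an mfxIMPL value, and OR in the MFX_IMPL_VIA_* flag that
 * matches a D3D11 or D3D9 child device type. */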
1375 static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
1376 {
1377  static const struct {
1378  const char *name;
1379  mfxIMPL impl;
1380  } impl_map[] = {
1381  { "auto", MFX_IMPL_AUTO },
1382  { "sw", MFX_IMPL_SOFTWARE },
1383  { "hw", MFX_IMPL_HARDWARE },
1384  { "auto_any", MFX_IMPL_AUTO_ANY },
1385  { "hw_any", MFX_IMPL_HARDWARE_ANY },
1386  { "hw2", MFX_IMPL_HARDWARE2 },
1387  { "hw3", MFX_IMPL_HARDWARE3 },
1388  { "hw4", MFX_IMPL_HARDWARE4 },
1389  };
1390 
1391  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
1392  int i;
1393 
1394  if (device) {
1395  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
1396  if (!strcmp(device, impl_map[i].name)) {
1397  impl = impl_map[i].impl;
1398  break;
1399  }
1400  if (i == FF_ARRAY_ELEMS(impl_map))
1401  impl = strtol(device, NULL, 0);
1402  }
1403 
1404  if (impl != MFX_IMPL_SOFTWARE) {
1405  if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
1406  impl |= MFX_IMPL_VIA_D3D11;
1407  else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
1408  impl |= MFX_IMPL_VIA_D3D9;
1409  }
1410 
1411  return impl;
1412 }
1413 
1414 static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
1415                                         mfxIMPL implementation,
1416  AVHWDeviceContext *child_device_ctx,
1417  int flags)
1418 {
1419  AVQSVDeviceContext *hwctx = ctx->hwctx;
1420 
1421  mfxVersion ver = { { 3, 1 } };
1422  mfxHDL handle;
1423  mfxHandleType handle_type;
1424  mfxStatus err;
1425  int ret;
1426 
1427  switch (child_device_ctx->type) {
1428 #if CONFIG_VAAPI
1429     case AV_HWDEVICE_TYPE_VAAPI:
1430         {
1431  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1432  handle_type = MFX_HANDLE_VA_DISPLAY;
1433  handle = (mfxHDL)child_device_hwctx->display;
1434  }
1435  break;
1436 #endif
1437 #if CONFIG_D3D11VA
1438     case AV_HWDEVICE_TYPE_D3D11VA:
1439         {
1440  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1441  handle_type = MFX_HANDLE_D3D11_DEVICE;
1442  handle = (mfxHDL)child_device_hwctx->device;
1443  }
1444  break;
1445 #endif
1446 #if CONFIG_DXVA2
1447     case AV_HWDEVICE_TYPE_DXVA2:
1448         {
1449  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1450  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1451  handle = (mfxHDL)child_device_hwctx->devmgr;
1452  }
1453  break;
1454 #endif
1455  default:
1456  ret = AVERROR(ENOSYS);
1457  goto fail;
1458  }
1459 
1460  err = MFXInit(implementation, &ver, &hwctx->session);
1461  if (err != MFX_ERR_NONE) {
1462  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1463  "%d.\n", err);
1464  ret = AVERROR_UNKNOWN;
1465  goto fail;
1466  }
1467 
1468  err = MFXQueryVersion(hwctx->session, &ver);
1469  if (err != MFX_ERR_NONE) {
1470  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
1471  ret = AVERROR_UNKNOWN;
1472  goto fail;
1473  }
1474 
1475     av_log(ctx, AV_LOG_VERBOSE,
1476            "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
1477  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
1478 
1479  MFXClose(hwctx->session);
1480 
1481  err = MFXInit(implementation, &ver, &hwctx->session);
1482  if (err != MFX_ERR_NONE) {
1483         av_log(ctx, AV_LOG_ERROR,
1484                "Error initializing an MFX session: %d.\n", err);
1485  ret = AVERROR_UNKNOWN;
1486  goto fail;
1487  }
1488 
1489  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
1490  if (err != MFX_ERR_NONE) {
1491  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
1492  "%d\n", err);
1493  ret = AVERROR_UNKNOWN;
1494  goto fail;
1495  }
1496 
1497  return 0;
1498 
1499 fail:
1500  if (hwctx->session)
1501  MFXClose(hwctx->session);
1502  return ret;
1503 }
1504 
1505 static int qsv_device_derive(AVHWDeviceContext *ctx,
1506                              AVHWDeviceContext *child_device_ctx,
1507  AVDictionary *opts, int flags)
1508 {
1509  mfxIMPL impl;
1510  impl = choose_implementation("hw_any", child_device_ctx->type);
1511  return qsv_device_derive_from_child(ctx, impl,
1512  child_device_ctx, flags);
1513 }
1514 
1515 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
1516  AVDictionary *opts, int flags)
1517 {
1518  QSVDevicePriv *priv;
1519  enum AVHWDeviceType child_device_type;
1520  AVHWDeviceContext *child_device;
1521  AVDictionary *child_device_opts;
1522  AVDictionaryEntry *e;
1523 
1524  mfxIMPL impl;
1525  int ret;
1526 
1527  priv = av_mallocz(sizeof(*priv));
1528  if (!priv)
1529  return AVERROR(ENOMEM);
1530 
1531  ctx->user_opaque = priv;
1532  ctx->free = qsv_device_free;
1533 
1534  e = av_dict_get(opts, "child_device_type", NULL, 0);
1535  if (e) {
1536  child_device_type = av_hwdevice_find_type_by_name(e->value);
1537  if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
1538  av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
1539  "\"%s\".\n", e->value);
1540  return AVERROR(EINVAL);
1541  }
1542  } else if (CONFIG_VAAPI) {
1543  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
1544  } else if (CONFIG_DXVA2) {
1545         av_log(NULL, AV_LOG_WARNING,
1546                "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
1547  "with old commandlines. This behaviour will be removed "
1548  "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
1549  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
1550  } else if (CONFIG_D3D11VA) {
1551  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
1552  } else {
1553  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1554  return AVERROR(ENOSYS);
1555  }
1556 
1557  child_device_opts = NULL;
1558  switch (child_device_type) {
1559 #if CONFIG_VAAPI
1560     case AV_HWDEVICE_TYPE_VAAPI:
1561         {
1562  // libmfx does not actually implement VAAPI properly, rather it
1563  // depends on the specific behaviour of a matching iHD driver when
1564  // used on recent Intel hardware. Set options to the VAAPI device
1565  // creation so that we should pick a usable setup by default if
1566  // possible, even when multiple devices and drivers are available.
1567  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
1568  av_dict_set(&child_device_opts, "driver", "iHD", 0);
1569  }
1570  break;
1571 #endif
1572 #if CONFIG_D3D11VA
1573     case AV_HWDEVICE_TYPE_D3D11VA:
1574         break;
1575 #endif
1576 #if CONFIG_DXVA2
1577     case AV_HWDEVICE_TYPE_DXVA2:
1578         break;
1579 #endif
1580  default:
1581  {
1582  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1583  return AVERROR(ENOSYS);
1584  }
1585  break;
1586  }
1587 
1588  e = av_dict_get(opts, "child_device", NULL, 0);
1589  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
1590  e ? e->value : NULL, child_device_opts, 0);
1591 
1592  av_dict_free(&child_device_opts);
1593  if (ret < 0)
1594  return ret;
1595 
1596  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
1597 
1598  impl = choose_implementation(device, child_device_type);
1599 
1600  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
1601 }
1602 
1603 const HWContextType ff_hwcontext_type_qsv = {
1604     .type                   = AV_HWDEVICE_TYPE_QSV,
1605     .name                   = "QSV",
1606 
1607  .device_hwctx_size = sizeof(AVQSVDeviceContext),
1608  .device_priv_size = sizeof(QSVDeviceContext),
1609  .frames_hwctx_size = sizeof(AVQSVFramesContext),
1610  .frames_priv_size = sizeof(QSVFramesContext),
1611 
1612  .device_create = qsv_device_create,
1613  .device_derive = qsv_device_derive,
1614  .device_init = qsv_device_init,
1615  .frames_get_constraints = qsv_frames_get_constraints,
1616  .frames_init = qsv_frames_init,
1617  .frames_uninit = qsv_frames_uninit,
1618  .frames_get_buffer = qsv_get_buffer,
1619  .transfer_get_formats = qsv_transfer_get_formats,
1620  .transfer_data_to = qsv_transfer_data_to,
1621  .transfer_data_from = qsv_transfer_data_from,
1622  .map_to = qsv_map_to,
1623  .map_from = qsv_map_from,
1624  .frames_derive_to = qsv_frames_derive_to,
1625  .frames_derive_from = qsv_frames_derive_from,
1626 
1627  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
1628 };
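A minimal usage sketch (not part of this file; the wrapper function name is illustrative): applications do not call qsv_device_create() directly, they reach it through the public libavutil hwcontext API.

#include <libavutil/hwcontext.h>

/* Create an AVHWDeviceContext of type QSV. The "hw" string is parsed by
 * choose_implementation() above and selects MFX_IMPL_HARDWARE; a child
 * VAAPI/D3D11/DXVA2 device is created internally and the MFX session is
 * bound to its native handle. */
static int create_qsv_device(AVBufferRef **hw_device_ref)
{
    return av_hwdevice_ctx_create(hw_device_ref, AV_HWDEVICE_TYPE_QSV,
                                  "hw", NULL, 0);
}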
formats
formats
Definition: signature.h:48
pthread_mutex_t
_fmutex pthread_mutex_t
Definition: os2threads.h:53
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
qsv_transfer_data_child
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:916
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:49
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
out
FILE * out
Definition: movenc.c:54
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:243
comp
static void comp(unsigned char *dst, ptrdiff_t dst_stride, unsigned char *src, ptrdiff_t src_stride, int add)
Definition: eamad.c:85
QSVFramesContext::child_frames_ref
AVBufferRef * child_frames_ref
Definition: hwcontext_qsv.c:82
qsv_transfer_data_to
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1062
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2660
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
pthread_mutex_init
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:104
qsv_map_from
static int qsv_map_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:813
qsv_fourcc_from_pix_fmt
static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
Definition: hwcontext_qsv.c:115
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:209
qsv_fill_border
static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:142
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:109
QSVDeviceContext::ver
mfxVersion ver
Definition: hwcontext_qsv.c:65
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:333
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:317
pixdesc.h
index
fg index
Definition: ffmpeg_filter.c:167
AVFrame::width
int width
Definition: frame.h:389
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
AVD3D11VAFramesContext::MiscFlags
UINT MiscFlags
D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
Definition: hwcontext_d3d11va.h:166
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:247
qsv_device_derive
static int qsv_device_derive(AVHWDeviceContext *ctx, AVHWDeviceContext *child_device_ctx, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:1505
AVDXVA2FramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_dxva2.h:46
qsv_frames_derive_from
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:751
AV_HWDEVICE_TYPE_NONE
@ AV_HWDEVICE_TYPE_NONE
Definition: hwcontext.h:28
av_hwframe_map
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:789
qsv_init_surface
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
Definition: hwcontext_qsv.c:441
data
const char data[16]
Definition: mxf.c:143
choose_implementation
static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
Definition: hwcontext_qsv.c:1375
QSVDeviceContext
Definition: hwcontext_qsv.c:62
av_hwdevice_find_type_by_name
enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
Look up an AVHWDeviceType by name.
Definition: hwcontext.c:82
AVDXVA2DeviceContext::devmgr
IDirect3DDeviceManager9 * devmgr
Definition: hwcontext_dxva2.h:40
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVVAAPIDeviceContext::display
VADisplay display
The VADisplay handle, to be filled by the user.
Definition: hwcontext_vaapi.h:72
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
AVHWFramesContext::internal
AVHWFramesInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:134
AVDictionary
Definition: dict.c:30
ff_hwframe_map_create
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
Definition: hwcontext.c:737
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:458
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
fourcc
uint32_t fourcc
Definition: hwcontext_qsv.c:99
av_hwdevice_ctx_init
int av_hwdevice_ctx_init(AVBufferRef *ref)
Finalize the device context before use.
Definition: hwcontext.c:200
AVFrame::buf
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:513
QSVDeviceContext::handle_type
mfxHandleType handle_type
Definition: hwcontext_qsv.c:64
qsv_transfer_data_from
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:993
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:338
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:453
QSVDevicePriv
Definition: hwcontext_qsv.c:58
AVD3D11VAFramesContext::BindFlags
UINT BindFlags
D3D11_TEXTURE2D_DESC.BindFlags used for texture creation.
Definition: hwcontext_d3d11va.h:160
AVVAAPIFramesContext::surface_ids
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
Definition: hwcontext_vaapi.h:101
AVHWFramesInternal::priv
void * priv
Definition: hwcontext_internal.h:116
AVD3D11FrameDescriptor::texture
ID3D11Texture2D * texture
The texture in which the frame is located.
Definition: hwcontext_d3d11va.h:117
QSVDeviceContext::child_device_type
enum AVHWDeviceType child_device_type
Definition: hwcontext_qsv.c:68
qsv_init_child_ctx
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:296
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:128
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
qsv_frames_get_constraints
static int qsv_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_qsv.c:1338
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
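A sketch of a pool with a custom allocator; the callback and counter below are illustrative and not part of the library:

#include <stdint.h>
#include <libavutil/buffer.h>
#include <libavutil/error.h>
#include <libavutil/mem.h>

/* Allocator that counts how many buffers were actually allocated. */
static AVBufferRef *counting_alloc(void *opaque, size_t size)
{
    int *count = opaque;
    uint8_t *data = av_malloc(size);
    if (!data)
        return NULL;
    (*count)++;
    return av_buffer_create(data, size, av_buffer_default_free, NULL, 0);
}

static int pool_demo(void)
{
    int count = 0;
    AVBufferPool *pool = av_buffer_pool_init2(4096, &count, counting_alloc, NULL);
    AVBufferRef *buf;

    if (!pool)
        return AVERROR(ENOMEM);

    buf = av_buffer_pool_get(pool);  /* allocates through counting_alloc */
    av_buffer_unref(&buf);           /* buffer goes back to the pool */
    buf = av_buffer_pool_get(pool);  /* reused from the pool: count stays at 1 */
    av_buffer_unref(&buf);

    av_buffer_pool_uninit(&pool);
    return 0;
}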
qsv_frames_derive_to
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:1176
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:97
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:550
AV_PIX_FMT_Y210
#define AV_PIX_FMT_Y210
Definition: pixfmt.h:456
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:229
av_hwdevice_ctx_alloc
AVBufferRef * av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
Allocate an AVHWDeviceContext for a given hardware type.
Definition: hwcontext.c:142
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:465
QSVFramesContext::ext_buffers
mfxExtBuffer * ext_buffers[1]
Definition: hwcontext_qsv.c:93
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:522
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:40
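A small dictionary round trip for illustration; the option key and value shown are only examples:

#include <stdio.h>
#include <libavutil/dict.h>

int main(void)
{
    AVDictionary *opts = NULL;
    AVDictionaryEntry *e = NULL;

    av_dict_set(&opts, "child_device", "/dev/dri/renderD128", 0);

    /* An empty key with AV_DICT_IGNORE_SUFFIX iterates every entry. */
    while ((e = av_dict_get(opts, "", e, AV_DICT_IGNORE_SUFFIX)))
        printf("%s = %s\n", e->key, e->value);

    av_dict_free(&opts);
    return 0;
}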
av_memcpy_backptr
void av_memcpy_backptr(uint8_t *dst, int back, int cnt)
Overlapping memcpy() implementation.
Definition: mem.c:454
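For instance, replicating a 3-byte pattern across a buffer with an overlapping backwards copy; the helper name is illustrative:

#include <stddef.h>
#include <stdint.h>
#include <libavutil/mem.h>

static void repeat_pattern(uint8_t *buf, size_t size)
{
    if (size <= 3)
        return;
    /* buf[0..2] hold the pattern; copy it repeatedly into the rest of the
     * buffer, which a plain memcpy() could not do because the source and
     * destination regions overlap. */
    av_memcpy_backptr(buf + 3, 3, (int)(size - 3));
}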
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:387
QSVDevicePriv::child_device_ctx
AVBufferRef * child_device_ctx
Definition: hwcontext_qsv.c:59
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:127
AVD3D11VADeviceContext::device
ID3D11Device * device
Device used for texture creation and access.
Definition: hwcontext_d3d11va.h:56
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:51
AVHWDeviceType
AVHWDeviceType
Definition: hwcontext.h:27
QSVDeviceContext::handle
mfxHDL handle
Definition: hwcontext_qsv.c:63
QSVFramesContext::mem_ids
mfxMemId * mem_ids
Definition: hwcontext_qsv.c:88
AVDXVA2FramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_dxva2.h:59
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
ff_hwcontext_type_qsv
const HWContextType ff_hwcontext_type_qsv
Definition: hwcontext_qsv.c:1603
AVD3D11VAFramesContext::texture_infos
AVD3D11FrameDescriptor * texture_infos
If the texture structure member above is not NULL, it contains the same texture pointer for all eleme...
Definition: hwcontext_d3d11va.h:175
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:43
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
qsv_frames_uninit
static void qsv_frames_uninit(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:246
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVComponentDescriptor
Definition: pixdesc.h:30
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: hwcontext_qsv.c:56
qsv_frames_init
static int qsv_frames_init(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:653
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:212
map_frame_to_surface
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
Definition: hwcontext_qsv.c:950
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:560
pthread_mutex_unlock
#define pthread_mutex_unlock(a)
Definition: ffprobe.c:68
AVD3D11VAFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_d3d11va.h:131
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
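A sketch of wrapping caller-owned memory without transferring ownership, similar in spirit to how this file wraps pre-existing surfaces with a dummy release callback; the names below are illustrative:

#include <stdint.h>
#include <libavutil/buffer.h>

/* Free callback that does nothing: the memory stays owned by the caller. */
static void no_free(void *opaque, uint8_t *data)
{
}

static AVBufferRef *wrap_static_buffer(uint8_t *data, size_t size)
{
    return av_buffer_create(data, size, no_free, NULL, 0);
}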
qsv_init_internal_session
static int qsv_init_internal_session(AVHWFramesContext *ctx, mfxSession *session, int upload)
Definition: hwcontext_qsv.c:577
hwcontext_dxva2.h
QSVFramesContext::opaque_alloc
mfxExtOpaqueSurfaceAlloc opaque_alloc
Definition: hwcontext_qsv.c:92
QSVFramesContext::session_upload_init
int session_upload_init
Definition: hwcontext_qsv.c:76
qsv_get_buffer
static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
Definition: hwcontext_qsv.c:719
AVDXVA2FramesContext::surface_type
DWORD surface_type
The surface type (e.g.
Definition: hwcontext_dxva2.h:51
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:678
QSVFramesContext::session_download_init
int session_download_init
Definition: hwcontext_qsv.c:74
QSVFramesContext::nb_surfaces_used
int nb_surfaces_used
Definition: hwcontext_qsv.c:85
qsv_device_free
static void qsv_device_free(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:1363
AVFrame::format
int format
format of the frame, -1 if unknown or unset. Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:404
qsv_transfer_get_formats
static int qsv_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_qsv.c:733
buffer.h
AVD3D11VAFramesContext::texture
ID3D11Texture2D * texture
The canonical texture used for pool allocation.
Definition: hwcontext_d3d11va.h:152
qsv_device_derive_from_child
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx, mfxIMPL implementation, AVHWDeviceContext *child_device_ctx, int flags)
Definition: hwcontext_qsv.c:1414
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:303
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:203
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_qsv.h:44
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: hwcontext_qsv.c:565
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:119
pthread_cond_destroy
static av_always_inline int pthread_cond_destroy(pthread_cond_t *cond)
Definition: os2threads.h:144
AV_HWDEVICE_TYPE_VAAPI
@ AV_HWDEVICE_TYPE_VAAPI
Definition: hwcontext.h:31
pthread_mutex_destroy
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:112
av_image_get_linesize
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
Compute the size of an image line with format pix_fmt and width width for the plane plane.
Definition: imgutils.c:76
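For example, for a 1920-pixel-wide NV12 image both the Y plane and the interleaved UV plane come out at 1920 bytes per line:

#include <stdio.h>
#include <libavutil/imgutils.h>
#include <libavutil/pixfmt.h>

int main(void)
{
    int y_stride  = av_image_get_linesize(AV_PIX_FMT_NV12, 1920, 0);
    int uv_stride = av_image_get_linesize(AV_PIX_FMT_NV12, 1920, 1);
    printf("Y: %d, UV: %d\n", y_stride, uv_stride); /* prints "Y: 1920, UV: 1920" */
    return 0;
}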
hwcontext_qsv.h
qsv_device_init
static int qsv_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:190
AVDXVA2DeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_dxva2.h:39
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:32
common.h
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
QSVFramesContext::handle_pairs_internal
mfxHDLPair * handle_pairs_internal
Definition: hwcontext_qsv.c:84
AVD3D11FrameDescriptor::index
intptr_t index
The index into the array texture element representing the frame, or 0 if the texture is not an array ...
Definition: hwcontext_d3d11va.h:125
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
QSVFramesContext::surface_ptrs
mfxFrameSurface1 ** surface_ptrs
Definition: hwcontext_qsv.c:90
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:435
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:263
QSVFramesContext::session_download
mfxSession session_download
Definition: hwcontext_qsv.c:73
AVDXVA2FramesContext::surfaces
IDirect3DSurface9 ** surfaces
The surface pool.
Definition: hwcontext_dxva2.h:58
pthread_cond_t
Definition: os2threads.h:58
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:271
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:415
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
hwcontext_vaapi.h
qsv_map_to
static int qsv_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:1279
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
pix_fmt
enum AVPixelFormat pix_fmt
Definition: hwcontext_qsv.c:98
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:79
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
AVHWFramesContext::device_ctx
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:149
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:162
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:610
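A minimal sketch of opening a QSV device; the "child_device" option and the DRM render node path apply to a VAAPI child device on Linux and are purely illustrative:

#include <libavutil/buffer.h>
#include <libavutil/dict.h>
#include <libavutil/hwcontext.h>

static AVBufferRef *open_qsv_device(void)
{
    AVBufferRef *device = NULL;
    AVDictionary *opts = NULL;
    int ret;

    /* Optionally point the child device at a specific node. */
    av_dict_set(&opts, "child_device", "/dev/dri/renderD128", 0);

    ret = av_hwdevice_ctx_create(&device, AV_HWDEVICE_TYPE_QSV, NULL, opts, 0);
    av_dict_free(&opts);
    return ret < 0 ? NULL : device;
}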
QSVDeviceContext::impl
mfxIMPL impl
Definition: hwcontext_qsv.c:66
av_hwframe_transfer_data
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
Copy data to or from a hw surface.
Definition: hwcontext.c:443
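A sketch of downloading a hardware frame into a freshly allocated software frame; 'hw' is assumed to reference a valid hardware surface and the helper name is illustrative:

#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

static AVFrame *download_frame(const AVFrame *hw)
{
    AVFrame *sw = av_frame_alloc();
    if (!sw)
        return NULL;
    /* With sw->format left unset, the first format reported by
     * av_hwframe_transfer_get_formats() is used for the download. */
    if (av_hwframe_transfer_data(sw, hw, 0) < 0) {
        av_frame_free(&sw);
        return NULL;
    }
    return sw;
}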
pthread_cond_signal
static av_always_inline int pthread_cond_signal(pthread_cond_t *cond)
Definition: os2threads.h:152
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:555
supported_pixel_formats
static const struct @304 supported_pixel_formats[]
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:643
AV_HWDEVICE_TYPE_QSV
@ AV_HWDEVICE_TYPE_QSV
Definition: hwcontext.h:33
qsv_pool_release_dummy
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
Definition: hwcontext_qsv.c:277
AVFrame::height
int height
Definition: frame.h:389
QSVDeviceContext::child_pix_fmt
enum AVPixelFormat child_pix_fmt
Definition: hwcontext_qsv.c:69
AVVAAPIFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_vaapi.h:102
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
QSVFramesContext::session_upload
mfxSession session_upload
Definition: hwcontext_qsv.c:75
qsv_device_create
static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:1515
pthread_cond_wait
static av_always_inline int pthread_cond_wait(pthread_cond_t *cond, pthread_mutex_t *mutex)
Definition: os2threads.h:192
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:453
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:42
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:199
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
hwcontext_internal.h
AVVAAPIFramesContext
VAAPI-specific data associated with a frame pool.
Definition: hwcontext_vaapi.h:88
QSVFramesContext::surfaces_internal
mfxFrameSurface1 * surfaces_internal
Definition: hwcontext_qsv.c:83
AVDictionaryEntry
Definition: dict.h:79
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
QSVFramesContext
Definition: qsv_internal.h:95
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
QSVFramesContext::realigned_tmp_frame
AVFrame realigned_tmp_frame
Definition: hwcontext_qsv.c:94
imgutils.h
hwcontext.h
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value typically indicating the size in bytes of each pict...
Definition: frame.h:362
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
HWContextType
Definition: hwcontext_internal.h:29
qsv_pool_alloc
static AVBufferRef * qsv_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_qsv.c:281
pthread_cond_init
static av_always_inline int pthread_cond_init(pthread_cond_t *cond, const pthread_condattr_t *attr)
Definition: os2threads.h:133
ID3D11Device
void ID3D11Device
Definition: nvenc.h:28
AVVAAPIDeviceContext
VAAPI connection details.
Definition: hwcontext_vaapi.h:68
AVDictionaryEntry::value
char * value
Definition: dict.h:81
hwcontext_d3d11va.h
qsv_init_pool
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
Definition: hwcontext_qsv.c:477
pthread_mutex_lock
#define pthread_mutex_lock(a)
Definition: ffprobe.c:64