FFmpeg
hwcontext_qsv.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdatomic.h>
20 #include <stdint.h>
21 #include <string.h>
22 
23 #include <mfx/mfxvideo.h>
24 
25 #include "config.h"
26 
27 #if HAVE_PTHREADS
28 #include <pthread.h>
29 #endif
30 
31 #define COBJMACROS
32 #if CONFIG_VAAPI
33 #include "hwcontext_vaapi.h"
34 #endif
35 #if CONFIG_D3D11VA
36 #include "hwcontext_d3d11va.h"
37 #endif
38 #if CONFIG_DXVA2
39 #include "hwcontext_dxva2.h"
40 #endif
41 
42 #include "buffer.h"
43 #include "common.h"
44 #include "hwcontext.h"
45 #include "hwcontext_internal.h"
46 #include "hwcontext_qsv.h"
47 #include "mem.h"
48 #include "pixfmt.h"
49 #include "pixdesc.h"
50 #include "time.h"
51 #include "imgutils.h"
52 
/* Evaluate to nonzero when the libmfx headers we build against are at least
 * version MAJOR.MINOR.  Fully parenthesized so the ||/&& precedence is
 * explicit (silences -Wparentheses and avoids misreading). */
#define QSV_VERSION_ATLEAST(MAJOR, MINOR)   \
    (MFX_VERSION_MAJOR > (MAJOR) ||         \
     (MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR)))
56 
/* Extract the MFX_IMPL_VIA_* routing bits (bits 8-11) from an mfxIMPL value. */
#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
58 
/* NOTE(review): the definitions below were mangled when this listing was
 * extracted: QSVDevicePriv's members and closing brace; QSVDeviceContext's
 * trailing members (child_device_type / child_pix_fmt, judging by their use
 * in qsv_device_init below) and closing brace; several QSVFramesContext
 * members (the session_download_init / session_upload_init atomics,
 * nb_surfaces_used, child_frames_ref, realigned_upload_frame /
 * realigned_download_frame -- all referenced by the functions below) and its
 * closing brace; and the pix_fmt member plus the array name of the
 * pix_fmt -> fourcc table (used as supported_pixel_formats below).
 * Restore from the original file before editing this region. */
typedef struct QSVDevicePriv {

typedef struct QSVDeviceContext {
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;


typedef struct QSVFramesContext {
    mfxSession session_download;
    mfxSession session_upload;
#if HAVE_PTHREADS
    /* serializes lazy creation of the two sessions above */
    pthread_mutex_t session_lock;
#endif

    mfxFrameSurface1 *surfaces_internal;
    mfxHDLPair *handle_pairs_internal;

    // used in the frame allocator for non-opaque surfaces
    mfxMemId *mem_ids;
    // used in the opaque alloc request for opaque surfaces
    mfxFrameSurface1 **surface_ptrs;

    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer *ext_buffers[1];

/* Mapping between FFmpeg pixel formats and libmfx FourCC codes. */
static const struct {
    uint32_t fourcc;
    { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
    { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
    { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
    { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
#if CONFIG_VAAPI
    MFX_FOURCC_YUY2 },
#if QSV_VERSION_ATLEAST(1, 27)
    { AV_PIX_FMT_Y210,
    MFX_FOURCC_Y210 },
#endif
#endif
};
115 
116 extern int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
117  enum AVHWDeviceType base_dev_type,
118  void **base_handle);
119 
/**
 * Retrieve the raw child-device handle(s) backing a QSV surface.
 *
 * Caller needs to allocate enough space for base_handle pointer.
 * (Two slots are written for D3D11, one otherwise.)
 *
 * NOTE(review): the per-backend `case AV_HWDEVICE_TYPE_*:` labels appear to
 * have been lost when this listing was extracted -- each #if block below
 * presumably starts with the matching case label; confirm against upstream.
 **/
int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
                                   enum AVHWDeviceType base_dev_type,
                                   void **base_handle)
{
    mfxHDLPair *handle_pair;
    /* MemId of these surfaces is always an mfxHDLPair (see the allocator and
     * pool-init code below). */
    handle_pair = surf->Data.MemId;
    switch (base_dev_type) {
#if CONFIG_VAAPI
        /* VAAPI: first points at a VASurfaceID (see qsv_frames_derive_from) */
        base_handle[0] = handle_pair->first;
        return 0;
#endif
#if CONFIG_D3D11VA
        /* D3D11: first is the ID3D11Texture2D*, second the texture array index */
        base_handle[0] = handle_pair->first;
        base_handle[1] = handle_pair->second;
        return 0;
#endif
#if CONFIG_DXVA2
        /* DXVA2: first is the IDirect3DSurface9* */
        base_handle[0] = handle_pair->first;
        return 0;
#endif
    }
    /* Unknown or not-compiled-in child device type. */
    return AVERROR(EINVAL);
}
149 
/* Look up the libmfx FourCC for an FFmpeg pixel format; returns 0 when the
 * format is unsupported.
 * NOTE(review): extraction gaps -- the signature (presumably
 * `static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)`,
 * judging by the callers below) and the `if (supported_pixel_formats[i].pix_fmt
 * == pix_fmt)` guard inside the loop are missing from this listing; as shown
 * the loop would unconditionally return the first table entry.  Confirm
 * against upstream. */
{
    int i;
    for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
        return supported_pixel_formats[i].fourcc;
    }
    return 0;
}
159 
#if CONFIG_D3D11VA
/* Translate libmfx memory-type flags into D3D11 texture bind flags.
 * Internal encoder targets additionally get the video-encoder binding;
 * VPP output / processor targets override everything with render-target. */
static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
{
    const int internal_enc = (mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) &&
                             (mem_type & MFX_MEMTYPE_INTERNAL_FRAME);
    uint32_t flags;

    flags = internal_enc ? (D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER)
                         : D3D11_BIND_DECODER;

    if (mem_type & (MFX_MEMTYPE_FROM_VPPOUT | MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
        flags = D3D11_BIND_RENDER_TARGET;

    return flags;
}
#endif
176 
/* Fill the right/bottom padding area of an aligned destination frame by
 * replicating the last valid column/row of the source image, so the padded
 * surface handed to libmfx contains no uninitialized bytes.  src and dst must
 * share the same pixel format.
 * NOTE(review): extraction gap -- the statement initializing `desc`
 * (presumably `desc = av_pix_fmt_desc_get(dst->format);`) is missing from
 * this listing; `desc` is read below.  Confirm against upstream. */
static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
{
    const AVPixFmtDescriptor *desc;
    int i, planes_nb = 0;
    if (dst->format != src->format)
        return AVERROR(EINVAL);

    /* number of planes = highest plane index used by any component + 1 */
    for (i = 0; i < desc->nb_components; i++)
        planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);

    for (i = 0; i < planes_nb; i++) {
        int sheight, dheight, y;
        ptrdiff_t swidth = av_image_get_linesize(src->format,
                                                 src->width,
                                                 i);
        ptrdiff_t dwidth = av_image_get_linesize(dst->format,
                                                 dst->width,
                                                 i);
        const AVComponentDescriptor comp = desc->comp[i];
        if (swidth < 0 || dwidth < 0) {
            av_log(NULL, AV_LOG_ERROR, "av_image_get_linesize failed\n");
            return AVERROR(EINVAL);
        }
        sheight = src->height;
        dheight = dst->height;
        if (i) {
            /* chroma planes are vertically subsampled */
            sheight = AV_CEIL_RSHIFT(src->height, desc->log2_chroma_h);
            dheight = AV_CEIL_RSHIFT(dst->height, desc->log2_chroma_h);
        }
        //fill right padding: repeat the last 1- or 2-byte pixel to the row end
        for (y = 0; y < sheight; y++) {
            void *line_ptr = dst->data[i] + y*dst->linesize[i] + swidth;
            av_memcpy_backptr(line_ptr,
                              comp.depth > 8 ? 2 : 1,
                              dwidth - swidth);
        }
        //fill bottom padding: duplicate the last valid (already padded) row
        for (y = sheight; y < dheight; y++) {
            memcpy(dst->data[i]+y*dst->linesize[i],
                   dst->data[i]+(sheight-1)*dst->linesize[i],
                   dwidth);
        }
    }
    return 0;
}
224 
/* Query the user-supplied MFX session and cache its implementation, version
 * and (when available) the native child-device handle.
 * NOTE(review): extraction gaps -- the signature (presumably
 * `static int qsv_device_init(AVHWDeviceContext *ctx)`) and the per-backend
 * `pix_fmt = AV_PIX_FMT_...;` assignments inside the #if blocks are missing
 * from this listing; `pix_fmt` is consumed below, so as shown it would be
 * read uninitialized.  Confirm against upstream. */
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDeviceContext *s = ctx->internal->priv;
    int hw_handle_supported = 0;
    mfxHandleType handle_type;
    enum AVHWDeviceType device_type;
    enum AVPixelFormat pix_fmt;
    mfxStatus err;

    /* Cache implementation and API version of the session. */
    err = MFXQueryIMPL(hwctx->session, &s->impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(hwctx->session, &s->ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    /* Map the session's "via" bits to the matching child device API, when
     * that API was compiled in. */
    if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
#if CONFIG_VAAPI
        handle_type = MFX_HANDLE_VA_DISPLAY;
        device_type = AV_HWDEVICE_TYPE_VAAPI;
        hw_handle_supported = 1;
#endif
    } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
#if CONFIG_D3D11VA
        handle_type = MFX_HANDLE_D3D11_DEVICE;
        device_type = AV_HWDEVICE_TYPE_D3D11VA;
        hw_handle_supported = 1;
#endif
    } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
#if CONFIG_DXVA2
        handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
        device_type = AV_HWDEVICE_TYPE_DXVA2;
        hw_handle_supported = 1;
#endif
    }

    if (hw_handle_supported) {
        err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
        if (err == MFX_ERR_NONE) {
            s->handle_type = handle_type;
            s->child_device_type = device_type;
            s->child_pix_fmt = pix_fmt;
        }
    }
    /* Not fatal: without a handle, only some transfer paths are unavailable. */
    if (!s->handle) {
        av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
               "from the session\n");
    }
    return 0;
}
280 
/* Tear down everything the frames context allocated: the internal
 * upload/download VPP sessions, the session lock, the surface/mem-id arrays
 * and the child frames reference.
 * NOTE(review): the signature line (presumably
 * `static void qsv_frames_uninit(AVHWFramesContext *ctx)`) is missing from
 * this listing. */
{
    QSVFramesContext *s = ctx->internal->priv;

    if (s->session_download) {
        MFXVideoVPP_Close(s->session_download);
        MFXClose(s->session_download);
    }
    s->session_download = NULL;
    s->session_download_init = 0;

    if (s->session_upload) {
        MFXVideoVPP_Close(s->session_upload);
        MFXClose(s->session_upload);
    }
    s->session_upload = NULL;
    s->session_upload_init = 0;

#if HAVE_PTHREADS
    pthread_mutex_destroy(&s->session_lock);
#endif

    av_freep(&s->mem_ids);
    av_freep(&s->surface_ptrs);
    av_freep(&s->surfaces_internal);
    av_freep(&s->handle_pairs_internal);
    av_frame_unref(&s->realigned_upload_frame);
    av_frame_unref(&s->realigned_download_frame);
    av_buffer_unref(&s->child_frames_ref);
}
311 
/* Buffer-pool free callback: intentionally a no-op.  The pooled
 * mfxFrameSurface1 entries live in s->surfaces_internal, which is owned and
 * freed by the frames context (see qsv_frames_uninit), not by the individual
 * AVBufferRefs handed out by qsv_pool_alloc. */
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}
315 
/* Buffer-pool alloc callback: hand out the preallocated surfaces one by one.
 * The pool never grows beyond nb_surfaces, and the dummy release callback
 * leaves ownership with the frames context.
 * NOTE(review): extraction gap -- the declaration initializing `ctx`
 * (presumably `AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;`) is
 * missing from this listing. */
static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *hwctx = ctx->hwctx;

    if (s->nb_surfaces_used < hwctx->nb_surfaces) {
        s->nb_surfaces_used++;
        return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
                                sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
    }

    return NULL;
}
330 
/* Create a surface pool in the child (VAAPI/D3D11VA/DXVA2) hwcontext and
 * wrap its native handles into the mfxHDLPair / mfxFrameSurface1 arrays the
 * QSV context exposes.
 * NOTE(review): extraction gaps -- the signature (presumably
 * `static int qsv_init_child_pool(AVHWFramesContext *ctx, uint32_t fourcc)`)
 * and the opening of the av_log() call in the first error branch are missing
 * from this listing.  Confirm against upstream. */
{
    AVQSVFramesContext *hwctx = ctx->hwctx;
    QSVFramesContext *s = ctx->internal->priv;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *child_device_ref = NULL;
    AVBufferRef *child_frames_ref = NULL;

    AVHWDeviceContext *child_device_ctx;
    AVHWFramesContext *child_frames_ctx;

    int i, ret = 0;

    if (!device_priv->handle) {
        /* NOTE(review): missing line here -- presumably
         * `av_log(ctx, AV_LOG_ERROR,` opening this message. */
        "Cannot create a non-opaque internal surface pool without "
        "a hardware handle\n");
        return AVERROR(EINVAL);
    }

    child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
    if (!child_device_ref)
        return AVERROR(ENOMEM);
    child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;

    /* Wire the native device handle cached by qsv_device_init into the newly
     * allocated child device context. */
#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->display = (VADisplay)device_priv->handle;
    }
#endif
#if CONFIG_D3D11VA
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
        AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        /* extra reference: the child device context will release it */
        ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
        child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
    }
#endif

    ret = av_hwdevice_ctx_init(child_device_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
        goto fail;
    }

    child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
    if (!child_frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;

    /* libmfx requires 16-aligned dimensions; the child pool is allocated at
     * the aligned size. */
    child_frames_ctx->format = device_priv->child_pix_fmt;
    child_frames_ctx->sw_format = ctx->sw_format;
    child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
    child_frames_ctx->width = FFALIGN(ctx->width, 16);
    child_frames_ctx->height = FFALIGN(ctx->height, 16);

#if CONFIG_D3D11VA
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
        AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        if (hwctx->frame_type == 0)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
            child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
        child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
            child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        else
            child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
    }
#endif

    ret = av_hwframe_ctx_init(child_frames_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
        goto fail;
    }

    /* Wrap each child surface's native handle into an mfxHDLPair and point
     * the corresponding mfxFrameSurface1.Data.MemId at it. */
#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++) {
            s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
            /* MFX_INFINITE marks "no second handle" (see frame_get_hdl) */
            s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
            s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
        }
        hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif
#if CONFIG_D3D11VA
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
        AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++) {
            s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
            if(child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
                s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
            } else {
                s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
            }
            s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
        }
        if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
            hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        } else {
            hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
        }
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++) {
            s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
            s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
            s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
        }
        dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
        if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        else
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif

    /* Success: keep the child frames reference; NULL it so the common
     * cleanup below does not release it. */
    s->child_frames_ref = child_frames_ref;
    child_frames_ref = NULL;

fail:
    av_buffer_unref(&child_device_ref);
    av_buffer_unref(&child_frames_ref);
    return ret;
}
475 
476 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
477 {
478  const AVPixFmtDescriptor *desc;
479  uint32_t fourcc;
480 
481  desc = av_pix_fmt_desc_get(ctx->sw_format);
482  if (!desc)
483  return AVERROR(EINVAL);
484 
485  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
486  if (!fourcc)
487  return AVERROR(EINVAL);
488 
489  surf->Info.BitDepthLuma = desc->comp[0].depth;
490  surf->Info.BitDepthChroma = desc->comp[0].depth;
491  surf->Info.Shift = desc->comp[0].depth > 8;
492 
493  if (desc->log2_chroma_w && desc->log2_chroma_h)
494  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
495  else if (desc->log2_chroma_w)
496  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
497  else
498  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
499 
500  surf->Info.FourCC = fourcc;
501  surf->Info.Width = FFALIGN(ctx->width, 16);
502  surf->Info.CropW = ctx->width;
503  surf->Info.Height = FFALIGN(ctx->height, 16);
504  surf->Info.CropH = ctx->height;
505  surf->Info.FrameRateExtN = 25;
506  surf->Info.FrameRateExtD = 1;
507  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
508 
509  return 0;
510 }
511 
/* Allocate the internal fixed-size surface pool (handle pairs plus
 * mfxFrameSurface1 array) and publish it via ctx->internal->pool_internal and
 * the public frames hwctx.
 * NOTE(review): extraction gaps -- the signature (presumably
 * `static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)`), the
 * call whose result is checked inside the non-opaque branch (presumably
 * `ret = qsv_init_child_pool(ctx, fourcc);`) and the trailing arguments of
 * the av_buffer_pool_init2() call are missing from this listing.  Confirm
 * against upstream. */
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int i, ret = 0;

    if (ctx->initial_pool_size <= 0) {
        av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
        return AVERROR(EINVAL);
    }

    s->handle_pairs_internal = av_calloc(ctx->initial_pool_size,
                                         sizeof(*s->handle_pairs_internal));
    if (!s->handle_pairs_internal)
        return AVERROR(ENOMEM);

    s->surfaces_internal = av_calloc(ctx->initial_pool_size,
                                     sizeof(*s->surfaces_internal));
    if (!s->surfaces_internal)
        return AVERROR(ENOMEM);

    for (i = 0; i < ctx->initial_pool_size; i++) {
        ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
        if (ret < 0)
            return ret;
    }

    /* non-opaque surfaces need real backing memory from the child API */
    if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
        if (ret < 0)
            return ret;
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    frames_hwctx->surfaces = s->surfaces_internal;
    frames_hwctx->nb_surfaces = ctx->initial_pool_size;

    return 0;
}
556 
557 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
558  mfxFrameAllocResponse *resp)
559 {
560  AVHWFramesContext *ctx = pthis;
561  QSVFramesContext *s = ctx->internal->priv;
562  AVQSVFramesContext *hwctx = ctx->hwctx;
563  mfxFrameInfo *i = &req->Info;
564  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
565 
566  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
567  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
568  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
569  return MFX_ERR_UNSUPPORTED;
570  if (i->Width > i1->Width || i->Height > i1->Height ||
571  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
572  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
573  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
574  i->Width, i->Height, i->FourCC, i->ChromaFormat,
575  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
576  return MFX_ERR_UNSUPPORTED;
577  }
578 
579  resp->mids = s->mem_ids;
580  resp->NumFrameActual = hwctx->nb_surfaces;
581 
582  return MFX_ERR_NONE;
583 }
584 
/* mfxFrameAllocator.Free: no-op -- the surface pool is owned by the frames
 * context, so there is nothing to release per response. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}
589 
/* mfxFrameAllocator.Lock: mapping surfaces to system memory is not supported
 * by this allocator; always fails. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
594 
/* mfxFrameAllocator.Unlock: counterpart of frame_lock; likewise unsupported. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
599 
600 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
601 {
602  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
603  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
604 
605  pair_dst->first = pair_src->first;
606 
607  if (pair_src->second != (mfxMemId)MFX_INFINITE)
608  pair_dst->second = pair_src->second;
609  return MFX_ERR_NONE;
610 }
611 
/* Create and configure an internal MFX session whose VPP pipeline is used
 * purely for copying surfaces between system and video/opaque memory.  If
 * VPP init fails, the session is closed, *session is set to NULL and 0 is
 * still returned -- the caller then falls back to other transfer paths.
 * NOTE(review): the first signature line (presumably
 * `static int qsv_init_internal_session(AVHWFramesContext *ctx,`) is missing
 * from this listing. */
                                     mfxSession *session, int upload)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    mfxFrameAllocator frame_allocator = {
        .pthis = ctx,
        .Alloc = frame_alloc,
        .Lock = frame_lock,
        .Unlock = frame_unlock,
        .GetHDL = frame_get_hdl,
        .Free = frame_free,
    };

    mfxVideoParam par;
    mfxStatus err;

    err = MFXInit(device_priv->impl, &device_priv->ver, session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
        return AVERROR_UNKNOWN;
    }

    if (device_priv->handle) {
        err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
                                     device_priv->handle);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    /* opaque surfaces are managed by libmfx itself; only non-opaque ones need
     * our custom allocator */
    if (!opaque) {
        err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    memset(&par, 0, sizeof(par));

    if (opaque) {
        par.ExtParam = s->ext_buffers;
        par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
        par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
                                 MFX_IOPATTERN_IN_OPAQUE_MEMORY;
    } else {
        par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
                                 MFX_IOPATTERN_IN_VIDEO_MEMORY;
    }

    /* the other endpoint of the copy is always system memory */
    par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
                              MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    par.AsyncDepth = 1;

    par.vpp.In = frames_hwctx->surfaces[0].Info;

    /* Apparently VPP requires the frame rate to be set to some value, otherwise
     * init will fail (probably for the framerate conversion filter). Since we
     * are only doing data upload/download here, we just invent an arbitrary
     * value */
    par.vpp.In.FrameRateExtN = 25;
    par.vpp.In.FrameRateExtD = 1;
    par.vpp.Out = par.vpp.In;

    err = MFXVideoVPP_Init(*session, &par);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
               "Surface upload/download will not be possible\n");
        MFXClose(*session);
        *session = NULL;
    }

    return 0;
}
687 
/* Frames-context init: validate the sw_format, create the internal pool when
 * the user did not supply one, and prepare either the opaque-alloc ext buffer
 * or the mem-id array consumed by the frame allocator.
 * NOTE(review): extraction gaps -- the signature (presumably
 * `static int qsv_frames_init(AVHWFramesContext *ctx)`) and the call whose
 * result feeds the `if (ret < 0)` inside the !ctx->pool branch (presumably
 * `ret = qsv_init_pool(ctx, fourcc);`) are missing from this listing. */
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    uint32_t fourcc;
    int i, ret;

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
            return ret;
        }
    }

    if (opaque) {
        /* opaque mode: libmfx wants an array of surface pointers in an
         * mfxExtOpaqueSurfaceAlloc ext buffer */
        s->surface_ptrs = av_calloc(frames_hwctx->nb_surfaces,
                                    sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = frames_hwctx->surfaces + i;

        s->opaque_alloc.In.Surfaces = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
        s->opaque_alloc.In.Type = frames_hwctx->frame_type;

        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
    } else {
        /* non-opaque mode: the frame allocator hands out these mem ids */
        s->mem_ids = av_calloc(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
    }

    /* upload/download sessions are created lazily on first transfer */
    s->session_download = NULL;
    s->session_upload = NULL;

    s->session_download_init = 0;
    s->session_upload_init = 0;

#if HAVE_PTHREADS
    pthread_mutex_init(&s->session_lock, NULL);
#endif

    return 0;
}
752 
/* Get one surface from the pool and wrap it as an AV_PIX_FMT_QSV frame; the
 * mfxFrameSurface1 pointer travels in data[3], per the AV_PIX_FMT_QSV
 * convention used throughout this file.
 * NOTE(review): the signature line (presumably
 * `static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)`) is
 * missing from this listing. */
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format = AV_PIX_FMT_QSV;
    frame->width = ctx->width;
    frame->height = ctx->height;

    return 0;
}
766 
/* Report the formats usable for transfers: only the context's sw_format.
 * Caller owns the returned AV_PIX_FMT_NONE-terminated array.
 * NOTE(review): the leading signature lines (presumably
 * `static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
 *  enum AVHWFrameTransferDirection dir,`) are missing from this listing. */
                                    enum AVPixelFormat **formats)
{
    enum AVPixelFormat *fmts;

    fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = ctx->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    *formats = fmts;

    return 0;
}
784 
/* Expose a QSV frames context's surfaces through a child-API (VAAPI / D3D11 /
 * DXVA2) frames context by unwrapping the mfxHDLPair handles.
 * NOTE(review): extraction gaps -- the first signature line (presumably
 * `static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,`) and the
 * per-backend `case AV_HWDEVICE_TYPE_*:` labels are missing from this
 * listing.
 * NOTE(review): in the D3D11VA branch below, `i` is read
 * (src_hwctx->surfaces[i]) without ever being assigned on that path --
 * reading an uninitialized variable is undefined behavior; index 0 is almost
 * certainly intended (all surfaces share one texture there).  Flagged rather
 * than fixed because this listing is missing lines; confirm against
 * upstream. */
                                   AVHWFramesContext *src_ctx, int flags)
{
    AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
    int i;

    switch (dst_ctx->device_ctx->type) {
#if CONFIG_VAAPI
        {
            AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surface_ids = av_calloc(src_hwctx->nb_surfaces,
                                               sizeof(*dst_hwctx->surface_ids));
            if (!dst_hwctx->surface_ids)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
                dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
        }
        break;
#endif
#if CONFIG_D3D11VA
        {
            AVD3D11VAFramesContext *dst_hwctx = dst_ctx->hwctx;
            mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->texture = (ID3D11Texture2D*)pair->first;
            if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
                dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
            dst_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(src_hwctx->frame_type);
        }
        break;
#endif
#if CONFIG_DXVA2
        {
            AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surfaces = av_calloc(src_hwctx->nb_surfaces,
                                            sizeof(*dst_hwctx->surfaces));
            if (!dst_hwctx->surfaces)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
                dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
            else
                dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    return 0;
}
846 
/* Map a QSV frame either to its child hardware format (zero-copy, via
 * ff_hwframe_map_create) or onward to software by building a temporary child
 * frame and delegating to av_hwframe_map.
 * NOTE(review): extraction gaps in this listing -- the first signature line
 * (presumably `static int qsv_map_from(AVHWFramesContext *ctx,`), the
 * per-backend `case AV_HWDEVICE_TYPE_*:` labels in the switch, the statement
 * initializing `desc` (presumably `desc = av_pix_fmt_desc_get(dst->format);`,
 * which is read below), and the cleanup call after the `fail:` label
 * (presumably `av_frame_free(&dummy);`) are all missing.  Confirm against
 * upstream before editing. */
                        AVFrame *dst, const AVFrame *src, int flags)
{
    QSVFramesContext *s = ctx->internal->priv;
    mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
    AVHWFramesContext *child_frames_ctx;
    const AVPixFmtDescriptor *desc;
    uint8_t *child_data;
    AVFrame *dummy;
    int ret = 0;

    if (!s->child_frames_ref)
        return AVERROR(ENOSYS);
    child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;

    /* recover the child-API handle from the surface's mfxHDLPair */
    switch (child_frames_ctx->device_ctx->type) {
#if CONFIG_VAAPI
        {
            mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
            /* pair->first is *VASurfaceID while data[3] in vaapi frame is VASurfaceID, so
             * we need this casting for vaapi.
             * Add intptr_t to force cast from VASurfaceID(uint) type to pointer(long) type
             * to avoid compile warning */
            child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)pair->first;
            break;
        }
#endif
#if CONFIG_D3D11VA
        {
            mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
            child_data = pair->first;
            break;
        }
#endif
#if CONFIG_DXVA2
        {
            mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
            child_data = pair->first;
            break;
        }
#endif
    default:
        return AVERROR(ENOSYS);
    }

    /* direct map to the child hardware format */
    if (dst->format == child_frames_ctx->format) {
        ret = ff_hwframe_map_create(s->child_frames_ref,
                                    dst, src, NULL, NULL);
        if (ret < 0)
            return ret;

        dst->width = src->width;
        dst->height = src->height;

        if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
            mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
            dst->data[0] = pair->first;
            dst->data[1] = pair->second;
        } else {
            dst->data[3] = child_data;
        }

        return 0;
    }

    if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
        // This only supports mapping to software.
        return AVERROR(ENOSYS);
    }

    /* build a temporary child-format frame and let the child context map it */
    dummy = av_frame_alloc();
    if (!dummy)
        return AVERROR(ENOMEM);

    dummy->buf[0] = av_buffer_ref(src->buf[0]);
    dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
    if (!dummy->buf[0] || !dummy->hw_frames_ctx)
        goto fail;

    dummy->format = child_frames_ctx->format;
    dummy->width = src->width;
    dummy->height = src->height;

    if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
        mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
        dummy->data[0] = pair->first;
        dummy->data[1] = pair->second;
    } else {
        dummy->data[3] = child_data;
    }

    ret = av_hwframe_map(dst, dummy, flags);

fail:

    return ret;
}
949 
/* Perform an upload/download by borrowing the frame's child-API surface into
 * a temporary frame and delegating to the child context's transfer code.
 * Direction is inferred: a hw_frames_ctx on src means download.
 * NOTE(review): extraction gaps -- the first signature line (presumably
 * `static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,`),
 * the upload branch of the `ret = download ? ... :` expression (presumably
 * `av_hwframe_transfer_data(dummy, src, 0);`) and the cleanup call before the
 * return (presumably `av_frame_free(&dummy);`) are missing from this
 * listing. */
                                   const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
    int download = !!src->hw_frames_ctx;
    mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);

    AVFrame *dummy;
    int ret;

    dummy = av_frame_alloc();
    if (!dummy)
        return AVERROR(ENOMEM);

    /* borrow (not own) these fields so the child context operates on the
     * wrapped surface */
    dummy->format = child_frames_ctx->format;
    dummy->width = src->width;
    dummy->height = src->height;
    dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
    dummy->data[3] = surf->Data.MemId;
    dummy->hw_frames_ctx = s->child_frames_ref;

    ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :

    /* clear the borrowed fields so freeing dummy does not unref them */
    dummy->buf[0] = NULL;
    dummy->data[3] = NULL;
    dummy->hw_frames_ctx = NULL;


    return ret;
}
983 
984 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
985 {
986  switch (frame->format) {
987  case AV_PIX_FMT_NV12:
988  case AV_PIX_FMT_P010:
989  surface->Data.Y = frame->data[0];
990  surface->Data.UV = frame->data[1];
991  break;
992 
993  case AV_PIX_FMT_YUV420P:
994  surface->Data.Y = frame->data[0];
995  surface->Data.U = frame->data[1];
996  surface->Data.V = frame->data[2];
997  break;
998 
999  case AV_PIX_FMT_BGRA:
1000  surface->Data.B = frame->data[0];
1001  surface->Data.G = frame->data[0] + 1;
1002  surface->Data.R = frame->data[0] + 2;
1003  surface->Data.A = frame->data[0] + 3;
1004  break;
1005 #if CONFIG_VAAPI
1006  case AV_PIX_FMT_YUYV422:
1007  surface->Data.Y = frame->data[0];
1008  surface->Data.U = frame->data[0] + 1;
1009  surface->Data.V = frame->data[0] + 3;
1010  break;
1011 
1012  case AV_PIX_FMT_Y210:
1013  surface->Data.Y16 = (mfxU16 *)frame->data[0];
1014  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
1015  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
1016  break;
1017 #endif
1018  default:
1019  return MFX_ERR_UNSUPPORTED;
1020  }
1021  surface->Data.Pitch = frame->linesize[0];
1022  surface->Data.TimeStamp = frame->pts;
1023 
1024  return 0;
1025 }
1026 
/* Lazily create the internal upload or download session, double-checking the
 * atomic `inited` flag around the pthread lock (when available) so at most
 * one thread performs the init.
 * NOTE(review): the signature line is missing from this listing -- judging by
 * the body, presumably
 * `static int qsv_internal_session_check_init(AVHWFramesContext *ctx, int upload)`. */
{
    QSVFramesContext *s = ctx->internal->priv;
    atomic_int *inited = upload ? &s->session_upload_init : &s->session_download_init;
    mfxSession *session = upload ? &s->session_upload : &s->session_download;
    int ret = 0;

    /* fast path: already initialized */
    if (atomic_load(inited))
        return 0;

#if HAVE_PTHREADS
    pthread_mutex_lock(&s->session_lock);
#endif

    /* re-check under the lock */
    if (!atomic_load(inited)) {
        ret = qsv_init_internal_session(ctx, session, upload);
        atomic_store(inited, 1);
    }

#if HAVE_PTHREADS
    pthread_mutex_unlock(&s->session_lock);
#endif

    return ret;
}
1052 
/* Download a QSV (video-memory) frame into a system-memory frame via the
 * internal VPP session; falls back to the child context's transfer path when
 * no download session could be created.  If dst is not 16-aligned, the data
 * is downloaded into a cached realigned temporary and then copied out.
 * NOTE(review): extraction gaps -- the first signature line (presumably
 * `static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,`)
 * and the call whose result feeds the first `if (ret < 0)` (presumably
 * `ret = qsv_internal_session_check_init(ctx, 0);`) are missing from this
 * listing. */
                                  const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    mfxFrameSurface1 out = {{ 0 }};
    mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;
    int ret = 0;
    /* download to temp frame if the output is not padded as libmfx requires */
    AVFrame *tmp_frame = &s->realigned_download_frame;
    AVFrame *dst_frame;
    int realigned = 0;

    if (ret < 0)
        return ret;

    /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
     * Height must be a multiple of 16 for progressive frame sequence and a
     * multiple of 32 otherwise.", so align all frames to 16 before downloading. */
    if (dst->height & 15 || dst->linesize[0] & 15) {
        realigned = 1;
        /* (re)allocate the cached temp only when its geometry changed */
        if (tmp_frame->format != dst->format ||
            tmp_frame->width != FFALIGN(dst->linesize[0], 16) ||
            tmp_frame->height != FFALIGN(dst->height, 16)) {
            av_frame_unref(tmp_frame);

            tmp_frame->format = dst->format;
            tmp_frame->width = FFALIGN(dst->linesize[0], 16);
            tmp_frame->height = FFALIGN(dst->height, 16);
            ret = av_frame_get_buffer(tmp_frame, 0);
            if (ret < 0)
                return ret;
        }
    }

    dst_frame = realigned ? tmp_frame : dst;

    if (!s->session_download) {
        if (s->child_frames_ref)
            return qsv_transfer_data_child(ctx, dst_frame, src);

        av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
        return AVERROR(ENOSYS);
    }

    out.Info = in->Info;
    map_frame_to_surface(dst_frame, &out);

    /* busy-wait politely while the device is occupied */
    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
        return AVERROR_UNKNOWN;
    }

    if (realigned) {
        /* temporarily shrink the temp to the real size for the copy, then
         * restore the aligned geometry so the cache check above keeps working */
        tmp_frame->width = dst->width;
        tmp_frame->height = dst->height;
        ret = av_frame_copy(dst, tmp_frame);
        tmp_frame->width = FFALIGN(dst->linesize[0], 16);
        tmp_frame->height = FFALIGN(dst->height, 16);
        if (ret < 0)
            return ret;
    }

    return 0;
}
1135 
1137  const AVFrame *src)
1138 {
1139  QSVFramesContext *s = ctx->internal->priv;
1140  mfxFrameSurface1 in = {{ 0 }};
1141  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
1142  mfxFrameInfo tmp_info;
1143 
1144  mfxSyncPoint sync = NULL;
1145  mfxStatus err;
1146  int ret = 0;
1147  /* make a copy if the input is not padded as libmfx requires */
1148  AVFrame *tmp_frame = &s->realigned_upload_frame;
1149  const AVFrame *src_frame;
1150  int realigned = 0;
1151 
1153  if (ret < 0)
1154  return ret;
1155 
1156  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1157  * Height must be a multiple of 16 for progressive frame sequence and a
1158  * multiple of 32 otherwise.", so allign all frames to 16 before uploading. */
1159  if (src->height & 15 || src->linesize[0] & 15) {
1160  realigned = 1;
1161  if (tmp_frame->format != src->format ||
1162  tmp_frame->width != FFALIGN(src->width, 16) ||
1163  tmp_frame->height != FFALIGN(src->height, 16)) {
1164  av_frame_unref(tmp_frame);
1165 
1166  tmp_frame->format = src->format;
1167  tmp_frame->width = FFALIGN(src->width, 16);
1168  tmp_frame->height = FFALIGN(src->height, 16);
1169  ret = av_frame_get_buffer(tmp_frame, 0);
1170  if (ret < 0)
1171  return ret;
1172  }
1173  ret = av_frame_copy(tmp_frame, src);
1174  if (ret < 0) {
1175  av_frame_unref(tmp_frame);
1176  return ret;
1177  }
1178  ret = qsv_fill_border(tmp_frame, src);
1179  if (ret < 0) {
1180  av_frame_unref(tmp_frame);
1181  return ret;
1182  }
1183 
1184  tmp_info = out->Info;
1185  out->Info.CropW = FFMIN(out->Info.Width, tmp_frame->width);
1186  out->Info.CropH = FFMIN(out->Info.Height, tmp_frame->height);
1187  }
1188 
1189  src_frame = realigned ? tmp_frame : src;
1190 
1191  if (!s->session_upload) {
1192  if (s->child_frames_ref)
1193  return qsv_transfer_data_child(ctx, dst, src_frame);
1194 
1195  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
1196  return AVERROR(ENOSYS);
1197  }
1198 
1199  in.Info = out->Info;
1200  map_frame_to_surface(src_frame, &in);
1201 
1202  do {
1203  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
1204  if (err == MFX_WRN_DEVICE_BUSY)
1205  av_usleep(1);
1206  } while (err == MFX_WRN_DEVICE_BUSY);
1207 
1208  if (err < 0 || !sync) {
1209  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
1210  return AVERROR_UNKNOWN;
1211  }
1212 
1213  do {
1214  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
1215  } while (err == MFX_WRN_IN_EXECUTION);
1216  if (err < 0) {
1217  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
1218  return AVERROR_UNKNOWN;
1219  }
1220 
1221  if (realigned) {
1222  out->Info.CropW = tmp_info.CropW;
1223  out->Info.CropH = tmp_info.CropH;
1224  }
1225 
1226  return 0;
1227 }
1228 
1230  AVHWFramesContext *src_ctx, int flags)
1231 {
1232  QSVFramesContext *s = dst_ctx->internal->priv;
1233  AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
1234  int i;
1235 
1236  if (src_ctx->initial_pool_size == 0) {
1237  av_log(dst_ctx, AV_LOG_ERROR, "Only fixed-size pools can be "
1238  "mapped to QSV frames.\n");
1239  return AVERROR(EINVAL);
1240  }
1241 
1242  switch (src_ctx->device_ctx->type) {
1243 #if CONFIG_VAAPI
1245  {
1246  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
1247  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1248  sizeof(*s->handle_pairs_internal));
1249  if (!s->handle_pairs_internal)
1250  return AVERROR(ENOMEM);
1251  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1252  sizeof(*s->surfaces_internal));
1253  if (!s->surfaces_internal)
1254  return AVERROR(ENOMEM);
1255  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1256  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1257  s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
1258  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1259  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1260  }
1261  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1262  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1263  }
1264  break;
1265 #endif
1266 #if CONFIG_D3D11VA
1268  {
1269  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
1270  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1271  sizeof(*s->handle_pairs_internal));
1272  if (!s->handle_pairs_internal)
1273  return AVERROR(ENOMEM);
1274  s->surfaces_internal = av_calloc(src_ctx->initial_pool_size,
1275  sizeof(*s->surfaces_internal));
1276  if (!s->surfaces_internal)
1277  return AVERROR(ENOMEM);
1278  for (i = 0; i < src_ctx->initial_pool_size; i++) {
1279  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1280  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
1281  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1282  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1283  } else {
1284  s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
1285  }
1286  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1287  }
1288  dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
1289  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1290  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1291  } else {
1292  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1293  }
1294  }
1295  break;
1296 #endif
1297 #if CONFIG_DXVA2
1299  {
1300  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1301  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1302  sizeof(*s->handle_pairs_internal));
1303  if (!s->handle_pairs_internal)
1304  return AVERROR(ENOMEM);
1305  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1306  sizeof(*s->surfaces_internal));
1307  if (!s->surfaces_internal)
1308  return AVERROR(ENOMEM);
1309  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1310  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1311  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
1312  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1313  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1314  }
1315  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1316  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1317  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1318  else
1319  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1320  }
1321  break;
1322 #endif
1323  default:
1324  return AVERROR(ENOSYS);
1325  }
1326 
1327  dst_hwctx->surfaces = s->surfaces_internal;
1328 
1329  return 0;
1330 }
1331 
1332 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1333  AVFrame *dst, const AVFrame *src, int flags)
1334 {
1335  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1336  int i, err, index = -1;
1337 
1338  for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
1339  switch(src->format) {
1340 #if CONFIG_VAAPI
1341  case AV_PIX_FMT_VAAPI:
1342  {
1343  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1344  if (*(VASurfaceID*)pair->first == (VASurfaceID)src->data[3]) {
1345  index = i;
1346  break;
1347  }
1348  }
1349 #endif
1350 #if CONFIG_D3D11VA
1351  case AV_PIX_FMT_D3D11:
1352  {
1353  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1354  if (pair->first == src->data[0]
1355  && pair->second == src->data[1]) {
1356  index = i;
1357  break;
1358  }
1359  }
1360 #endif
1361 #if CONFIG_DXVA2
1362  case AV_PIX_FMT_DXVA2_VLD:
1363  {
1364  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1365  if (pair->first == src->data[3]) {
1366  index = i;
1367  break;
1368  }
1369  }
1370 #endif
1371  }
1372  }
1373  if (index < 0) {
1374  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1375  "is not in the mapped frames context.\n");
1376  return AVERROR(EINVAL);
1377  }
1378 
1380  dst, src, NULL, NULL);
1381  if (err)
1382  return err;
1383 
1384  dst->width = src->width;
1385  dst->height = src->height;
1386  dst->data[3] = (uint8_t*)&hwctx->surfaces[index];
1387 
1388  return 0;
1389 }
1390 
1392  const void *hwconfig,
1393  AVHWFramesConstraints *constraints)
1394 {
1395  int i;
1396 
1398  sizeof(*constraints->valid_sw_formats));
1399  if (!constraints->valid_sw_formats)
1400  return AVERROR(ENOMEM);
1401 
1402  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1403  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
1405 
1406  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
1407  if (!constraints->valid_hw_formats)
1408  return AVERROR(ENOMEM);
1409 
1410  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
1411  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1412 
1413  return 0;
1414 }
1415 
1417 {
1418  AVQSVDeviceContext *hwctx = ctx->hwctx;
1419  QSVDevicePriv *priv = ctx->user_opaque;
1420 
1421  if (hwctx->session)
1422  MFXClose(hwctx->session);
1423 
1425  av_freep(&priv);
1426 }
1427 
1428 static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
1429 {
1430  static const struct {
1431  const char *name;
1432  mfxIMPL impl;
1433  } impl_map[] = {
1434  { "auto", MFX_IMPL_AUTO },
1435  { "sw", MFX_IMPL_SOFTWARE },
1436  { "hw", MFX_IMPL_HARDWARE },
1437  { "auto_any", MFX_IMPL_AUTO_ANY },
1438  { "hw_any", MFX_IMPL_HARDWARE_ANY },
1439  { "hw2", MFX_IMPL_HARDWARE2 },
1440  { "hw3", MFX_IMPL_HARDWARE3 },
1441  { "hw4", MFX_IMPL_HARDWARE4 },
1442  };
1443 
1444  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
1445  int i;
1446 
1447  if (device) {
1448  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
1449  if (!strcmp(device, impl_map[i].name)) {
1450  impl = impl_map[i].impl;
1451  break;
1452  }
1453  if (i == FF_ARRAY_ELEMS(impl_map))
1454  impl = strtol(device, NULL, 0);
1455  }
1456 
1457  if (impl != MFX_IMPL_SOFTWARE) {
1458  if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
1459  impl |= MFX_IMPL_VIA_D3D11;
1460  else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
1461  impl |= MFX_IMPL_VIA_D3D9;
1462  }
1463 
1464  return impl;
1465 }
1466 
1468  mfxIMPL implementation,
1469  AVHWDeviceContext *child_device_ctx,
1470  int flags)
1471 {
1472  AVQSVDeviceContext *hwctx = ctx->hwctx;
1473 
1474  mfxVersion ver = { { 3, 1 } };
1475  mfxHDL handle;
1476  mfxHandleType handle_type;
1477  mfxStatus err;
1478  int ret;
1479 
1480  switch (child_device_ctx->type) {
1481 #if CONFIG_VAAPI
1483  {
1484  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1485  handle_type = MFX_HANDLE_VA_DISPLAY;
1486  handle = (mfxHDL)child_device_hwctx->display;
1487  }
1488  break;
1489 #endif
1490 #if CONFIG_D3D11VA
1492  {
1493  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1494  handle_type = MFX_HANDLE_D3D11_DEVICE;
1495  handle = (mfxHDL)child_device_hwctx->device;
1496  }
1497  break;
1498 #endif
1499 #if CONFIG_DXVA2
1501  {
1502  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1503  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1504  handle = (mfxHDL)child_device_hwctx->devmgr;
1505  }
1506  break;
1507 #endif
1508  default:
1509  ret = AVERROR(ENOSYS);
1510  goto fail;
1511  }
1512 
1513  err = MFXInit(implementation, &ver, &hwctx->session);
1514  if (err != MFX_ERR_NONE) {
1515  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1516  "%d.\n", err);
1517  ret = AVERROR_UNKNOWN;
1518  goto fail;
1519  }
1520 
1521  err = MFXQueryVersion(hwctx->session, &ver);
1522  if (err != MFX_ERR_NONE) {
1523  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
1524  ret = AVERROR_UNKNOWN;
1525  goto fail;
1526  }
1527 
1529  "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
1530  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
1531 
1532  MFXClose(hwctx->session);
1533 
1534  err = MFXInit(implementation, &ver, &hwctx->session);
1535  if (err != MFX_ERR_NONE) {
1537  "Error initializing an MFX session: %d.\n", err);
1538  ret = AVERROR_UNKNOWN;
1539  goto fail;
1540  }
1541 
1542  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
1543  if (err != MFX_ERR_NONE) {
1544  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
1545  "%d\n", err);
1546  ret = AVERROR_UNKNOWN;
1547  goto fail;
1548  }
1549 
1550  return 0;
1551 
1552 fail:
1553  if (hwctx->session)
1554  MFXClose(hwctx->session);
1555  return ret;
1556 }
1557 
1559  AVHWDeviceContext *child_device_ctx,
1560  AVDictionary *opts, int flags)
1561 {
1562  mfxIMPL impl;
1563  impl = choose_implementation("hw_any", child_device_ctx->type);
1564  return qsv_device_derive_from_child(ctx, impl,
1565  child_device_ctx, flags);
1566 }
1567 
1568 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
1569  AVDictionary *opts, int flags)
1570 {
1571  QSVDevicePriv *priv;
1572  enum AVHWDeviceType child_device_type;
1573  AVHWDeviceContext *child_device;
1574  AVDictionary *child_device_opts;
1575  AVDictionaryEntry *e;
1576 
1577  mfxIMPL impl;
1578  int ret;
1579 
1580  priv = av_mallocz(sizeof(*priv));
1581  if (!priv)
1582  return AVERROR(ENOMEM);
1583 
1584  ctx->user_opaque = priv;
1585  ctx->free = qsv_device_free;
1586 
1587  e = av_dict_get(opts, "child_device_type", NULL, 0);
1588  if (e) {
1589  child_device_type = av_hwdevice_find_type_by_name(e->value);
1590  if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
1591  av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
1592  "\"%s\".\n", e->value);
1593  return AVERROR(EINVAL);
1594  }
1595  } else if (CONFIG_VAAPI) {
1596  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
1597  } else if (CONFIG_DXVA2) {
1599  "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
1600  "with old commandlines. This behaviour will be removed "
1601  "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
1602  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
1603  } else if (CONFIG_D3D11VA) {
1604  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
1605  } else {
1606  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1607  return AVERROR(ENOSYS);
1608  }
1609 
1610  child_device_opts = NULL;
1611  switch (child_device_type) {
1612 #if CONFIG_VAAPI
1614  {
1615  // libmfx does not actually implement VAAPI properly, rather it
1616  // depends on the specific behaviour of a matching iHD driver when
1617  // used on recent Intel hardware. Set options to the VAAPI device
1618  // creation so that we should pick a usable setup by default if
1619  // possible, even when multiple devices and drivers are available.
1620  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
1621  av_dict_set(&child_device_opts, "driver", "iHD", 0);
1622  }
1623  break;
1624 #endif
1625 #if CONFIG_D3D11VA
1627  break;
1628 #endif
1629 #if CONFIG_DXVA2
1631  break;
1632 #endif
1633  default:
1634  {
1635  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1636  return AVERROR(ENOSYS);
1637  }
1638  break;
1639  }
1640 
1641  e = av_dict_get(opts, "child_device", NULL, 0);
1642  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
1643  e ? e->value : NULL, child_device_opts, 0);
1644 
1645  av_dict_free(&child_device_opts);
1646  if (ret < 0)
1647  return ret;
1648 
1649  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
1650 
1651  impl = choose_implementation(device, child_device_type);
1652 
1653  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
1654 }
1655 
1658  .name = "QSV",
1659 
1660  .device_hwctx_size = sizeof(AVQSVDeviceContext),
1661  .device_priv_size = sizeof(QSVDeviceContext),
1662  .frames_hwctx_size = sizeof(AVQSVFramesContext),
1663  .frames_priv_size = sizeof(QSVFramesContext),
1664 
1665  .device_create = qsv_device_create,
1666  .device_derive = qsv_device_derive,
1667  .device_init = qsv_device_init,
1668  .frames_get_constraints = qsv_frames_get_constraints,
1669  .frames_init = qsv_frames_init,
1670  .frames_uninit = qsv_frames_uninit,
1671  .frames_get_buffer = qsv_get_buffer,
1672  .transfer_get_formats = qsv_transfer_get_formats,
1673  .transfer_data_to = qsv_transfer_data_to,
1674  .transfer_data_from = qsv_transfer_data_from,
1675  .map_to = qsv_map_to,
1676  .map_from = qsv_map_from,
1677  .frames_derive_to = qsv_frames_derive_to,
1678  .frames_derive_from = qsv_frames_derive_from,
1679 
1680  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
1681 };
formats
formats
Definition: signature.h:48
pthread_mutex_t
_fmutex pthread_mutex_t
Definition: os2threads.h:53
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
qsv_transfer_data_child
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:950
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:49
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
atomic_store
#define atomic_store(object, desired)
Definition: stdatomic.h:85
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
out
FILE * out
Definition: movenc.c:54
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:254
comp
static void comp(unsigned char *dst, ptrdiff_t dst_stride, unsigned char *src, ptrdiff_t src_stride, int add)
Definition: eamad.c:86
QSVFramesContext::child_frames_ref
AVBufferRef * child_frames_ref
Definition: hwcontext_qsv.c:82
qsv_transfer_data_to
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1136
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2662
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
pthread_mutex_init
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:104
qsv_map_from
static int qsv_map_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:847
qsv_fourcc_from_pix_fmt
static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
Definition: hwcontext_qsv.c:150
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:209
qsv_fill_border
static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:177
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:111
QSVDeviceContext::ver
mfxVersion ver
Definition: hwcontext_qsv.c:66
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:334
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:325
pixdesc.h
AVFrame::width
int width
Definition: frame.h:397
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
AVD3D11VAFramesContext::MiscFlags
UINT MiscFlags
D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
Definition: hwcontext_d3d11va.h:166
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:248
qsv_device_derive
static int qsv_device_derive(AVHWDeviceContext *ctx, AVHWDeviceContext *child_device_ctx, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:1558
AVDXVA2FramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_dxva2.h:46
qsv_frames_derive_from
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:785
AV_HWDEVICE_TYPE_NONE
@ AV_HWDEVICE_TYPE_NONE
Definition: hwcontext.h:28
av_hwframe_map
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:790
qsv_init_surface
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
Definition: hwcontext_qsv.c:476
data
const char data[16]
Definition: mxf.c:143
atomic_int
intptr_t atomic_int
Definition: stdatomic.h:55
choose_implementation
static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
Definition: hwcontext_qsv.c:1428
QSVDeviceContext
Definition: hwcontext_qsv.c:63
av_hwdevice_find_type_by_name
enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
Look up an AVHWDeviceType by name.
Definition: hwcontext.c:83
AVDXVA2DeviceContext::devmgr
IDirect3DDeviceManager9 * devmgr
Definition: hwcontext_dxva2.h:40
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVVAAPIDeviceContext::display
VADisplay display
The VADisplay handle, to be filled by the user.
Definition: hwcontext_vaapi.h:72
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
AVHWFramesContext::internal
AVHWFramesInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:134
AVDictionary
Definition: dict.c:30
ff_hwframe_map_create
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
Definition: hwcontext.c:738
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:458
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
fourcc
uint32_t fourcc
Definition: hwcontext_qsv.c:100
av_hwdevice_ctx_init
int av_hwdevice_ctx_init(AVBufferRef *ref)
Finalize the device context before use.
Definition: hwcontext.c:201
AVFrame::buf
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:525
QSVDeviceContext::handle_type
mfxHandleType handle_type
Definition: hwcontext_qsv.c:65
qsv_transfer_data_from
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1053
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:346
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:453
QSVDevicePriv
Definition: hwcontext_qsv.c:59
AVD3D11VAFramesContext::BindFlags
UINT BindFlags
D3D11_TEXTURE2D_DESC.BindFlags used for texture creation.
Definition: hwcontext_d3d11va.h:160
AVVAAPIFramesContext::surface_ids
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
Definition: hwcontext_vaapi.h:101
AVHWFramesInternal::priv
void * priv
Definition: hwcontext_internal.h:116
AVD3D11FrameDescriptor::texture
ID3D11Texture2D * texture
The texture in which the frame is located.
Definition: hwcontext_d3d11va.h:117
QSVDeviceContext::child_device_type
enum AVHWDeviceType child_device_type
Definition: hwcontext_qsv.c:69
qsv_init_child_ctx
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:331
fail
#define fail()
Definition: checkasm.h:130
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:128
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
qsv_frames_get_constraints
static int qsv_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_qsv.c:1391
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
QSVFramesContext::session_download_init
atomic_int session_download_init
Definition: hwcontext_qsv.c:75
qsv_frames_derive_to
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:1229
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:99
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:585
AV_PIX_FMT_Y210
#define AV_PIX_FMT_Y210
Definition: pixfmt.h:458
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:229
av_hwdevice_ctx_alloc
AVBufferRef * av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
Allocate an AVHWDeviceContext for a given hardware type.
Definition: hwcontext.c:143
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:465
QSVFramesContext::ext_buffers
mfxExtBuffer * ext_buffers[1]
Definition: hwcontext_qsv.c:93
QSVFramesContext::session_upload_init
atomic_int session_upload_init
Definition: hwcontext_qsv.c:77
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:557
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:40
av_memcpy_backptr
void av_memcpy_backptr(uint8_t *dst, int back, int cnt)
Overlapping memcpy() implementation.
Definition: mem.c:455
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:387
QSVDevicePriv::child_device_ctx
AVBufferRef * child_device_ctx
Definition: hwcontext_qsv.c:60
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:127
s
#define s(width, name)
Definition: cbs_vp9.c:256
AVD3D11VADeviceContext::device
ID3D11Device * device
Device used for texture creation and access.
Definition: hwcontext_d3d11va.h:56
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:50
AVHWDeviceType
AVHWDeviceType
Definition: hwcontext.h:27
QSVDeviceContext::handle
mfxHDL handle
Definition: hwcontext_qsv.c:64
QSVFramesContext::mem_ids
mfxMemId * mem_ids
Definition: hwcontext_qsv.c:88
ctx
AVFormatContext * ctx
Definition: movenc.c:48
AVDXVA2FramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_dxva2.h:59
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
atomic_load
#define atomic_load(object)
Definition: stdatomic.h:93
if
if(ret)
Definition: filter_design.txt:179
ff_hwcontext_type_qsv
const HWContextType ff_hwcontext_type_qsv
Definition: hwcontext_qsv.c:1656
opts
AVDictionary * opts
Definition: movenc.c:50
AVD3D11VAFramesContext::texture_infos
AVD3D11FrameDescriptor * texture_infos
In case if texture structure member above is not NULL contains the same texture pointer for all eleme...
Definition: hwcontext_d3d11va.h:175
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:43
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
qsv_frames_uninit
static void qsv_frames_uninit(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:281
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVComponentDescriptor
Definition: pixdesc.h:30
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: hwcontext_qsv.c:57
qsv_internal_session_check_init
static int qsv_internal_session_check_init(AVHWFramesContext *ctx, int upload)
Definition: hwcontext_qsv.c:1027
qsv_frames_init
static int qsv_frames_init(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:688
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:212
map_frame_to_surface
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
Definition: hwcontext_qsv.c:984
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:595
index
int index
Definition: gxfenc.c:89
pthread_mutex_unlock
#define pthread_mutex_unlock(a)
Definition: ffprobe.c:77
AVD3D11VAFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_d3d11va.h:131
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
QSVFramesContext::realigned_upload_frame
AVFrame realigned_upload_frame
Definition: hwcontext_qsv.c:94
qsv_init_internal_session
static int qsv_init_internal_session(AVHWFramesContext *ctx, mfxSession *session, int upload)
Definition: hwcontext_qsv.c:612
hwcontext_dxva2.h
QSVFramesContext::opaque_alloc
mfxExtOpaqueSurfaceAlloc opaque_alloc
Definition: hwcontext_qsv.c:92
qsv_get_buffer
static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
Definition: hwcontext_qsv.c:753
AVDXVA2FramesContext::surface_type
DWORD surface_type
The surface type (e.g.
Definition: hwcontext_dxva2.h:51
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:755
size
int size
Definition: twinvq_data.h:10344
QSVFramesContext::nb_surfaces_used
int nb_surfaces_used
Definition: hwcontext_qsv.c:85
qsv_device_free
static void qsv_device_free(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:1416
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:412
ff_qsv_get_surface_base_handle
int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf, enum AVHWDeviceType base_dev_type, void **base_handle)
Caller needs to allocate enough space for base_handle pointer.
Definition: hwcontext_qsv.c:123
qsv_transfer_get_formats
static int qsv_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_qsv.c:767
buffer.h
AVD3D11VAFramesContext::texture
ID3D11Texture2D * texture
The canonical texture used for pool allocation.
Definition: hwcontext_d3d11va.h:152
qsv_device_derive_from_child
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx, mfxIMPL implementation, AVHWDeviceContext *child_device_ctx, int flags)
Definition: hwcontext_qsv.c:1467
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:305
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:203
supported_pixel_formats
static const struct @310 supported_pixel_formats[]
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_qsv.h:44
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: hwcontext_qsv.c:600
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:119
AV_HWDEVICE_TYPE_VAAPI
@ AV_HWDEVICE_TYPE_VAAPI
Definition: hwcontext.h:31
pthread_mutex_destroy
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:112
av_image_get_linesize
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
Compute the size of an image line with format pix_fmt and width width for the plane plane.
Definition: imgutils.c:76
hwcontext_qsv.h
qsv_device_init
static int qsv_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:225
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:271
AVDXVA2DeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_dxva2.h:39
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
common.h
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
QSVFramesContext::handle_pairs_internal
mfxHDLPair * handle_pairs_internal
Definition: hwcontext_qsv.c:84
AVD3D11FrameDescriptor::index
intptr_t index
The index into the array texture element representing the frame, or 0 if the texture is not an array ...
Definition: hwcontext_d3d11va.h:125
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
QSVFramesContext::surface_ptrs
mfxFrameSurface1 ** surface_ptrs
Definition: hwcontext_qsv.c:90
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:477
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:264
QSVFramesContext::session_download
mfxSession session_download
Definition: hwcontext_qsv.c:74
AVDXVA2FramesContext::surfaces
IDirect3DSurface9 ** surfaces
The surface pool.
Definition: hwcontext_dxva2.h:58
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:272
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:415
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
hwcontext_vaapi.h
qsv_map_to
static int qsv_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:1332
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
ret
ret
Definition: filter_design.txt:187
pix_fmt
enum AVPixelFormat pix_fmt
Definition: hwcontext_qsv.c:99
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:79
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
AVHWFramesContext::device_ctx
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:149
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:162
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:611
QSVDeviceContext::impl
mfxIMPL impl
Definition: hwcontext_qsv.c:67
QSVFramesContext::realigned_download_frame
AVFrame realigned_download_frame
Definition: hwcontext_qsv.c:95
av_hwframe_transfer_data
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
Copy data to or from a hw surface.
Definition: hwcontext.c:444
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:590
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:659
AV_HWDEVICE_TYPE_QSV
@ AV_HWDEVICE_TYPE_QSV
Definition: hwcontext.h:33
qsv_pool_release_dummy
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
Definition: hwcontext_qsv.c:312
AVFrame::height
int height
Definition: frame.h:397
QSVDeviceContext::child_pix_fmt
enum AVPixelFormat child_pix_fmt
Definition: hwcontext_qsv.c:70
AVVAAPIFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_vaapi.h:102
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
dummy
int dummy
Definition: motion.c:65
QSVFramesContext::session_upload
mfxSession session_upload
Definition: hwcontext_qsv.c:76
qsv_device_create
static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:1568
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:455
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:42
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:199
desc
const char * desc
Definition: libsvtav1.c:83
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
hwcontext_internal.h
AVVAAPIFramesContext
VAAPI-specific data associated with a frame pool.
Definition: hwcontext_vaapi.h:88
QSVFramesContext::surfaces_internal
mfxFrameSurface1 * surfaces_internal
Definition: hwcontext_qsv.c:83
AVDictionaryEntry
Definition: dict.h:79
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
QSVFramesContext
Definition: qsv_internal.h:101
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
imgutils.h
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:561
hwcontext.h
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:370
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
HWContextType
Definition: hwcontext_internal.h:29
qsv_pool_alloc
static AVBufferRef * qsv_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_qsv.c:316
ID3D11Device
void ID3D11Device
Definition: nvenc.h:28
AVVAAPIDeviceContext
VAAPI connection details.
Definition: hwcontext_vaapi.h:68
AVDictionaryEntry::value
char * value
Definition: dict.h:81
hwcontext_d3d11va.h
qsv_init_pool
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
Definition: hwcontext_qsv.c:512
pthread_mutex_lock
#define pthread_mutex_lock(a)
Definition: ffprobe.c:73