FFmpeg
qsv.c
1 /*
2  * Intel MediaSDK QSV encoder/decoder shared code
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include <mfxvideo.h>
22 #include <mfxjpeg.h>
23 #include <mfxvp8.h>
24 
25 #include <stdio.h>
26 #include <string.h>
27 
28 #include "libavutil/avstring.h"
29 #include "libavutil/common.h"
30 #include "libavutil/error.h"
31 #include "libavutil/hwcontext.h"
32 #include "libavutil/hwcontext_qsv.h"
33 #include "libavutil/avassert.h"
34 #include "libavutil/mem.h"
35 
36 #include "avcodec.h"
37 #include "qsv_internal.h"
38 
39 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
40 #define QSV_HAVE_USER_PLUGIN !QSV_ONEVPL
41 #define QSV_HAVE_AUDIO !QSV_ONEVPL
42 
43 #if QSV_HAVE_USER_PLUGIN
44 #include <mfxplugin.h>
45 #endif
46 
47 #if QSV_ONEVPL
48 #include <mfxdispatcher.h>
49 #else
50 #define MFXUnload(a) do { } while(0)
51 #endif
52 
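/* Map an FFmpeg codec ID to the corresponding libmfx codec ID, or return
 * AVERROR(ENOSYS) when there is no mapping. */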
53 int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
54 {
55  switch (codec_id) {
56  case AV_CODEC_ID_H264:
57  return MFX_CODEC_AVC;
58  case AV_CODEC_ID_HEVC:
59  return MFX_CODEC_HEVC;
60  case AV_CODEC_ID_MPEG1VIDEO:
61  case AV_CODEC_ID_MPEG2VIDEO:
62  return MFX_CODEC_MPEG2;
63  case AV_CODEC_ID_VC1:
64  return MFX_CODEC_VC1;
65  case AV_CODEC_ID_VP8:
66  return MFX_CODEC_VP8;
67  case AV_CODEC_ID_MJPEG:
68  return MFX_CODEC_JPEG;
69  case AV_CODEC_ID_VP9:
70  return MFX_CODEC_VP9;
71 #if QSV_VERSION_ATLEAST(1, 34)
72  case AV_CODEC_ID_AV1:
73  return MFX_CODEC_AV1;
74 #endif
75 
76  default:
77  break;
78  }
79 
80  return AVERROR(ENOSYS);
81 }
82 
83 static const struct {
84  int mfx_iopattern;
85  const char *desc;
86 } qsv_iopatterns[] = {
87  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
88  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
89 #if QSV_HAVE_OPAQUE
90  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
91 #endif
92  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
93  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
94 #if QSV_HAVE_OPAQUE
95  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
96 #endif
97 };
98 
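/* Log a human-readable description of the given mfx IOPattern flags at
 * verbose level. */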
99 int ff_qsv_print_iopattern(void *log_ctx, int mfx_iopattern,
100  const char *extra_string)
101 {
102  const char *desc = NULL;
103 
104  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
105  if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
106  desc = qsv_iopatterns[i].desc;
107  }
108  }
109  if (!desc)
110  desc = "unknown iopattern";
111 
112  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
113  return 0;
114 }
115 
116 static const struct {
117  mfxStatus mfxerr;
118  int averr;
119  const char *desc;
120 } qsv_errors[] = {
121  { MFX_ERR_NONE, 0, "success" },
122  { MFX_ERR_UNKNOWN, AVERROR_UNKNOWN, "unknown error" },
123  { MFX_ERR_NULL_PTR, AVERROR(EINVAL), "NULL pointer" },
124  { MFX_ERR_UNSUPPORTED, AVERROR(ENOSYS), "unsupported" },
125  { MFX_ERR_MEMORY_ALLOC, AVERROR(ENOMEM), "failed to allocate memory" },
126  { MFX_ERR_NOT_ENOUGH_BUFFER, AVERROR(ENOMEM), "insufficient input/output buffer" },
127  { MFX_ERR_INVALID_HANDLE, AVERROR(EINVAL), "invalid handle" },
128  { MFX_ERR_LOCK_MEMORY, AVERROR(EIO), "failed to lock the memory block" },
129  { MFX_ERR_NOT_INITIALIZED, AVERROR_BUG, "not initialized" },
130  { MFX_ERR_NOT_FOUND, AVERROR(ENOSYS), "specified object was not found" },
131  /* the following 3 errors should always be handled explicitly, so those "mappings"
132  * are for completeness only */
133  { MFX_ERR_MORE_DATA, AVERROR_UNKNOWN, "expect more data at input" },
134  { MFX_ERR_MORE_SURFACE, AVERROR_UNKNOWN, "expect more surface at output" },
135  { MFX_ERR_MORE_BITSTREAM, AVERROR_UNKNOWN, "expect more bitstream at output" },
136  { MFX_ERR_ABORTED, AVERROR_UNKNOWN, "operation aborted" },
137  { MFX_ERR_DEVICE_LOST, AVERROR(EIO), "device lost" },
138  { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters" },
139  { MFX_ERR_INVALID_VIDEO_PARAM, AVERROR(EINVAL), "invalid video parameters" },
140  { MFX_ERR_UNDEFINED_BEHAVIOR, AVERROR_BUG, "undefined behavior" },
141  { MFX_ERR_DEVICE_FAILED, AVERROR(EIO), "device failed" },
142 #if QSV_HAVE_AUDIO
143  { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters" },
144  { MFX_ERR_INVALID_AUDIO_PARAM, AVERROR(EINVAL), "invalid audio parameters" },
145 #endif
146  { MFX_ERR_GPU_HANG, AVERROR(EIO), "GPU Hang" },
147  { MFX_ERR_REALLOC_SURFACE, AVERROR_UNKNOWN, "need bigger surface for output" },
148 
149  { MFX_WRN_IN_EXECUTION, 0, "operation in execution" },
150  { MFX_WRN_DEVICE_BUSY, 0, "device busy" },
151  { MFX_WRN_VIDEO_PARAM_CHANGED, 0, "video parameters changed" },
152  { MFX_WRN_PARTIAL_ACCELERATION, 0, "partial acceleration" },
153  { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0, "incompatible video parameters" },
154  { MFX_WRN_VALUE_NOT_CHANGED, 0, "value is saturated" },
155  { MFX_WRN_OUT_OF_RANGE, 0, "value out of range" },
156  { MFX_WRN_FILTER_SKIPPED, 0, "filter skipped" },
157 #if QSV_HAVE_AUDIO
158  { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0, "incompatible audio parameters" },
159 #endif
160 
161 #if QSV_VERSION_ATLEAST(1, 31)
162  { MFX_ERR_NONE_PARTIAL_OUTPUT, 0, "partial output" },
163 #endif
164 };
165 
166 /**
167  * Convert a libmfx error code into an FFmpeg error code.
168  */
169 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
170 {
171  int i;
172  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
173  if (qsv_errors[i].mfxerr == mfx_err) {
174  if (desc)
175  *desc = qsv_errors[i].desc;
176  return qsv_errors[i].averr;
177  }
178  }
179  if (desc)
180  *desc = "unknown error";
181  return AVERROR_UNKNOWN;
182 }
183 
184 int ff_qsv_print_error(void *log_ctx, mfxStatus err,
185  const char *error_string)
186 {
187  const char *desc;
188  int ret = qsv_map_error(err, &desc);
189  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
190  return ret;
191 }
192 
193 int ff_qsv_print_warning(void *log_ctx, mfxStatus err,
194  const char *warning_string)
195 {
196  const char *desc;
197  int ret = qsv_map_error(err, &desc);
198  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
199  return ret;
200 }
201 
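/* Translate a libmfx FourCC into the matching AVPixelFormat, or
 * AV_PIX_FMT_NONE if it is not handled here. */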
202 enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
203 {
204  switch (fourcc) {
205  case MFX_FOURCC_NV12: return AV_PIX_FMT_NV12;
206  case MFX_FOURCC_P010: return AV_PIX_FMT_P010;
207  case MFX_FOURCC_P8: return AV_PIX_FMT_PAL8;
208  case MFX_FOURCC_A2RGB10: return AV_PIX_FMT_X2RGB10;
209  case MFX_FOURCC_RGB4: return AV_PIX_FMT_BGRA;
210  case MFX_FOURCC_YUY2: return AV_PIX_FMT_YUYV422;
211  case MFX_FOURCC_Y210: return AV_PIX_FMT_Y210;
212  case MFX_FOURCC_AYUV: return AV_PIX_FMT_VUYX;
213  case MFX_FOURCC_Y410: return AV_PIX_FMT_XV30;
214 #if QSV_VERSION_ATLEAST(1, 31)
215  case MFX_FOURCC_P016: return AV_PIX_FMT_P012;
216  case MFX_FOURCC_Y216: return AV_PIX_FMT_Y212;
217  case MFX_FOURCC_Y416: return AV_PIX_FMT_XV36;
218 #endif
219  }
220  return AV_PIX_FMT_NONE;
221 }
222 
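/* Map an AVPixelFormat to the libmfx FourCC and Shift flag the SDK expects;
 * returns the pixel format actually used for the surface (e.g. NV12 for
 * YUV420P input), or AVERROR(ENOSYS) for unsupported formats. */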
223 int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc, uint16_t *shift)
224 {
225  switch (format) {
226  case AV_PIX_FMT_YUV420P:
227  case AV_PIX_FMT_YUVJ420P:
228  case AV_PIX_FMT_NV12:
229  *fourcc = MFX_FOURCC_NV12;
230  *shift = 0;
231  return AV_PIX_FMT_NV12;
232  case AV_PIX_FMT_YUV420P10:
233  case AV_PIX_FMT_P010:
234  *fourcc = MFX_FOURCC_P010;
235  *shift = 1;
236  return AV_PIX_FMT_P010;
237  case AV_PIX_FMT_X2RGB10:
238  *fourcc = MFX_FOURCC_A2RGB10;
239  *shift = 1;
240  return AV_PIX_FMT_X2RGB10;
241  case AV_PIX_FMT_BGRA:
242  *fourcc = MFX_FOURCC_RGB4;
243  *shift = 0;
244  return AV_PIX_FMT_BGRA;
245  case AV_PIX_FMT_YUV422P:
246  case AV_PIX_FMT_YUYV422:
247  *fourcc = MFX_FOURCC_YUY2;
248  *shift = 0;
249  return AV_PIX_FMT_YUYV422;
250  case AV_PIX_FMT_YUV422P10:
251  case AV_PIX_FMT_Y210:
252  *fourcc = MFX_FOURCC_Y210;
253  *shift = 1;
254  return AV_PIX_FMT_Y210;
255  case AV_PIX_FMT_VUYX:
256  *fourcc = MFX_FOURCC_AYUV;
257  *shift = 0;
258  return AV_PIX_FMT_VUYX;
259  case AV_PIX_FMT_XV30:
260  *fourcc = MFX_FOURCC_Y410;
261  *shift = 0;
262  return AV_PIX_FMT_XV30;
263 #if QSV_VERSION_ATLEAST(1, 31)
264  case AV_PIX_FMT_P012:
265  *fourcc = MFX_FOURCC_P016;
266  *shift = 1;
267  return AV_PIX_FMT_P012;
268  case AV_PIX_FMT_Y212:
269  *fourcc = MFX_FOURCC_Y216;
270  *shift = 1;
271  return AV_PIX_FMT_Y212;
272  case AV_PIX_FMT_XV36:
273  *fourcc = MFX_FOURCC_Y416;
274  *shift = 1;
275  return AV_PIX_FMT_XV36;
276 #endif
277  default:
278  return AVERROR(ENOSYS);
279  }
280 }
281 
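/* Point the plane pointers of an mfxFrameSurface1 at the data of a
 * system-memory AVFrame; no pixel data is copied. */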
282 int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
283 {
284  switch (frame->format) {
285  case AV_PIX_FMT_NV12:
286  case AV_PIX_FMT_P010:
287  case AV_PIX_FMT_P012:
288  surface->Data.Y = frame->data[0];
289  surface->Data.UV = frame->data[1];
290  /* The SDK checks Data.V when using system memory for VP9 encoding */
291  surface->Data.V = surface->Data.UV + 1;
292  break;
293  case AV_PIX_FMT_X2RGB10LE:
294  case AV_PIX_FMT_BGRA:
295  surface->Data.B = frame->data[0];
296  surface->Data.G = frame->data[0] + 1;
297  surface->Data.R = frame->data[0] + 2;
298  surface->Data.A = frame->data[0] + 3;
299  break;
300  case AV_PIX_FMT_YUYV422:
301  surface->Data.Y = frame->data[0];
302  surface->Data.U = frame->data[0] + 1;
303  surface->Data.V = frame->data[0] + 3;
304  break;
305 
306  case AV_PIX_FMT_Y210:
307  case AV_PIX_FMT_Y212:
308  surface->Data.Y16 = (mfxU16 *)frame->data[0];
309  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
310  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
311  break;
312 
313  case AV_PIX_FMT_VUYX:
314  surface->Data.V = frame->data[0];
315  surface->Data.U = frame->data[0] + 1;
316  surface->Data.Y = frame->data[0] + 2;
317  // Only set Data.A to a valid address, the SDK doesn't
318  // use the value from the frame.
319  surface->Data.A = frame->data[0] + 3;
320  break;
321 
322  case AV_PIX_FMT_XV30:
323  surface->Data.U = frame->data[0];
324  break;
325 
326  case AV_PIX_FMT_XV36:
327  surface->Data.U = frame->data[0];
328  surface->Data.Y = frame->data[0] + 2;
329  surface->Data.V = frame->data[0] + 4;
330  // Only set Data.A to a valid address, the SDK doesn't
331  // use the value from the frame.
332  surface->Data.A = frame->data[0] + 6;
333  break;
334 
335  default:
336  return AVERROR(ENOSYS);
337  }
338  surface->Data.PitchLow = frame->linesize[0];
339 
340  return 0;
341 }
342 
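/* Return the index of the QSVMid whose handle pair matches the frame's
 * surface MemId, or AVERROR_BUG if it is not found. */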
343 int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
344 {
345  int i;
346  for (i = 0; i < ctx->nb_mids; i++) {
347  QSVMid *mid = &ctx->mids[i];
348  mfxHDLPair *pair = (mfxHDLPair*)frame->surface.Data.MemId;
349  if ((mid->handle_pair->first == pair->first) &&
350  (mid->handle_pair->second == pair->second))
351  return i;
352  }
353  return AVERROR_BUG;
354 }
355 
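/* Convert the mfx picture structure bits into an AVFieldOrder value. */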
356 enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
357 {
358  enum AVFieldOrder field = AV_FIELD_UNKNOWN;
359  switch (mfx_pic_struct & 0xF) {
360  case MFX_PICSTRUCT_PROGRESSIVE:
361  field = AV_FIELD_PROGRESSIVE;
362  break;
363  case MFX_PICSTRUCT_FIELD_TFF:
364  field = AV_FIELD_TT;
365  break;
366  case MFX_PICSTRUCT_FIELD_BFF:
367  field = AV_FIELD_BB;
368  break;
369  }
370 
371  return field;
372 }
373 
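/* Convert the mfx frame type bits into an AVPictureType value. */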
374 enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
375 {
376  enum AVPictureType type;
377  switch (mfx_pic_type & 0x7) {
378  case MFX_FRAMETYPE_I:
379  if (mfx_pic_type & MFX_FRAMETYPE_S)
380  type = AV_PICTURE_TYPE_SI;
381  else
382  type = AV_PICTURE_TYPE_I;
383  break;
384  case MFX_FRAMETYPE_B:
385  type = AV_PICTURE_TYPE_B;
386  break;
387  case MFX_FRAMETYPE_P:
388  if (mfx_pic_type & MFX_FRAMETYPE_S)
389  type = AV_PICTURE_TYPE_SP;
390  else
391  type = AV_PICTURE_TYPE_P;
392  break;
393  case MFX_FRAMETYPE_UNKNOWN:
394  type = AV_PICTURE_TYPE_NONE;
395  break;
396  default:
397  av_assert0(0);
398  }
399 
400  return type;
401 }
402 
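/* Parse a ':'-separated list of hexadecimal plugin UIDs and load each plugin
 * into the session; this is a no-op when the legacy user-plugin API is not
 * available. */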
403 static int qsv_load_plugins(mfxSession session, const char *load_plugins,
404  void *logctx)
405 {
406 #if QSV_HAVE_USER_PLUGIN
407  if (!load_plugins || !*load_plugins)
408  return 0;
409 
410  while (*load_plugins) {
411  mfxPluginUID uid;
412  mfxStatus ret;
413  int i, err = 0;
414 
415  char *plugin = av_get_token(&load_plugins, ":");
416  if (!plugin)
417  return AVERROR(ENOMEM);
418  if (strlen(plugin) != 2 * sizeof(uid.Data)) {
419  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
420  err = AVERROR(EINVAL);
421  goto load_plugin_fail;
422  }
423 
424  for (i = 0; i < sizeof(uid.Data); i++) {
425  err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
426  if (err != 1) {
427  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID\n");
428  err = AVERROR(EINVAL);
429  goto load_plugin_fail;
430  }
431 
432  }
433 
434  ret = MFXVideoUSER_Load(session, &uid, 1);
435  if (ret < 0) {
436  char errorbuf[128];
437  snprintf(errorbuf, sizeof(errorbuf),
438  "Could not load the requested plugin '%s'", plugin);
439  err = ff_qsv_print_error(logctx, ret, errorbuf);
440  goto load_plugin_fail;
441  }
442 
443  if (*load_plugins)
444  load_plugins++;
445 load_plugin_fail:
446  av_freep(&plugin);
447  if (err < 0)
448  return err;
449  }
450 #endif
451 
452  return 0;
453 
454 }
455 
456 //This code is only required for Linux since a display handle is required.
457 //For Windows the session is complete and ready to use.
458 
459 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
460 static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
461 {
462  AVDictionary *child_device_opts = NULL;
463  AVVAAPIDeviceContext *hwctx;
464  int ret;
465 
466  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
467  av_dict_set(&child_device_opts, "driver", "iHD", 0);
468 
469  ret = av_hwdevice_ctx_create(&qs->va_device_ref, AV_HWDEVICE_TYPE_VAAPI, NULL, child_device_opts, 0);
470  av_dict_free(&child_device_opts);
471  if (ret < 0) {
472  av_log(avctx, AV_LOG_ERROR, "Failed to create a VAAPI device.\n");
473  return ret;
474  } else {
475  qs->va_device_ctx = (AVHWDeviceContext*)qs->va_device_ref->data;
476  hwctx = qs->va_device_ctx->hwctx;
477 
478  ret = MFXVideoCORE_SetHandle(qs->session,
479  (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)hwctx->display);
480  if (ret < 0) {
481  return ff_qsv_print_error(avctx, ret, "Error during set display handle\n");
482  }
483  }
484 
485  return 0;
486 }
487 #endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
488 
489 #if QSV_ONEVPL
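/* Create a oneVPL loader and filter the available implementations by type
 * (software vs. hardware) and minimum API version. */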
490 static int qsv_new_mfx_loader(AVCodecContext *avctx,
491  mfxIMPL implementation,
492  mfxVersion *pver,
493  void **ploader)
494 {
495  mfxStatus sts;
496  mfxLoader loader = NULL;
497  mfxConfig cfg;
498  mfxVariant impl_value;
499 
500  loader = MFXLoad();
501  if (!loader) {
502  av_log(avctx, AV_LOG_ERROR, "Error creating a MFX loader\n");
503  goto fail;
504  }
505 
506  /* Create configurations for implementation */
507  cfg = MFXCreateConfig(loader);
508  if (!cfg) {
509  av_log(avctx, AV_LOG_ERROR, "Error creating an MFX configuration\n");
510  goto fail;
511  }
512 
513  impl_value.Type = MFX_VARIANT_TYPE_U32;
514  impl_value.Data.U32 = (implementation == MFX_IMPL_SOFTWARE) ?
515  MFX_IMPL_TYPE_SOFTWARE : MFX_IMPL_TYPE_HARDWARE;
516  sts = MFXSetConfigFilterProperty(cfg,
517  (const mfxU8 *)"mfxImplDescription.Impl", impl_value);
518  if (sts != MFX_ERR_NONE) {
519  av_log(avctx, AV_LOG_ERROR, "Error adding a MFX configuration "
520  "property: %d\n", sts);
521  goto fail;
522  }
523 
524  impl_value.Type = MFX_VARIANT_TYPE_U32;
525  impl_value.Data.U32 = pver->Version;
526  sts = MFXSetConfigFilterProperty(cfg,
527  (const mfxU8 *)"mfxImplDescription.ApiVersion.Version",
528  impl_value);
529  if (sts != MFX_ERR_NONE) {
530  av_log(avctx, AV_LOG_ERROR, "Error adding a MFX configuration "
531  "property: %d\n", sts);
532  goto fail;
533  }
534 
535  *ploader = loader;
536 
537  return 0;
538 
539 fail:
540  if (loader)
541  MFXUnload(loader);
542 
543  *ploader = NULL;
544  return AVERROR_UNKNOWN;
545 }
546 
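/* Enumerate the implementations exposed by the loader and create a session
 * from the first one that succeeds. */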
547 static int qsv_create_mfx_session_from_loader(void *ctx, mfxLoader loader, mfxSession *psession)
548 {
549  mfxStatus sts;
550  mfxSession session = NULL;
551  uint32_t impl_idx = 0;
552 
553  while (1) {
554  /* Enumerate all implementations */
555  mfxImplDescription *impl_desc;
556 
557  sts = MFXEnumImplementations(loader, impl_idx,
558  MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
559  (mfxHDL *)&impl_desc);
560  /* Failed to find an available implementation */
561  if (sts == MFX_ERR_NOT_FOUND)
562  break;
563  else if (sts != MFX_ERR_NONE) {
564  impl_idx++;
565  continue;
566  }
567 
568  sts = MFXCreateSession(loader, impl_idx, &session);
569  MFXDispReleaseImplDescription(loader, impl_desc);
570  if (sts == MFX_ERR_NONE)
571  break;
572 
573  impl_idx++;
574  }
575 
576  if (sts != MFX_ERR_NONE) {
577  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX session: %d.\n", sts);
578  goto fail;
579  }
580 
581  *psession = session;
582 
583  return 0;
584 
585 fail:
586  if (session)
587  MFXClose(session);
588 
589  *psession = NULL;
590  return AVERROR_UNKNOWN;
591 }
592 
593 static int qsv_create_mfx_session(AVCodecContext *avctx,
594  mfxIMPL implementation,
595  mfxVersion *pver,
596  int gpu_copy,
597  mfxSession *psession,
598  void **ploader)
599 {
600  mfxLoader loader = NULL;
601 
602  /* Don't create a new MFX loader if the input loader is valid */
603  if (*ploader == NULL) {
604  av_log(avctx, AV_LOG_VERBOSE,
605  "Use Intel(R) oneVPL to create MFX session, the required "
606  "implementation version is %d.%d\n",
607  pver->Major, pver->Minor);
608 
609  if (qsv_new_mfx_loader(avctx, implementation, pver, (void **)&loader))
610  goto fail;
611 
612  av_assert0(loader);
613  } else {
614  av_log(avctx, AV_LOG_VERBOSE,
615  "Use Intel(R) oneVPL to create MFX session with the specified MFX loader\n");
616 
617  loader = *ploader;
618  }
619 
620  if (qsv_create_mfx_session_from_loader(avctx, loader, psession))
621  goto fail;
622 
623  if (!*ploader)
624  *ploader = loader;
625 
626  return 0;
627 
628 fail:
629  if (!*ploader && loader)
630  MFXUnload(loader);
631 
632  return AVERROR_UNKNOWN;
633 }
634 
635 #else
636 
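/* Legacy MediaSDK path: create the session directly with MFXInitEx; no
 * loader is involved. */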
637 static int qsv_create_mfx_session(AVCodecContext *avctx,
638  mfxIMPL implementation,
639  mfxVersion *pver,
640  int gpu_copy,
641  mfxSession *psession,
642  void **ploader)
643 {
644  mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
645  mfxSession session = NULL;
646  mfxStatus sts;
647 
648  av_log(avctx, AV_LOG_VERBOSE,
649  "Use Intel(R) Media SDK to create MFX session, the required "
650  "implementation version is %d.%d\n",
651  pver->Major, pver->Minor);
652 
653  *psession = NULL;
654  *ploader = NULL;
655 
656  init_par.GPUCopy = gpu_copy;
657  init_par.Implementation = implementation;
658  init_par.Version = *pver;
659  sts = MFXInitEx(init_par, &session);
660  if (sts < 0)
661  return ff_qsv_print_error(avctx, sts,
662  "Error initializing a MFX session");
663  else if (sts > 0) {
664  ff_qsv_print_warning(avctx, sts,
665  "Warning in MFX initialization");
666  return AVERROR_UNKNOWN;
667  }
668 
669  *psession = session;
670 
671  return 0;
672 }
673 
674 #endif
675 
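/* Create a self-contained MFX session (and, on Linux, a VAAPI device for its
 * display handle), load any requested plugins, and log which implementation
 * was picked. */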
676 int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
677  const char *load_plugins, int gpu_copy)
678 {
679  mfxIMPL impls[] = {
680 #if CONFIG_D3D11VA
681  MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11,
682 #endif
683  MFX_IMPL_AUTO_ANY
684  };
685  mfxIMPL impl;
686  mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
687 
688  const char *desc;
689  int ret;
690 
691  for (int i = 0; i < FF_ARRAY_ELEMS(impls); i++) {
692  ret = qsv_create_mfx_session(avctx, impls[i], &ver, gpu_copy, &qs->session,
693  &qs->loader);
694 
695  if (ret == 0)
696  break;
697 
698  if (i == FF_ARRAY_ELEMS(impls) - 1)
699  return ret;
700  else
701  av_log(avctx, AV_LOG_ERROR, "The current mfx implementation is not "
702  "supported, try next mfx implementation.\n");
703  }
704 
705 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
706  ret = ff_qsv_set_display_handle(avctx, qs);
707  if (ret < 0)
708  return ret;
709 #endif
710 
711  ret = qsv_load_plugins(qs->session, load_plugins, avctx);
712  if (ret < 0) {
713  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
714  return ret;
715  }
716 
717  ret = MFXQueryIMPL(qs->session, &impl);
718  if (ret != MFX_ERR_NONE)
719  return ff_qsv_print_error(avctx, ret,
720  "Error querying the session attributes");
721 
722  switch (MFX_IMPL_BASETYPE(impl)) {
723  case MFX_IMPL_SOFTWARE:
724  desc = "software";
725  break;
726  case MFX_IMPL_HARDWARE:
727  case MFX_IMPL_HARDWARE2:
728  case MFX_IMPL_HARDWARE3:
729  case MFX_IMPL_HARDWARE4:
730  desc = "hardware accelerated";
731  break;
732  default:
733  desc = "unknown";
734  }
735 
736  av_log(avctx, AV_LOG_VERBOSE,
737  "Initialized an internal MFX session using %s implementation\n",
738  desc);
739 
740  return 0;
741 }
742 
743 static void mids_buf_free(void *opaque, uint8_t *data)
744 {
745  AVBufferRef *hw_frames_ref = opaque;
746  av_buffer_unref(&hw_frames_ref);
747  av_freep(&data);
748 }
749 
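/* Build a buffer holding one QSVMid per surface of the frames context; the
 * buffer keeps a reference to the frames context alive until it is freed. */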
750 static AVBufferRef *qsv_create_mids(AVBufferRef *hw_frames_ref)
751 {
752  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
753  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
754  int nb_surfaces = frames_hwctx->nb_surfaces;
755 
756  AVBufferRef *mids_buf, *hw_frames_ref1;
757  QSVMid *mids;
758  int i;
759 
760  hw_frames_ref1 = av_buffer_ref(hw_frames_ref);
761  if (!hw_frames_ref1)
762  return NULL;
763 
764  mids = av_calloc(nb_surfaces, sizeof(*mids));
765  if (!mids) {
766  av_buffer_unref(&hw_frames_ref1);
767  return NULL;
768  }
769 
770  mids_buf = av_buffer_create((uint8_t*)mids, nb_surfaces * sizeof(*mids),
771  mids_buf_free, hw_frames_ref1, 0);
772  if (!mids_buf) {
773  av_buffer_unref(&hw_frames_ref1);
774  av_freep(&mids);
775  return NULL;
776  }
777 
778  for (i = 0; i < nb_surfaces; i++) {
779  QSVMid *mid = &mids[i];
780  mid->handle_pair = (mfxHDLPair*)frames_hwctx->surfaces[i].Data.MemId;
781  mid->hw_frames_ref = hw_frames_ref1;
782  }
783 
784  return mids_buf;
785 }
786 
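/* Fill an mfxFrameAllocResponse from an existing QSVMid array; the two extra
 * slots after NumFrameActual hold references to the frames context and the
 * mids buffer so they outlive the response. */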
787 static int qsv_setup_mids(mfxFrameAllocResponse *resp, AVBufferRef *hw_frames_ref,
788  AVBufferRef *mids_buf)
789 {
790  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
791  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
792  QSVMid *mids = (QSVMid*)mids_buf->data;
793  int nb_surfaces = frames_hwctx->nb_surfaces;
794  int i;
795 
796  // the allocated size of the array is two larger than the number of
797  // surfaces, we store the references to the frames context and the
798  // QSVMid array there
799  resp->mids = av_calloc(nb_surfaces + 2, sizeof(*resp->mids));
800  if (!resp->mids)
801  return AVERROR(ENOMEM);
802 
803  for (i = 0; i < nb_surfaces; i++)
804  resp->mids[i] = &mids[i];
805  resp->NumFrameActual = nb_surfaces;
806 
807  resp->mids[resp->NumFrameActual] = (mfxMemId)av_buffer_ref(hw_frames_ref);
808  if (!resp->mids[resp->NumFrameActual]) {
809  av_freep(&resp->mids);
810  return AVERROR(ENOMEM);
811  }
812 
813  resp->mids[resp->NumFrameActual + 1] = av_buffer_ref(mids_buf);
814  if (!resp->mids[resp->NumFrameActual + 1]) {
815  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
816  av_freep(&resp->mids);
817  return AVERROR(ENOMEM);
818  }
819 
820  return 0;
821 }
822 
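/* mfxFrameAllocator.Alloc callback: external requests are served from the
 * caller's frames context, internal requests get a newly allocated QSV
 * frames context. */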
823 static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
824  mfxFrameAllocResponse *resp)
825 {
826  QSVFramesContext *ctx = pthis;
827  int ret;
828 
829  /* this should only be called from an encoder or decoder and
830  * only allocates video memory frames */
831  if (!(req->Type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
832  MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)) ||
833  !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)))
834  return MFX_ERR_UNSUPPORTED;
835 
836  if (req->Type & MFX_MEMTYPE_EXTERNAL_FRAME) {
837  /* external frames -- fill from the caller-supplied frames context */
838  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
839  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
840  mfxFrameInfo *i = &req->Info;
841  mfxFrameInfo *i1 = &frames_hwctx->surfaces[0].Info;
842 
843  if (i->Width > i1->Width || i->Height > i1->Height ||
844  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
845  av_log(ctx->logctx, AV_LOG_ERROR, "Mismatching surface properties in an "
846  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
847  i->Width, i->Height, i->FourCC, i->ChromaFormat,
848  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
849  return MFX_ERR_UNSUPPORTED;
850  }
851 
852  ret = qsv_setup_mids(resp, ctx->hw_frames_ctx, ctx->mids_buf);
853  if (ret < 0) {
854  av_log(ctx->logctx, AV_LOG_ERROR,
855  "Error filling an external frame allocation request\n");
856  return MFX_ERR_MEMORY_ALLOC;
857  }
858  } else if (req->Type & MFX_MEMTYPE_INTERNAL_FRAME) {
859  /* internal frames -- allocate a new hw frames context */
860  AVHWFramesContext *ext_frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
861  mfxFrameInfo *i = &req->Info;
862 
863  AVBufferRef *frames_ref, *mids_buf;
864  AVHWFramesContext *frames_ctx;
865  AVQSVFramesContext *frames_hwctx;
866 
867  frames_ref = av_hwframe_ctx_alloc(ext_frames_ctx->device_ref);
868  if (!frames_ref)
869  return MFX_ERR_MEMORY_ALLOC;
870 
871  frames_ctx = (AVHWFramesContext*)frames_ref->data;
872  frames_hwctx = frames_ctx->hwctx;
873 
874  frames_ctx->format = AV_PIX_FMT_QSV;
875  frames_ctx->sw_format = ff_qsv_map_fourcc(i->FourCC);
876  frames_ctx->width = i->Width;
877  frames_ctx->height = i->Height;
878  frames_ctx->initial_pool_size = req->NumFrameSuggested;
879 
880  frames_hwctx->frame_type = req->Type;
881 
882  ret = av_hwframe_ctx_init(frames_ref);
883  if (ret < 0) {
884  av_log(ctx->logctx, AV_LOG_ERROR,
885  "Error initializing a frames context for an internal frame "
886  "allocation request\n");
887  av_buffer_unref(&frames_ref);
888  return MFX_ERR_MEMORY_ALLOC;
889  }
890 
891  mids_buf = qsv_create_mids(frames_ref);
892  if (!mids_buf) {
893  av_buffer_unref(&frames_ref);
894  return MFX_ERR_MEMORY_ALLOC;
895  }
896 
897  ret = qsv_setup_mids(resp, frames_ref, mids_buf);
898  av_buffer_unref(&mids_buf);
899  av_buffer_unref(&frames_ref);
900  if (ret < 0) {
901  av_log(ctx->logctx, AV_LOG_ERROR,
902  "Error filling an internal frame allocation request\n");
903  return MFX_ERR_MEMORY_ALLOC;
904  }
905  } else {
906  return MFX_ERR_UNSUPPORTED;
907  }
908 
909  return MFX_ERR_NONE;
910 }
911 
912 static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
913 {
914  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
915  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual + 1]);
916  av_freep(&resp->mids);
917  return MFX_ERR_NONE;
918 }
919 
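/* mfxFrameAllocator.Lock callback: map the video-memory surface into a
 * freshly allocated system-memory frame and expose its planes to the SDK. */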
920 static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
921 {
922  QSVMid *qsv_mid = mid;
923  AVHWFramesContext *hw_frames_ctx = (AVHWFramesContext*)qsv_mid->hw_frames_ref->data;
924  AVQSVFramesContext *hw_frames_hwctx = hw_frames_ctx->hwctx;
925  int ret;
926 
927  if (qsv_mid->locked_frame)
928  return MFX_ERR_UNDEFINED_BEHAVIOR;
929 
930  /* Allocate a system memory frame that will hold the mapped data. */
931  qsv_mid->locked_frame = av_frame_alloc();
932  if (!qsv_mid->locked_frame)
933  return MFX_ERR_MEMORY_ALLOC;
934  qsv_mid->locked_frame->format = hw_frames_ctx->sw_format;
935 
936  /* wrap the provided handle in a hwaccel AVFrame */
937  qsv_mid->hw_frame = av_frame_alloc();
938  if (!qsv_mid->hw_frame)
939  goto fail;
940 
941  qsv_mid->hw_frame->data[3] = (uint8_t*)&qsv_mid->surf;
942  qsv_mid->hw_frame->format = AV_PIX_FMT_QSV;
943 
944  // doesn't really matter what buffer is used here
945  qsv_mid->hw_frame->buf[0] = av_buffer_alloc(1);
946  if (!qsv_mid->hw_frame->buf[0])
947  goto fail;
948 
949  qsv_mid->hw_frame->width = hw_frames_ctx->width;
950  qsv_mid->hw_frame->height = hw_frames_ctx->height;
951 
952  qsv_mid->hw_frame->hw_frames_ctx = av_buffer_ref(qsv_mid->hw_frames_ref);
953  if (!qsv_mid->hw_frame->hw_frames_ctx)
954  goto fail;
955 
956  qsv_mid->surf.Info = hw_frames_hwctx->surfaces[0].Info;
957  qsv_mid->surf.Data.MemId = qsv_mid->handle_pair;
958 
959  /* map the data to the system memory */
960  ret = av_hwframe_map(qsv_mid->locked_frame, qsv_mid->hw_frame,
961  AV_HWFRAME_MAP_DIRECT);
962  if (ret < 0)
963  goto fail;
964 
965  ptr->Pitch = qsv_mid->locked_frame->linesize[0];
966  ptr->Y = qsv_mid->locked_frame->data[0];
967  ptr->U = qsv_mid->locked_frame->data[1];
968  ptr->V = qsv_mid->locked_frame->data[1] + 1;
969 
970  return MFX_ERR_NONE;
971 fail:
972  av_frame_free(&qsv_mid->hw_frame);
973  av_frame_free(&qsv_mid->locked_frame);
974  return MFX_ERR_MEMORY_ALLOC;
975 }
976 
977 static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
978 {
979  QSVMid *qsv_mid = mid;
980 
981  av_frame_free(&qsv_mid->locked_frame);
982  av_frame_free(&qsv_mid->hw_frame);
983 
984  return MFX_ERR_NONE;
985 }
986 
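/* mfxFrameAllocator.GetHDL callback: hand back the native handle pair stored
 * in the QSVMid. */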
987 static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
988 {
989  QSVMid *qsv_mid = (QSVMid*)mid;
990  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
991  mfxHDLPair *pair_src = (mfxHDLPair*)qsv_mid->handle_pair;
992 
993  pair_dst->first = pair_src->first;
994 
995  if (pair_src->second != (mfxMemId)MFX_INFINITE)
996  pair_dst->second = pair_src->second;
997  return MFX_ERR_NONE;
998 }
999 
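/* Create a child session that reuses the hardware handle of the session in
 * the supplied QSV device context and joins it to that parent session when
 * the runtime version allows it. */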
1000 int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
1001  AVBufferRef *device_ref, const char *load_plugins,
1002  int gpu_copy)
1003 {
1004  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref->data;
1005  AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
1006  mfxSession parent_session = device_hwctx->session;
1007  void *loader = device_hwctx->loader;
1008  mfxHDL handle = NULL;
1009  int hw_handle_supported = 0;
1010 
1011  mfxSession session;
1012  mfxVersion ver;
1013  mfxIMPL impl;
1014  mfxHandleType handle_type;
1015  mfxStatus err;
1016  int ret;
1017 
1018  err = MFXQueryIMPL(parent_session, &impl);
1019  if (err == MFX_ERR_NONE)
1020  err = MFXQueryVersion(parent_session, &ver);
1021  if (err != MFX_ERR_NONE)
1022  return ff_qsv_print_error(avctx, err,
1023  "Error querying the session attributes");
1024 
1025  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
1026  handle_type = MFX_HANDLE_VA_DISPLAY;
1027  hw_handle_supported = 1;
1028  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
1029  handle_type = MFX_HANDLE_D3D11_DEVICE;
1030  hw_handle_supported = 1;
1031  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
1032  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1033  hw_handle_supported = 1;
1034  }
1035 
1036  if (hw_handle_supported) {
1037  err = MFXVideoCORE_GetHandle(parent_session, handle_type, &handle);
1038  if (err != MFX_ERR_NONE) {
1039  return ff_qsv_print_error(avctx, err,
1040  "Error getting handle session");
1041  }
1042  }
1043  if (!handle) {
1044  av_log(avctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
1045  "from the session\n");
1046  }
1047 
1048  ret = qsv_create_mfx_session(avctx, impl, &ver, gpu_copy, &session,
1049  &loader);
1050  if (ret)
1051  return ret;
1052 
1053  if (handle) {
1054  err = MFXVideoCORE_SetHandle(session, handle_type, handle);
1055  if (err != MFX_ERR_NONE)
1056  return ff_qsv_print_error(avctx, err,
1057  "Error setting a HW handle");
1058  }
1059 
1060  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
1061  err = MFXJoinSession(parent_session, session);
1062  if (err != MFX_ERR_NONE)
1063  return ff_qsv_print_error(avctx, err,
1064  "Error joining session");
1065  }
1066 
1067  ret = qsv_load_plugins(session, load_plugins, avctx);
1068  if (ret < 0) {
1069  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
1070  return ret;
1071  }
1072 
1073  *psession = session;
1074  return 0;
1075 }
1076 
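/* Create a session bound to the given frames context and, unless opaque
 * memory is used, install the custom frame allocator defined above. */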
1077 int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession,
1078  QSVFramesContext *qsv_frames_ctx,
1079  const char *load_plugins, int opaque, int gpu_copy)
1080 {
1081  mfxFrameAllocator frame_allocator = {
1082  .pthis = qsv_frames_ctx,
1083  .Alloc = qsv_frame_alloc,
1084  .Lock = qsv_frame_lock,
1085  .Unlock = qsv_frame_unlock,
1086  .GetHDL = qsv_frame_get_hdl,
1087  .Free = qsv_frame_free,
1088  };
1089 
1090  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)qsv_frames_ctx->hw_frames_ctx->data;
1091  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
1092 
1093  mfxSession session;
1094  mfxStatus err;
1095 
1096  int ret;
1097 
1098  ret = ff_qsv_init_session_device(avctx, &session,
1099  frames_ctx->device_ref, load_plugins, gpu_copy);
1100  if (ret < 0)
1101  return ret;
1102 
1103  if (!opaque) {
1104  qsv_frames_ctx->logctx = avctx;
1105 
1106  /* allocate the memory ids for the external frames */
1107  av_buffer_unref(&qsv_frames_ctx->mids_buf);
1108  qsv_frames_ctx->mids_buf = qsv_create_mids(qsv_frames_ctx->hw_frames_ctx);
1109  if (!qsv_frames_ctx->mids_buf)
1110  return AVERROR(ENOMEM);
1111  qsv_frames_ctx->mids = (QSVMid*)qsv_frames_ctx->mids_buf->data;
1112  qsv_frames_ctx->nb_mids = frames_hwctx->nb_surfaces;
1113 
1114  err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
1115  if (err != MFX_ERR_NONE)
1116  return ff_qsv_print_error(avctx, err,
1117  "Error setting a frame allocator");
1118  }
1119 
1120  *psession = session;
1121  return 0;
1122 }
1123 
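/* Tear down a session created by ff_qsv_init_internal_session, including its
 * loader and, on Linux, the VAAPI device reference. */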
1124 int ff_qsv_close_internal_session(QSVSession *qs)
1125 {
1126  if (qs->session) {
1127  MFXClose(qs->session);
1128  qs->session = NULL;
1129  }
1130 
1131  if (qs->loader) {
1132  MFXUnload(qs->loader);
1133  qs->loader = NULL;
1134  }
1135 
1136 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
1137  av_buffer_unref(&qs->va_device_ref);
1138 #endif
1139  return 0;
1140 }
1141 
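/* Attach an extra parameter buffer to the frame unless a buffer of the same
 * type is already present or the per-frame limit is reached. */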
1142 void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame,
1143  mfxExtBuffer * param)
1144 {
1145  int i;
1146 
1147  for (i = 0; i < frame->num_ext_params; i++) {
1148  mfxExtBuffer *ext_buffer = frame->ext_param[i];
1149 
1150  if (ext_buffer->BufferId == param->BufferId) {
1151  av_log(avctx, AV_LOG_WARNING, "A buffer with the same type has been "
1152  "added\n");
1153  return;
1154  }
1155  }
1156 
1157  if (frame->num_ext_params < QSV_MAX_FRAME_EXT_PARAMS) {
1158  frame->ext_param[frame->num_ext_params] = param;
1159  frame->num_ext_params++;
1160  frame->surface.Data.NumExtParam = frame->num_ext_params;
1161  } else {
1162  av_log(avctx, AV_LOG_WARNING, "Ignoring this extra buffer because there is "
1163  "not enough space\n");
1164  }
1165 
1166 
1167 }