FFmpeg
qsv.c
Go to the documentation of this file.
1 /*
2  * Intel MediaSDK QSV encoder/decoder shared code
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include <mfxvideo.h>
22 #include <mfxjpeg.h>
23 #include <mfxvp8.h>
24 
25 #include <stdio.h>
26 #include <string.h>
27 
28 #include "libavutil/avstring.h"
29 #include "libavutil/common.h"
30 #include "libavutil/error.h"
31 #include "libavutil/hwcontext.h"
33 #include "libavutil/avassert.h"
34 #include "libavutil/mem.h"
35 
36 #include "avcodec.h"
37 #include "qsv_internal.h"
38 #include "refstruct.h"
39 
40 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
41 #define QSV_HAVE_USER_PLUGIN !QSV_ONEVPL
42 #define QSV_HAVE_AUDIO !QSV_ONEVPL
43 
44 #if QSV_HAVE_USER_PLUGIN
45 #include <mfxplugin.h>
46 #endif
47 
48 #if QSV_ONEVPL
49 #include <mfxdispatcher.h>
50 #else
51 #define MFXUnload(a) do { } while(0)
52 #endif
53 
55 {
56  switch (codec_id) {
57  case AV_CODEC_ID_H264:
58  return MFX_CODEC_AVC;
59  case AV_CODEC_ID_HEVC:
60  return MFX_CODEC_HEVC;
63  return MFX_CODEC_MPEG2;
64  case AV_CODEC_ID_VC1:
65  return MFX_CODEC_VC1;
66  case AV_CODEC_ID_VP8:
67  return MFX_CODEC_VP8;
68  case AV_CODEC_ID_MJPEG:
69  return MFX_CODEC_JPEG;
70  case AV_CODEC_ID_VP9:
71  return MFX_CODEC_VP9;
72 #if QSV_VERSION_ATLEAST(1, 34)
73  case AV_CODEC_ID_AV1:
74  return MFX_CODEC_AV1;
75 #endif
76 
77  default:
78  break;
79  }
80 
81  return AVERROR(ENOSYS);
82 }
83 
84 static const struct {
86  const char *desc;
87 } qsv_iopatterns[] = {
88  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
89  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
90 #if QSV_HAVE_OPAQUE
91  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
92 #endif
93  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
94  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
95 #if QSV_HAVE_OPAQUE
96  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
97 #endif
98 };
99 
100 int ff_qsv_print_iopattern(void *log_ctx, int mfx_iopattern,
101  const char *extra_string)
102 {
103  const char *desc = NULL;
104 
105  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
107  desc = qsv_iopatterns[i].desc;
108  }
109  }
110  if (!desc)
111  desc = "unknown iopattern";
112 
113  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
114  return 0;
115 }
116 
/* Translation table from libmfx status codes to FFmpeg error codes, with a
 * human-readable description for each entry; searched linearly by
 * qsv_map_error().  Warning codes (MFX_WRN_*) map to 0 since they do not
 * indicate failure. */
static const struct {
    mfxStatus   mfxerr; // libmfx status code
    int         averr;  // corresponding AVERROR code (0 for success/warnings)
    const char *desc;   // description used in log messages
} qsv_errors[] = {
    { MFX_ERR_NONE,                     0,               "success"                  },
    { MFX_ERR_UNKNOWN,                  AVERROR_UNKNOWN, "unknown error"            },
    { MFX_ERR_NULL_PTR,                 AVERROR(EINVAL), "NULL pointer"             },
    { MFX_ERR_UNSUPPORTED,              AVERROR(ENOSYS), "unsupported"              },
    { MFX_ERR_MEMORY_ALLOC,             AVERROR(ENOMEM), "failed to allocate memory" },
    { MFX_ERR_NOT_ENOUGH_BUFFER,        AVERROR(ENOMEM), "insufficient input/output buffer" },
    { MFX_ERR_INVALID_HANDLE,           AVERROR(EINVAL), "invalid handle"           },
    { MFX_ERR_LOCK_MEMORY,              AVERROR(EIO),    "failed to lock the memory block" },
    { MFX_ERR_NOT_INITIALIZED,          AVERROR_BUG,     "not initialized"          },
    { MFX_ERR_NOT_FOUND,                AVERROR(ENOSYS), "specified object was not found" },
    /* the following 3 errors should always be handled explicitly, so those "mappings"
     * are for completeness only */
    { MFX_ERR_MORE_DATA,                AVERROR_UNKNOWN, "expect more data at input" },
    { MFX_ERR_MORE_SURFACE,             AVERROR_UNKNOWN, "expect more surface at output" },
    { MFX_ERR_MORE_BITSTREAM,           AVERROR_UNKNOWN, "expect more bitstream at output" },
    { MFX_ERR_ABORTED,                  AVERROR_UNKNOWN, "operation aborted"        },
    { MFX_ERR_DEVICE_LOST,              AVERROR(EIO),    "device lost"              },
    { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters" },
    { MFX_ERR_INVALID_VIDEO_PARAM,      AVERROR(EINVAL), "invalid video parameters" },
    { MFX_ERR_UNDEFINED_BEHAVIOR,       AVERROR_BUG,     "undefined behavior"       },
    { MFX_ERR_DEVICE_FAILED,            AVERROR(EIO),    "device failed"            },
#if QSV_HAVE_AUDIO
    { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters" },
    { MFX_ERR_INVALID_AUDIO_PARAM,      AVERROR(EINVAL), "invalid audio parameters" },
#endif
    { MFX_ERR_GPU_HANG,                 AVERROR(EIO),    "GPU Hang"                 },
    { MFX_ERR_REALLOC_SURFACE,          AVERROR_UNKNOWN, "need bigger surface for output" },

    { MFX_WRN_IN_EXECUTION,             0,               "operation in execution"   },
    { MFX_WRN_DEVICE_BUSY,              0,               "device busy"              },
    { MFX_WRN_VIDEO_PARAM_CHANGED,      0,               "video parameters changed" },
    { MFX_WRN_PARTIAL_ACCELERATION,     0,               "partial acceleration"     },
    { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0,               "incompatible video parameters" },
    { MFX_WRN_VALUE_NOT_CHANGED,        0,               "value is saturated"       },
    { MFX_WRN_OUT_OF_RANGE,             0,               "value out of range"       },
    { MFX_WRN_FILTER_SKIPPED,           0,               "filter skipped"           },
#if QSV_HAVE_AUDIO
    { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0,               "incompatible audio parameters" },
#endif

#if QSV_VERSION_ATLEAST(1, 31)
    { MFX_ERR_NONE_PARTIAL_OUTPUT,      0,               "partial output"           },
#endif
};
166 
167 /**
168  * Convert a libmfx error code into an FFmpeg error code.
169  */
170 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
171 {
172  int i;
173  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
174  if (qsv_errors[i].mfxerr == mfx_err) {
175  if (desc)
176  *desc = qsv_errors[i].desc;
177  return qsv_errors[i].averr;
178  }
179  }
180  if (desc)
181  *desc = "unknown error";
182  return AVERROR_UNKNOWN;
183 }
184 
185 int ff_qsv_print_error(void *log_ctx, mfxStatus err,
186  const char *error_string)
187 {
188  const char *desc;
189  int ret = qsv_map_error(err, &desc);
190  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
191  return ret;
192 }
193 
194 int ff_qsv_print_warning(void *log_ctx, mfxStatus err,
195  const char *warning_string)
196 {
197  const char *desc;
198  int ret = qsv_map_error(err, &desc);
199  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
200  return ret;
201 }
202 
204 {
205  switch (fourcc) {
206  case MFX_FOURCC_NV12: return AV_PIX_FMT_NV12;
207  case MFX_FOURCC_P010: return AV_PIX_FMT_P010;
208  case MFX_FOURCC_P8: return AV_PIX_FMT_PAL8;
209  case MFX_FOURCC_A2RGB10: return AV_PIX_FMT_X2RGB10;
210  case MFX_FOURCC_RGB4: return AV_PIX_FMT_BGRA;
211  case MFX_FOURCC_YUY2: return AV_PIX_FMT_YUYV422;
212  case MFX_FOURCC_Y210: return AV_PIX_FMT_Y210;
213  case MFX_FOURCC_AYUV: return AV_PIX_FMT_VUYX;
214  case MFX_FOURCC_Y410: return AV_PIX_FMT_XV30;
215 #if QSV_VERSION_ATLEAST(1, 31)
216  case MFX_FOURCC_P016: return AV_PIX_FMT_P012;
217  case MFX_FOURCC_Y216: return AV_PIX_FMT_Y212;
218  case MFX_FOURCC_Y416: return AV_PIX_FMT_XV36;
219 #endif
220  }
221  return AV_PIX_FMT_NONE;
222 }
223 
/**
 * Translate an FFmpeg pixel format into the matching MFX FourCC plus a
 * format-dependent shift flag.
 *
 * @param format input pixel format
 * @param fourcc receives the MFX_FOURCC_* value for the format
 * @param shift  receives 1 for the 10/12-bit formats below, 0 otherwise
 *               (presumably the value for mfxFrameInfo.Shift -- confirm
 *               against callers)
 * @return the pixel format actually used for the surface -- which may
 *         differ from the input, e.g. YUV420P/YUVJ420P are handled as
 *         NV12 -- or AVERROR(ENOSYS) for unsupported formats
 */
int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc, uint16_t *shift)
{
    switch (format) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_NV12:
        *fourcc = MFX_FOURCC_NV12;
        *shift = 0;
        return AV_PIX_FMT_NV12;
    case AV_PIX_FMT_P010:
        *fourcc = MFX_FOURCC_P010;
        *shift = 1;
        return AV_PIX_FMT_P010;
    case AV_PIX_FMT_X2RGB10:
        *fourcc = MFX_FOURCC_A2RGB10;
        *shift = 1;
        return AV_PIX_FMT_X2RGB10;
    case AV_PIX_FMT_BGRA:
        *fourcc = MFX_FOURCC_RGB4;
        *shift = 0;
        return AV_PIX_FMT_BGRA;
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUYV422:
        *fourcc = MFX_FOURCC_YUY2;
        *shift = 0;
        return AV_PIX_FMT_YUYV422;
    case AV_PIX_FMT_Y210:
        *fourcc = MFX_FOURCC_Y210;
        *shift = 1;
        return AV_PIX_FMT_Y210;
    case AV_PIX_FMT_VUYX:
        *fourcc = MFX_FOURCC_AYUV;
        *shift = 0;
        return AV_PIX_FMT_VUYX;
    case AV_PIX_FMT_XV30:
        *fourcc = MFX_FOURCC_Y410;
        *shift = 0;
        return AV_PIX_FMT_XV30;
#if QSV_VERSION_ATLEAST(1, 31)
    case AV_PIX_FMT_P012:
        *fourcc = MFX_FOURCC_P016;
        *shift = 1;
        return AV_PIX_FMT_P012;
    case AV_PIX_FMT_Y212:
        *fourcc = MFX_FOURCC_Y216;
        *shift = 1;
        return AV_PIX_FMT_Y212;
    case AV_PIX_FMT_XV36:
        *fourcc = MFX_FOURCC_Y416;
        *shift = 1;
        return AV_PIX_FMT_XV36;
#endif
    default:
        return AVERROR(ENOSYS);
    }
}
282 
/**
 * Point the data fields of an mfxFrameSurface1 at the pixel data of a
 * system-memory AVFrame, according to the frame's pixel format.
 *
 * No data is copied: the surface aliases the frame's buffers, so the frame
 * must outlive any SDK use of the surface.
 *
 * @param frame   source frame holding the pixel data (system memory)
 * @param surface surface whose Data pointers and pitch are filled in
 * @return 0 on success, AVERROR(ENOSYS) for unsupported pixel formats
 */
int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_P010:
    case AV_PIX_FMT_P012:
        surface->Data.Y = frame->data[0];
        surface->Data.UV = frame->data[1];
        /* The SDK checks Data.V when using system memory for VP9 encoding */
        surface->Data.V = surface->Data.UV + 1;
        break;
    case AV_PIX_FMT_BGRA:
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
    /* packed YUYV: Y at even bytes, U/V interleaved at odd bytes */
    case AV_PIX_FMT_YUYV422:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.V = frame->data[0] + 3;
        break;

    /* 10/12-bit packed 4:2:2 uses the 16-bit sample pointers */
    case AV_PIX_FMT_Y210:
    case AV_PIX_FMT_Y212:
        surface->Data.Y16 = (mfxU16 *)frame->data[0];
        surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
        surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
        break;

    case AV_PIX_FMT_VUYX:
        surface->Data.V = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.Y = frame->data[0] + 2;
        // Only set Data.A to a valid address, the SDK doesn't
        // use the value from the frame.
        surface->Data.A = frame->data[0] + 3;
        break;

    case AV_PIX_FMT_XV30:
        surface->Data.U = frame->data[0];
        break;

    case AV_PIX_FMT_XV36:
        surface->Data.U = frame->data[0];
        surface->Data.Y = frame->data[0] + 2;
        surface->Data.V = frame->data[0] + 4;
        // Only set Data.A to a valid address, the SDK doesn't
        // use the value from the frame.
        surface->Data.A = frame->data[0] + 6;
        break;

    default:
        return AVERROR(ENOSYS);
    }
    surface->Data.PitchLow = frame->linesize[0];

    return 0;
}
343 
345 {
346  int i;
347  for (i = 0; i < ctx->nb_mids; i++) {
348  QSVMid *mid = &ctx->mids[i];
349  mfxHDLPair *pair = (mfxHDLPair*)frame->surface.Data.MemId;
350  if ((mid->handle_pair->first == pair->first) &&
351  (mid->handle_pair->second == pair->second))
352  return i;
353  }
354  return AVERROR_BUG;
355 }
356 
357 enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
358 {
360  switch (mfx_pic_struct & 0xF) {
361  case MFX_PICSTRUCT_PROGRESSIVE:
363  break;
364  case MFX_PICSTRUCT_FIELD_TFF:
365  field = AV_FIELD_TT;
366  break;
367  case MFX_PICSTRUCT_FIELD_BFF:
368  field = AV_FIELD_BB;
369  break;
370  }
371 
372  return field;
373 }
374 
375 enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
376 {
377  enum AVPictureType type;
378  switch (mfx_pic_type & 0x7) {
379  case MFX_FRAMETYPE_I:
380  if (mfx_pic_type & MFX_FRAMETYPE_S)
382  else
384  break;
385  case MFX_FRAMETYPE_B:
387  break;
388  case MFX_FRAMETYPE_P:
389  if (mfx_pic_type & MFX_FRAMETYPE_S)
391  else
393  break;
394  case MFX_FRAMETYPE_UNKNOWN:
396  break;
397  default:
398  av_assert0(0);
399  }
400 
401  return type;
402 }
403 
/**
 * Load user plugins into a session from a ':'-separated list of hex-encoded
 * plugin UIDs.  A no-op when plugin support is not compiled in (the plugin
 * API was removed in oneVPL).
 *
 * @param session      session to load the plugins into
 * @param load_plugins colon-separated hex UID strings; NULL or empty string
 *                     means nothing to load
 * @param logctx       context used for logging
 * @return 0 on success, a negative AVERROR code on failure
 */
static int qsv_load_plugins(mfxSession session, const char *load_plugins,
                            void *logctx)
{
#if QSV_HAVE_USER_PLUGIN
    if (!load_plugins || !*load_plugins)
        return 0;

    while (*load_plugins) {
        mfxPluginUID uid;
        mfxStatus ret;
        int i, err = 0;

        /* extract the next UID token up to the ':' separator */
        char *plugin = av_get_token(&load_plugins, ":");
        if (!plugin)
            return AVERROR(ENOMEM);
        /* each byte of the UID is two hex characters */
        if (strlen(plugin) != 2 * sizeof(uid.Data)) {
            av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
            err = AVERROR(EINVAL);
            goto load_plugin_fail;
        }

        for (i = 0; i < sizeof(uid.Data); i++) {
            /* err temporarily holds the sscanf() item count here */
            err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
            if (err != 1) {
                av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID\n");
                err = AVERROR(EINVAL);
                goto load_plugin_fail;
            }

        }

        ret = MFXVideoUSER_Load(session, &uid, 1);
        if (ret < 0) {
            char errorbuf[128];
            snprintf(errorbuf, sizeof(errorbuf),
                     "Could not load the requested plugin '%s'", plugin);
            err = ff_qsv_print_error(logctx, ret, errorbuf);
            goto load_plugin_fail;
        }

        /* skip the ':' separator av_get_token() stopped at */
        if (*load_plugins)
            load_plugins++;
load_plugin_fail:
        av_freep(&plugin);
        if (err < 0)
            return err;
    }
#endif

    return 0;

}
456 
457 //This code is only required for Linux since a display handle is required.
458 //For Windows the session is complete and ready to use.
459 
460 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
461 static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
462 {
463  AVDictionary *child_device_opts = NULL;
464  AVVAAPIDeviceContext *hwctx;
465  int ret;
466 
467  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
468  av_dict_set(&child_device_opts, "driver", "iHD", 0);
469 
470  ret = av_hwdevice_ctx_create(&qs->va_device_ref, AV_HWDEVICE_TYPE_VAAPI, NULL, child_device_opts, 0);
471  av_dict_free(&child_device_opts);
472  if (ret < 0) {
473  av_log(avctx, AV_LOG_ERROR, "Failed to create a VAAPI device.\n");
474  return ret;
475  } else {
476  qs->va_device_ctx = (AVHWDeviceContext*)qs->va_device_ref->data;
477  hwctx = qs->va_device_ctx->hwctx;
478 
479  ret = MFXVideoCORE_SetHandle(qs->session,
480  (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)hwctx->display);
481  if (ret < 0) {
482  return ff_qsv_print_error(avctx, ret, "Error during set display handle\n");
483  }
484  }
485 
486  return 0;
487 }
488 #endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
489 
490 #if QSV_ONEVPL
491 static int qsv_new_mfx_loader(AVCodecContext *avctx,
492  mfxIMPL implementation,
493  mfxVersion *pver,
494  void **ploader)
495 {
496  mfxStatus sts;
497  mfxLoader loader = NULL;
498  mfxConfig cfg;
499  mfxVariant impl_value = {0};
500 
501  loader = MFXLoad();
502  if (!loader) {
503  av_log(avctx, AV_LOG_ERROR, "Error creating a MFX loader\n");
504  goto fail;
505  }
506 
507  /* Create configurations for implementation */
508  cfg = MFXCreateConfig(loader);
509  if (!cfg) {
510  av_log(avctx, AV_LOG_ERROR, "Error creating a MFX configurations\n");
511  goto fail;
512  }
513 
514  impl_value.Type = MFX_VARIANT_TYPE_U32;
515  impl_value.Data.U32 = (implementation == MFX_IMPL_SOFTWARE) ?
516  MFX_IMPL_TYPE_SOFTWARE : MFX_IMPL_TYPE_HARDWARE;
517  sts = MFXSetConfigFilterProperty(cfg,
518  (const mfxU8 *)"mfxImplDescription.Impl", impl_value);
519  if (sts != MFX_ERR_NONE) {
520  av_log(avctx, AV_LOG_ERROR, "Error adding a MFX configuration "
521  "property: %d\n", sts);
522  goto fail;
523  }
524 
525  impl_value.Type = MFX_VARIANT_TYPE_U32;
526  impl_value.Data.U32 = pver->Version;
527  sts = MFXSetConfigFilterProperty(cfg,
528  (const mfxU8 *)"mfxImplDescription.ApiVersion.Version",
529  impl_value);
530  if (sts != MFX_ERR_NONE) {
531  av_log(avctx, AV_LOG_ERROR, "Error adding a MFX configuration "
532  "property: %d\n", sts);
533  goto fail;
534  }
535 
536  *ploader = loader;
537 
538  return 0;
539 
540 fail:
541  if (loader)
542  MFXUnload(loader);
543 
544  *ploader = NULL;
545  return AVERROR_UNKNOWN;
546 }
547 
/**
 * Create an MFX session from a configured oneVPL loader by iterating over
 * the implementations it enumerates until one can be created.
 *
 * @param ctx      logging context
 * @param loader   configured mfxLoader to enumerate implementations from
 * @param psession receives the new session on success, NULL on failure
 * @return 0 on success, AVERROR_UNKNOWN if no implementation yields a session
 */
static int qsv_create_mfx_session_from_loader(void *ctx, mfxLoader loader, mfxSession *psession)
{
    mfxStatus sts;
    mfxSession session = NULL;
    uint32_t impl_idx = 0;

    while (1) {
        /* Enumerate all implementations */
        mfxImplDescription *impl_desc;

        sts = MFXEnumImplementations(loader, impl_idx,
                                     MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
                                     (mfxHDL *)&impl_desc);
        /* Failed to find an available implementation */
        if (sts == MFX_ERR_NOT_FOUND)
            break;
        else if (sts != MFX_ERR_NONE) {
            impl_idx++;
            continue;
        }

        sts = MFXCreateSession(loader, impl_idx, &session);
        MFXDispReleaseImplDescription(loader, impl_desc);
        if (sts == MFX_ERR_NONE)
            break;

        impl_idx++;
    }

    /* sts holds the last enumeration/creation result: MFX_ERR_NOT_FOUND
     * (list exhausted) falls through to the failure path here */
    if (sts != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error creating a MFX session: %d.\n", sts);
        goto fail;
    }

    *psession = session;

    return 0;

fail:
    if (session)
        MFXClose(session);

    *psession = NULL;
    return AVERROR_UNKNOWN;
}
593 
/**
 * Create an MFX session via oneVPL, creating a loader first if the caller
 * did not supply one.
 *
 * @param avctx          codec context used for logging
 * @param implementation requested mfxIMPL, forwarded to the loader setup
 * @param pver           minimum required API version
 * @param gpu_copy       accepted for signature compatibility; not used in
 *                       this oneVPL path
 * @param psession       receives the new session on success
 * @param ploader        in: existing loader or NULL; out: on success, set to
 *                       the loader that was used (newly created if it was
 *                       NULL on entry, ownership passes to the caller)
 * @return 0 on success, AVERROR_UNKNOWN on failure (a loader created here
 *         is unloaded again on failure)
 */
static int qsv_create_mfx_session(AVCodecContext *avctx,
                                  mfxIMPL implementation,
                                  mfxVersion *pver,
                                  int gpu_copy,
                                  mfxSession *psession,
                                  void **ploader)
{
    mfxLoader loader = NULL;

    /* Don't create a new MFX loader if the input loader is valid */
    if (*ploader == NULL) {
        av_log(avctx, AV_LOG_VERBOSE,
               "Use Intel(R) oneVPL to create MFX session, the required "
               "implementation version is %d.%d\n",
               pver->Major, pver->Minor);

        if (qsv_new_mfx_loader(avctx, implementation, pver, (void **)&loader))
            goto fail;

        av_assert0(loader);
    } else {
        av_log(avctx, AV_LOG_VERBOSE,
               "Use Intel(R) oneVPL to create MFX session with the specified MFX loader\n");

        loader = *ploader;
    }

    if (qsv_create_mfx_session_from_loader(avctx, loader, psession))
        goto fail;

    if (!*ploader)
        *ploader = loader;

    return 0;

fail:
    /* only unload a loader we created ourselves; a caller-supplied one
     * remains the caller's responsibility */
    if (!*ploader && loader)
        MFXUnload(loader);

    return AVERROR_UNKNOWN;
}
635 
636 #else
637 
639  mfxIMPL implementation,
640  mfxVersion *pver,
641  int gpu_copy,
642  mfxSession *psession,
643  void **ploader)
644 {
645  mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
646  mfxSession session = NULL;
647  mfxStatus sts;
648 
649  av_log(avctx, AV_LOG_VERBOSE,
650  "Use Intel(R) Media SDK to create MFX session, the required "
651  "implementation version is %d.%d\n",
652  pver->Major, pver->Minor);
653 
654  *psession = NULL;
655  *ploader = NULL;
656 
657  init_par.GPUCopy = gpu_copy;
658  init_par.Implementation = implementation;
659  init_par.Version = *pver;
660  sts = MFXInitEx(init_par, &session);
661  if (sts < 0)
662  return ff_qsv_print_error(avctx, sts,
663  "Error initializing a MFX session");
664  else if (sts > 0) {
665  ff_qsv_print_warning(avctx, sts,
666  "Warning in MFX initialization");
667  return AVERROR_UNKNOWN;
668  }
669 
670  *psession = session;
671 
672  return 0;
673 }
674 
675 #endif
676 
678  const char *load_plugins, int gpu_copy)
679 {
680  mfxIMPL impls[] = {
681 #if CONFIG_D3D11VA
682  MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11,
683 #endif
684  MFX_IMPL_AUTO_ANY
685  };
686  mfxIMPL impl;
687  mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
688 
689  const char *desc;
690  int ret;
691 
692  for (int i = 0; i < FF_ARRAY_ELEMS(impls); i++) {
693  ret = qsv_create_mfx_session(avctx, impls[i], &ver, gpu_copy, &qs->session,
694  &qs->loader);
695 
696  if (ret == 0)
697  break;
698 
699  if (i == FF_ARRAY_ELEMS(impls) - 1)
700  return ret;
701  else
702  av_log(avctx, AV_LOG_ERROR, "The current mfx implementation is not "
703  "supported, try next mfx implementation.\n");
704  }
705 
706 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
707  ret = ff_qsv_set_display_handle(avctx, qs);
708  if (ret < 0)
709  return ret;
710 #endif
711 
712  ret = qsv_load_plugins(qs->session, load_plugins, avctx);
713  if (ret < 0) {
714  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
715  return ret;
716  }
717 
718  ret = MFXQueryIMPL(qs->session, &impl);
719  if (ret != MFX_ERR_NONE)
720  return ff_qsv_print_error(avctx, ret,
721  "Error querying the session attributes");
722 
723  switch (MFX_IMPL_BASETYPE(impl)) {
724  case MFX_IMPL_SOFTWARE:
725  desc = "software";
726  break;
727  case MFX_IMPL_HARDWARE:
728  case MFX_IMPL_HARDWARE2:
729  case MFX_IMPL_HARDWARE3:
730  case MFX_IMPL_HARDWARE4:
731  desc = "hardware accelerated";
732  break;
733  default:
734  desc = "unknown";
735  }
736 
737  av_log(avctx, AV_LOG_VERBOSE,
738  "Initialized an internal MFX session using %s implementation\n",
739  desc);
740 
741  return 0;
742 }
743 
744 static void mids_buf_free(FFRefStructOpaque opaque, void *obj)
745 {
746  AVBufferRef *hw_frames_ref = opaque.nc;
747  av_buffer_unref(&hw_frames_ref);
748 }
749 
/**
 * Allocate a QSVMid array describing every surface in a QSV frames context.
 *
 * The array holds its own reference to the frames context (hw_frames_ref1),
 * which is released by mids_buf_free() when the refstruct is unreferenced;
 * all entries share that single reference.
 *
 * @param hw_frames_ref AVHWFramesContext buffer (AV_PIX_FMT_QSV) to describe
 * @return refstruct-allocated QSVMid array of nb_surfaces entries, or NULL
 *         on allocation failure
 */
static QSVMid *qsv_create_mids(AVBufferRef *hw_frames_ref)
{
    AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
    AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
    int nb_surfaces = frames_hwctx->nb_surfaces;

    AVBufferRef *hw_frames_ref1;
    QSVMid *mids;
    int i;

    hw_frames_ref1 = av_buffer_ref(hw_frames_ref);
    if (!hw_frames_ref1)
        return NULL;

    mids = ff_refstruct_alloc_ext(nb_surfaces * sizeof(*mids), 0,
                                  hw_frames_ref1, mids_buf_free);
    if (!mids) {
        av_buffer_unref(&hw_frames_ref1);
        return NULL;
    }

    for (i = 0; i < nb_surfaces; i++) {
        QSVMid *mid = &mids[i];
        /* MemId of a QSV surface is an mfxHDLPair set up by hwcontext_qsv */
        mid->handle_pair   = (mfxHDLPair*)frames_hwctx->surfaces[i].Data.MemId;
        mid->hw_frames_ref = hw_frames_ref1;
    }

    return mids;
}
779 
/**
 * Fill an mfxFrameAllocResponse with pointers into a QSVMid array.
 *
 * Two extra hidden slots are appended after the NumFrameActual visible
 * entries: a reference to the frames context and a refstruct reference to
 * the QSVMid array itself.  Both are released by qsv_frame_free().
 *
 * @param resp          response to fill; resp->mids is allocated here
 * @param hw_frames_ref frames context the mids belong to
 * @param mids          QSVMid array (one entry per surface)
 * @return 0 on success, AVERROR(ENOMEM) on allocation failure
 */
static int qsv_setup_mids(mfxFrameAllocResponse *resp, AVBufferRef *hw_frames_ref,
                          QSVMid *mids)
{
    AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
    AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
    int nb_surfaces = frames_hwctx->nb_surfaces;
    int i;

    // the allocated size of the array is two larger than the number of
    // surfaces, we store the references to the frames context and the
    // QSVMid array there
    resp->mids = av_calloc(nb_surfaces + 2, sizeof(*resp->mids));
    if (!resp->mids)
        return AVERROR(ENOMEM);

    for (i = 0; i < nb_surfaces; i++)
        resp->mids[i] = &mids[i];
    resp->NumFrameActual = nb_surfaces;

    resp->mids[resp->NumFrameActual] = (mfxMemId)av_buffer_ref(hw_frames_ref);
    if (!resp->mids[resp->NumFrameActual]) {
        av_freep(&resp->mids);
        return AVERROR(ENOMEM);
    }

    resp->mids[resp->NumFrameActual + 1] = ff_refstruct_ref(mids);

    return 0;
}
809 
/**
 * mfxFrameAllocator.Alloc callback: satisfy a video-memory frame allocation
 * request from the SDK.
 *
 * External requests (MFX_MEMTYPE_EXTERNAL_FRAME) are filled from the
 * caller-supplied frames context after validating that its surfaces match
 * the request; internal requests (MFX_MEMTYPE_INTERNAL_FRAME) get a freshly
 * allocated AVHWFramesContext on the same device.
 *
 * @param pthis QSVFramesContext passed as the allocator's opaque pointer
 * @param req   allocation request from the SDK
 * @param resp  response filled via qsv_setup_mids() on success
 * @return MFX_ERR_NONE on success, MFX_ERR_UNSUPPORTED for request types
 *         not handled here, MFX_ERR_MEMORY_ALLOC on allocation failure
 */
static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                                 mfxFrameAllocResponse *resp)
{
    QSVFramesContext *ctx = pthis;
    int ret;

    /* this should only be called from an encoder or decoder and
     * only allocates video memory frames */
    if (!(req->Type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
                       MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)) ||
        !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)))
        return MFX_ERR_UNSUPPORTED;

    if (req->Type & MFX_MEMTYPE_EXTERNAL_FRAME) {
        /* external frames -- fill from the caller-supplied frames context */
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
        mfxFrameInfo      *i  = &req->Info;
        mfxFrameInfo      *i1;

        if (!frames_hwctx->nb_surfaces) {
            /* dynamic pools have no pre-allocated surfaces to hand out */
            av_log(ctx->logctx, AV_LOG_DEBUG,
                   "Dynamic frame pools, no frame is pre-allocated\n");

            return MFX_ERR_NONE;
        }

        i1 = &frames_hwctx->surfaces[0].Info;
        if (i->Width  > i1->Width  || i->Height > i1->Height ||
            i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
            av_log(ctx->logctx, AV_LOG_ERROR, "Mismatching surface properties in an "
                   "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
                   i->Width,  i->Height,  i->FourCC,  i->ChromaFormat,
                   i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
            return MFX_ERR_UNSUPPORTED;
        }

        ret = qsv_setup_mids(resp, ctx->hw_frames_ctx, ctx->mids);
        if (ret < 0) {
            av_log(ctx->logctx, AV_LOG_ERROR,
                   "Error filling an external frame allocation request\n");
            return MFX_ERR_MEMORY_ALLOC;
        }
    } else if (req->Type & MFX_MEMTYPE_INTERNAL_FRAME) {
        /* internal frames -- allocate a new hw frames context */
        AVHWFramesContext *ext_frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
        AVQSVFramesContext *ext_frames_hwctx = ext_frames_ctx->hwctx;
        mfxFrameInfo      *i  = &req->Info;

        AVBufferRef *frames_ref;
        QSVMid *mids;
        AVHWFramesContext *frames_ctx;
        AVQSVFramesContext *frames_hwctx;

        if (!ext_frames_hwctx->nb_surfaces)
            return MFX_ERR_UNSUPPORTED;

        frames_ref = av_hwframe_ctx_alloc(ext_frames_ctx->device_ref);
        if (!frames_ref)
            return MFX_ERR_MEMORY_ALLOC;

        frames_ctx   = (AVHWFramesContext*)frames_ref->data;
        frames_hwctx = frames_ctx->hwctx;

        frames_ctx->format            = AV_PIX_FMT_QSV;
        frames_ctx->sw_format         = ff_qsv_map_fourcc(i->FourCC);
        frames_ctx->width             = i->Width;
        frames_ctx->height            = i->Height;
        frames_ctx->initial_pool_size = req->NumFrameSuggested;

        frames_hwctx->frame_type = req->Type;

        ret = av_hwframe_ctx_init(frames_ref);
        if (ret < 0) {
            av_log(ctx->logctx, AV_LOG_ERROR,
                   "Error initializing a frames context for an internal frame "
                   "allocation request\n");
            av_buffer_unref(&frames_ref);
            return MFX_ERR_MEMORY_ALLOC;
        }

        mids = qsv_create_mids(frames_ref);
        if (!mids) {
            av_buffer_unref(&frames_ref);
            return MFX_ERR_MEMORY_ALLOC;
        }

        ret = qsv_setup_mids(resp, frames_ref, mids);
        /* qsv_setup_mids() took its own references; drop ours */
        ff_refstruct_unref(&mids);
        av_buffer_unref(&frames_ref);
        if (ret < 0) {
            av_log(ctx->logctx, AV_LOG_ERROR,
                   "Error filling an internal frame allocation request\n");
            return MFX_ERR_MEMORY_ALLOC;
        }
    } else {
        return MFX_ERR_UNSUPPORTED;
    }

    return MFX_ERR_NONE;
}
911 
912 static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
913 {
914  if (!resp->mids)
915  return MFX_ERR_NONE;
916 
917  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
918  ff_refstruct_unref(&resp->mids[resp->NumFrameActual + 1]);
919  av_freep(&resp->mids);
920  return MFX_ERR_NONE;
921 }
922 
923 static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
924 {
926  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
927  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
928  QSVMid *qsv_mid;
929  AVHWFramesContext *hw_frames_ctx;
930  AVQSVFramesContext *hw_frames_hwctx;
931  int ret;
932 
933  if (!frames_hwctx->nb_surfaces)
934  return MFX_ERR_UNSUPPORTED;
935 
936  qsv_mid = mid;
937  hw_frames_ctx = (AVHWFramesContext*)qsv_mid->hw_frames_ref->data;
938  hw_frames_hwctx = hw_frames_ctx->hwctx;
939  if (qsv_mid->locked_frame)
940  return MFX_ERR_UNDEFINED_BEHAVIOR;
941 
942  /* Allocate a system memory frame that will hold the mapped data. */
943  qsv_mid->locked_frame = av_frame_alloc();
944  if (!qsv_mid->locked_frame)
945  return MFX_ERR_MEMORY_ALLOC;
946  qsv_mid->locked_frame->format = hw_frames_ctx->sw_format;
947 
948  /* wrap the provided handle in a hwaccel AVFrame */
949  qsv_mid->hw_frame = av_frame_alloc();
950  if (!qsv_mid->hw_frame)
951  goto fail;
952 
953  qsv_mid->hw_frame->data[3] = (uint8_t*)&qsv_mid->surf;
954  qsv_mid->hw_frame->format = AV_PIX_FMT_QSV;
955 
956  // doesn't really matter what buffer is used here
957  qsv_mid->hw_frame->buf[0] = av_buffer_alloc(1);
958  if (!qsv_mid->hw_frame->buf[0])
959  goto fail;
960 
961  qsv_mid->hw_frame->width = hw_frames_ctx->width;
962  qsv_mid->hw_frame->height = hw_frames_ctx->height;
963 
964  qsv_mid->hw_frame->hw_frames_ctx = av_buffer_ref(qsv_mid->hw_frames_ref);
965  if (!qsv_mid->hw_frame->hw_frames_ctx)
966  goto fail;
967 
968  qsv_mid->surf.Info = hw_frames_hwctx->surfaces[0].Info;
969  qsv_mid->surf.Data.MemId = qsv_mid->handle_pair;
970 
971  /* map the data to the system memory */
972  ret = av_hwframe_map(qsv_mid->locked_frame, qsv_mid->hw_frame,
974  if (ret < 0)
975  goto fail;
976 
977  ptr->Pitch = qsv_mid->locked_frame->linesize[0];
978  ptr->Y = qsv_mid->locked_frame->data[0];
979  ptr->U = qsv_mid->locked_frame->data[1];
980  ptr->V = qsv_mid->locked_frame->data[1] + 1;
981 
982  return MFX_ERR_NONE;
983 fail:
984  av_frame_free(&qsv_mid->hw_frame);
985  av_frame_free(&qsv_mid->locked_frame);
986  return MFX_ERR_MEMORY_ALLOC;
987 }
988 
989 static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
990 {
992  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
993  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
994  QSVMid *qsv_mid;
995 
996  if (!frames_hwctx->nb_surfaces)
997  return MFX_ERR_UNSUPPORTED;
998 
999  qsv_mid = mid;
1000  av_frame_free(&qsv_mid->locked_frame);
1001  av_frame_free(&qsv_mid->hw_frame);
1002 
1003  return MFX_ERR_NONE;
1004 }
1005 
1006 static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
1007 {
1009  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
1010  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
1011  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
1012  mfxHDLPair *pair_src;
1013 
1014  if (frames_hwctx->nb_surfaces) {
1015  QSVMid *qsv_mid = (QSVMid*)mid;
1016  pair_src = (mfxHDLPair*)qsv_mid->handle_pair;
1017  } else {
1018  pair_src = (mfxHDLPair*)mid;
1019  }
1020 
1021  pair_dst->first = pair_src->first;
1022 
1023  if (pair_src->second != (mfxMemId)MFX_INFINITE)
1024  pair_dst->second = pair_src->second;
1025  return MFX_ERR_NONE;
1026 }
1027 
/**
 * Create a new MFX session bound to the same device (and, when supported,
 * joined with) the session of an existing AVQSVDeviceContext.
 *
 * @param avctx        codec context used for logging
 * @param psession     receives the new session on success
 * @param device_ref   AV_HWDEVICE_TYPE_QSV device whose session/loader to
 *                     mirror
 * @param load_plugins plugin UID list forwarded to qsv_load_plugins()
 * @param gpu_copy     forwarded to qsv_create_mfx_session()
 * @return 0 on success, a negative AVERROR code on failure
 *
 * NOTE(review): on the error paths after qsv_create_mfx_session()
 * succeeds, the new session is not closed before returning -- confirm
 * whether this leak is intentional.
 */
int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
                               AVBufferRef *device_ref, const char *load_plugins,
                               int gpu_copy)
{
    AVHWDeviceContext    *device_ctx = (AVHWDeviceContext*)device_ref->data;
    AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
    mfxSession        parent_session = device_hwctx->session;
    void                     *loader = device_hwctx->loader;
    mfxHDL                    handle = NULL;
    int           hw_handle_supported = 0;

    mfxSession    session;
    mfxVersion    ver;
    mfxIMPL       impl;
    mfxHandleType handle_type;
    mfxStatus err;
    int ret;

    err = MFXQueryIMPL(parent_session, &impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(parent_session, &ver);
    if (err != MFX_ERR_NONE)
        return ff_qsv_print_error(avctx, err,
                                  "Error querying the session attributes");

    /* pick the handle type matching the parent session's acceleration API */
    if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_VA_DISPLAY;
        hw_handle_supported = 1;
    } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_D3D11_DEVICE;
        hw_handle_supported = 1;
    } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
        hw_handle_supported = 1;
    }

    if (hw_handle_supported) {
        err = MFXVideoCORE_GetHandle(parent_session, handle_type, &handle);
        if (err != MFX_ERR_NONE) {
            return ff_qsv_print_error(avctx, err,
                                      "Error getting handle session");
        }
    }
    if (!handle) {
        av_log(avctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
               "from the session\n");
    }

    ret = qsv_create_mfx_session(avctx, impl, &ver, gpu_copy, &session,
                                 &loader);
    if (ret)
        return ret;

    /* propagate the parent's device handle so both sessions share it */
    if (handle) {
        err = MFXVideoCORE_SetHandle(session, handle_type, handle);
        if (err != MFX_ERR_NONE)
            return ff_qsv_print_error(avctx, err,
                                      "Error setting a HW handle");
    }

    /* session joining requires API >= 1.25 */
    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        err = MFXJoinSession(parent_session, session);
        if (err != MFX_ERR_NONE)
            return ff_qsv_print_error(avctx, err,
                                      "Error joining session");
    }

    ret = qsv_load_plugins(session, load_plugins, avctx);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
        return ret;
    }

    *psession = session;
    return 0;
}
1104 
/**
 * Create an MFX session for a frames context and, for non-opaque memory,
 * install the custom frame allocator defined in this file.
 *
 * @param avctx          codec context used for logging
 * @param psession       receives the new session on success
 * @param qsv_frames_ctx wrapper around the AV_PIX_FMT_QSV frames context;
 *                       its logctx/mids/nb_mids fields are (re)initialized
 *                       here when opaque is 0
 * @param load_plugins   plugin UID list forwarded to the session setup
 * @param opaque         when nonzero, skip installing the frame allocator
 * @param gpu_copy       forwarded to ff_qsv_init_session_device()
 * @return 0 on success, a negative AVERROR code on failure
 */
int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession,
                               QSVFramesContext *qsv_frames_ctx,
                               const char *load_plugins, int opaque, int gpu_copy)
{
    /* NOTE(review): frame_allocator is a stack object passed to
     * MFXVideoCORE_SetFrameAllocator -- presumably the SDK copies it;
     * confirm against the libmfx documentation. */
    mfxFrameAllocator frame_allocator = {
        .pthis  = qsv_frames_ctx,
        .Alloc  = qsv_frame_alloc,
        .Lock   = qsv_frame_lock,
        .Unlock = qsv_frame_unlock,
        .GetHDL = qsv_frame_get_hdl,
        .Free   = qsv_frame_free,
    };

    AVHWFramesContext    *frames_ctx = (AVHWFramesContext*)qsv_frames_ctx->hw_frames_ctx->data;
    AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

    mfxSession    session;
    mfxStatus err;

    int ret;

    ret = ff_qsv_init_session_device(avctx, &session,
                                     frames_ctx->device_ref, load_plugins, gpu_copy);
    if (ret < 0)
        return ret;

    if (!opaque) {
        qsv_frames_ctx->logctx = avctx;
        qsv_frames_ctx->mids    = NULL;
        qsv_frames_ctx->nb_mids = 0;

        /* allocate the memory ids for the external frames */
        if (frames_hwctx->nb_surfaces) {
            ff_refstruct_unref(&qsv_frames_ctx->mids);
            qsv_frames_ctx->mids = qsv_create_mids(qsv_frames_ctx->hw_frames_ctx);
            if (!qsv_frames_ctx->mids)
                return AVERROR(ENOMEM);
            qsv_frames_ctx->nb_mids = frames_hwctx->nb_surfaces;
        }

        err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return ff_qsv_print_error(avctx, err,
                                      "Error setting a frame allocator");
    }

    *psession = session;
    return 0;
}
1154 
1156 {
1157  if (qs->session) {
1158  MFXClose(qs->session);
1159  qs->session = NULL;
1160  }
1161 
1162  if (qs->loader) {
1163  MFXUnload(qs->loader);
1164  qs->loader = NULL;
1165  }
1166 
1167 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
1168  av_buffer_unref(&qs->va_device_ref);
1169 #endif
1170  return 0;
1171 }
1172 
1174  mfxExtBuffer * param)
1175 {
1176  int i;
1177 
1178  for (i = 0; i < frame->num_ext_params; i++) {
1179  mfxExtBuffer *ext_buffer = frame->ext_param[i];
1180 
1181  if (ext_buffer->BufferId == param->BufferId) {
1182  av_log(avctx, AV_LOG_WARNING, "A buffer with the same type has been "
1183  "added\n");
1184  return;
1185  }
1186  }
1187 
1188  if (frame->num_ext_params < QSV_MAX_FRAME_EXT_PARAMS) {
1189  frame->ext_param[frame->num_ext_params] = param;
1190  frame->num_ext_params++;
1191  frame->surface.Data.NumExtParam = frame->num_ext_params;
1192  } else {
1193  av_log(avctx, AV_LOG_WARNING, "Ignore this extra buffer because do not "
1194  "have enough space\n");
1195  }
1196 
1197 
1198 }
QSV_MAX_FRAME_EXT_PARAMS
#define QSV_MAX_FRAME_EXT_PARAMS
Definition: qsv_internal.h:57
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:85
qsv_frame_unlock
static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: qsv.c:989
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
QSVFramesContext::nb_mids
int nb_mids
Definition: qsv_internal.h:125
ff_refstruct_ref
void * ff_refstruct_ref(void *obj)
Create a new reference to an object managed via this API, i.e.
Definition: refstruct.c:140
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
uid
UID uid
Definition: mxfenc.c:2423
QSV_VERSION_MAJOR
#define QSV_VERSION_MAJOR
Definition: qsv_internal.h:47
QSVFramesContext::hw_frames_ctx
AVBufferRef * hw_frames_ctx
Definition: qsv_internal.h:115
AVFieldOrder
AVFieldOrder
Definition: defs.h:199
averr
int averr
Definition: qsv.c:119
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AV_FIELD_PROGRESSIVE
@ AV_FIELD_PROGRESSIVE
Definition: defs.h:201
AVPictureType
AVPictureType
Definition: avutil.h:277
ff_refstruct_alloc_ext
static void * ff_refstruct_alloc_ext(size_t size, unsigned flags, void *opaque, void(*free_cb)(FFRefStructOpaque opaque, void *obj))
A wrapper around ff_refstruct_alloc_ext_c() for the common case of a non-const qualified opaque.
Definition: refstruct.h:94
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:160
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:322
ff_qsv_close_internal_session
int ff_qsv_close_internal_session(QSVSession *qs)
Definition: qsv.c:1155
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:374
ff_qsv_map_pictype
enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
Definition: qsv.c:375
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:248
av_hwframe_map
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:778
ff_qsv_find_surface_idx
int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
Definition: qsv.c:344
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:478
AV_PIX_FMT_XV30
#define AV_PIX_FMT_XV30
Definition: pixfmt.h:534
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVVAAPIDeviceContext::display
VADisplay display
The VADisplay handle, to be filled by the user.
Definition: hwcontext_vaapi.h:72
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:102
AVDictionary
Definition: dict.c:34
FFRefStructOpaque
RefStruct is an API for creating reference-counted objects with minimal overhead.
Definition: refstruct.h:58
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
mids_buf_free
static void mids_buf_free(FFRefStructOpaque opaque, void *obj)
Definition: qsv.c:744
ff_qsv_init_session_device
int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession, AVBufferRef *device_ref, const char *load_plugins, int gpu_copy)
Definition: qsv.c:1028
AV_FIELD_TT
@ AV_FIELD_TT
Top coded_first, top displayed first.
Definition: defs.h:202
ff_qsv_map_frame_to_surface
int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
Definition: qsv.c:283
fail
#define fail()
Definition: checkasm.h:185
MFXUnload
#define MFXUnload(a)
Definition: qsv.c:51
qsv_load_plugins
static int qsv_load_plugins(mfxSession session, const char *load_plugins, void *logctx)
Definition: qsv.c:404
type
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf type
Definition: writing_filters.txt:86
refstruct.h
desc
const char * desc
Definition: qsv.c:86
qsv_internal.h
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:60
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:148
AV_PIX_FMT_Y210
#define AV_PIX_FMT_Y210
Definition: pixfmt.h:532
qsv_setup_mids
static int qsv_setup_mids(mfxFrameAllocResponse *resp, AVBufferRef *hw_frames_ref, QSVMid *mids)
Definition: qsv.c:780
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
AV_HWFRAME_MAP_DIRECT
@ AV_HWFRAME_MAP_DIRECT
The mapping must be direct.
Definition: hwcontext.h:528
ff_qsv_print_warning
int ff_qsv_print_warning(void *log_ctx, mfxStatus err, const char *warning_string)
Definition: qsv.c:194
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AV_FIELD_UNKNOWN
@ AV_FIELD_UNKNOWN
Definition: defs.h:200
QSV_VERSION_MINOR
#define QSV_VERSION_MINOR
Definition: qsv_internal.h:48
format
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample format(the sample packing is implied by the sample format) and sample rate. The lists are not just lists
AV_CODEC_ID_VP9
@ AV_CODEC_ID_VP9
Definition: codec_id.h:220
QSV_RUNTIME_VERSION_ATLEAST
#define QSV_RUNTIME_VERSION_ATLEAST(MFX_VERSION, MAJOR, MINOR)
Definition: qsv_internal.h:63
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
ctx
AVFormatContext * ctx
Definition: movenc.c:49
field
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
Definition: writing_filters.txt:78
codec_id
enum AVCodecID codec_id
Definition: vaapi_decode.c:393
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
AV_CODEC_ID_H264
@ AV_CODEC_ID_H264
Definition: codec_id.h:79
if
if(ret)
Definition: filter_design.txt:179
ff_qsv_init_session_frames
int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession, QSVFramesContext *qsv_frames_ctx, const char *load_plugins, int opaque, int gpu_copy)
Definition: qsv.c:1105
QSVFrame
Definition: qsv_internal.h:79
NULL
#define NULL
Definition: coverity.c:32
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_CODEC_ID_AV1
@ AV_CODEC_ID_AV1
Definition: codec_id.h:280
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:74
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:126
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:85
AV_PICTURE_TYPE_SI
@ AV_PICTURE_TYPE_SI
Switching Intra.
Definition: avutil.h:283
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:279
ff_qsv_print_iopattern
int ff_qsv_print_iopattern(void *log_ctx, int mfx_iopattern, const char *extra_string)
Definition: qsv.c:100
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:479
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:247
AV_PICTURE_TYPE_SP
@ AV_PICTURE_TYPE_SP
Switching Predicted.
Definition: avutil.h:284
AV_CODEC_ID_MPEG1VIDEO
@ AV_CODEC_ID_MPEG1VIDEO
Definition: codec_id.h:53
error.h
AVCodecID
AVCodecID
Identify the syntax and semantics of the bitstream.
Definition: codec_id.h:49
AV_PIX_FMT_X2RGB10LE
@ AV_PIX_FMT_X2RGB10LE
packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), little-endian, X=unused/undefined
Definition: pixfmt.h:384
mfxerr
mfxStatus mfxerr
Definition: qsv.c:118
AV_PIX_FMT_P012
#define AV_PIX_FMT_P012
Definition: pixfmt.h:529
shift
static int shift(int a, int b)
Definition: bonk.c:261
QSVMid::hw_frames_ref
AVBufferRef * hw_frames_ref
Definition: qsv_internal.h:71
qsv_errors
static const struct @181 qsv_errors[]
FFRefStructOpaque::nc
void * nc
Definition: refstruct.h:59
AV_PICTURE_TYPE_NONE
@ AV_PICTURE_TYPE_NONE
Undefined.
Definition: avutil.h:278
AV_PIX_FMT_Y212
#define AV_PIX_FMT_Y212
Definition: pixfmt.h:533
mfx_iopattern
int mfx_iopattern
Definition: qsv.c:85
QSVMid::handle_pair
mfxHDLPair * handle_pair
Definition: qsv_internal.h:72
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:223
av_buffer_alloc
AVBufferRef * av_buffer_alloc(size_t size)
Allocate an AVBuffer of the given size using av_malloc().
Definition: buffer.c:77
QSVFramesContext::mids
QSVMid * mids
The memory ids for the external frames.
Definition: qsv_internal.h:124
AV_HWDEVICE_TYPE_VAAPI
@ AV_HWDEVICE_TYPE_VAAPI
Definition: hwcontext.h:31
AV_CODEC_ID_MJPEG
@ AV_CODEC_ID_MJPEG
Definition: codec_id.h:59
hwcontext_qsv.h
ff_qsv_map_pixfmt
int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc, uint16_t *shift)
Definition: qsv.c:224
ff_qsv_map_picstruct
enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
Definition: qsv.c:357
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
qsv_iopatterns
static const struct @180 qsv_iopatterns[]
AV_FIELD_BB
@ AV_FIELD_BB
Bottom coded first, bottom displayed first.
Definition: defs.h:203
common.h
AV_CODEC_ID_HEVC
@ AV_CODEC_ID_HEVC
Definition: codec_id.h:226
AV_PIX_FMT_X2RGB10
#define AV_PIX_FMT_X2RGB10
Definition: pixfmt.h:536
AV_CODEC_ID_VC1
@ AV_CODEC_ID_VC1
Definition: codec_id.h:122
qsv_map_error
static int qsv_map_error(mfxStatus mfx_err, const char **desc)
Convert a libmfx error code into an FFmpeg error code.
Definition: qsv.c:170
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:264
avcodec.h
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:115
qsv_frame_free
static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: qsv.c:912
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:84
ret
ret
Definition: filter_design.txt:187
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
QSVSession
Definition: qsv_internal.h:105
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:150
ff_qsv_codec_id_to_mfx
int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
Definition: qsv.c:54
qsv_frame_get_hdl
static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: qsv.c:1006
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:600
AVCodecContext
main external API structure.
Definition: avcodec.h:445
qsv_frame_alloc
static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: qsv.c:810
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:281
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
av_get_token
char * av_get_token(const char **buf, const char *term)
Unescape the given string until a non escaped terminating char, and return the token corresponding to...
Definition: avstring.c:143
qsv_create_mfx_session
static int qsv_create_mfx_session(AVCodecContext *avctx, mfxIMPL implementation, mfxVersion *pver, int gpu_copy, mfxSession *psession, void **ploader)
Definition: qsv.c:638
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: qsv.c:40
QSVSession::session
mfxSession session
Definition: qsv_internal.h:106
ff_qsv_map_fourcc
enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
Definition: qsv.c:203
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:528
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:53
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:280
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
QSVFramesContext::logctx
void * logctx
Definition: qsv_internal.h:116
QSVFramesContext
Definition: qsv_internal.h:114
qsv_create_mids
static QSVMid * qsv_create_mids(AVBufferRef *hw_frames_ref)
Definition: qsv.c:750
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:88
AV_PIX_FMT_XV36
#define AV_PIX_FMT_XV36
Definition: pixfmt.h:535
AV_CODEC_ID_VP8
@ AV_CODEC_ID_VP8
Definition: codec_id.h:192
hwcontext.h
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:52
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AVVAAPIDeviceContext
VAAPI connection details.
Definition: hwcontext_vaapi.h:68
avstring.h
AV_PIX_FMT_VUYX
@ AV_PIX_FMT_VUYX
packed VUYX 4:4:4, 32bpp, Variant of VUYA where alpha channel is left undefined
Definition: pixfmt.h:406
QSVSession::loader
void * loader
Definition: qsv_internal.h:111
ff_qsv_frame_add_ext_param
void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame, mfxExtBuffer *param)
Definition: qsv.c:1173
AV_CODEC_ID_MPEG2VIDEO
@ AV_CODEC_ID_MPEG2VIDEO
preferred ID for MPEG-1/2 video decoding
Definition: codec_id.h:54
QSVMid
Definition: qsv_internal.h:70
ff_qsv_print_error
int ff_qsv_print_error(void *log_ctx, mfxStatus err, const char *error_string)
Definition: qsv.c:185
fourcc
uint32_t fourcc
Definition: vaapi_decode.c:246
ff_qsv_init_internal_session
int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs, const char *load_plugins, int gpu_copy)
Definition: qsv.c:677
snprintf
#define snprintf
Definition: snprintf.h:34
qsv_frame_lock
static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: qsv.c:923
ff_refstruct_unref
void ff_refstruct_unref(void *objp)
Decrement the reference count of the underlying object and automatically free the object if there are...
Definition: refstruct.c:120