FFmpeg
qsv.c
1 /*
2  * Intel MediaSDK QSV encoder/decoder shared code
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include <mfx/mfxvideo.h>
22 #include <mfx/mfxplugin.h>
23 #include <mfx/mfxjpeg.h>
24 
25 #include <stdio.h>
26 #include <string.h>
27 
28 #include "libavutil/avstring.h"
29 #include "libavutil/common.h"
30 #include "libavutil/error.h"
31 #include "libavutil/hwcontext.h"
32 #include "libavutil/hwcontext_qsv.h"
33 #include "libavutil/imgutils.h"
34 #include "libavutil/avassert.h"
35 
36 #include "avcodec.h"
37 #include "qsv_internal.h"
38 
39 #if QSV_VERSION_ATLEAST(1, 12)
40 #include "mfx/mfxvp8.h"
41 #endif
42 
43 int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
44 {
45  switch (codec_id) {
46  case AV_CODEC_ID_H264:
47  return MFX_CODEC_AVC;
48 #if QSV_VERSION_ATLEAST(1, 8)
49  case AV_CODEC_ID_HEVC:
50  return MFX_CODEC_HEVC;
51 #endif
52  case AV_CODEC_ID_MPEG1VIDEO:
53  case AV_CODEC_ID_MPEG2VIDEO:
54  return MFX_CODEC_MPEG2;
55  case AV_CODEC_ID_VC1:
56  return MFX_CODEC_VC1;
57 #if QSV_VERSION_ATLEAST(1, 12)
58  case AV_CODEC_ID_VP8:
59  return MFX_CODEC_VP8;
60 #endif
61  case AV_CODEC_ID_MJPEG:
62  return MFX_CODEC_JPEG;
63 #if QSV_VERSION_ATLEAST(1, 19)
64  case AV_CODEC_ID_VP9:
65  return MFX_CODEC_VP9;
66 #endif
67 
68  default:
69  break;
70  }
71 
72  return AVERROR(ENOSYS);
73 }
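/* Illustrative note (editorial, not part of the original qsv.c): encoders and
 * decoders typically use the return value to fill mfxVideoParam, e.g.
 *
 *     int mfx_codec = ff_qsv_codec_id_to_mfx(avctx->codec_id);
 *     if (mfx_codec < 0)
 *         return mfx_codec;             // AVERROR(ENOSYS) for unsupported IDs
 *     param.mfx.CodecId = mfx_codec;    // "param" here is hypothetical
 */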
74 
75 int ff_qsv_level_to_mfx(enum AVCodecID codec_id, int level)
76 {
77  if (level == FF_LEVEL_UNKNOWN)
78  return MFX_LEVEL_UNKNOWN;
79 
80  switch (codec_id) {
81  case AV_CODEC_ID_HEVC:
82  return level / 3;
83  default:
84  return level;
85  }
86 }
87 
88 static const struct {
89  int mfx_iopattern;
90  const char *desc;
91 } qsv_iopatterns[] = {
92  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
93  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
94  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
95  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
96  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
97  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
98 };
99 
100 int ff_qsv_print_iopattern(void *log_ctx, int mfx_iopattern,
101  const char *extra_string)
102 {
103  const char *desc = NULL;
104 
105  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
106  if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
107  desc = qsv_iopatterns[i].desc;
108  }
109  }
110  if (!desc)
111  desc = "unknown iopattern";
112 
113  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
114  return 0;
115 }
116 
117 static const struct {
118  mfxStatus mfxerr;
119  int averr;
120  const char *desc;
121 } qsv_errors[] = {
122  { MFX_ERR_NONE, 0, "success" },
123  { MFX_ERR_UNKNOWN, AVERROR_UNKNOWN, "unknown error" },
124  { MFX_ERR_NULL_PTR, AVERROR(EINVAL), "NULL pointer" },
125  { MFX_ERR_UNSUPPORTED, AVERROR(ENOSYS), "unsupported" },
126  { MFX_ERR_MEMORY_ALLOC, AVERROR(ENOMEM), "failed to allocate memory" },
127  { MFX_ERR_NOT_ENOUGH_BUFFER, AVERROR(ENOMEM), "insufficient input/output buffer" },
128  { MFX_ERR_INVALID_HANDLE, AVERROR(EINVAL), "invalid handle" },
129  { MFX_ERR_LOCK_MEMORY, AVERROR(EIO), "failed to lock the memory block" },
130  { MFX_ERR_NOT_INITIALIZED, AVERROR_BUG, "not initialized" },
131  { MFX_ERR_NOT_FOUND, AVERROR(ENOSYS), "specified object was not found" },
132  /* the following 3 errors should always be handled explicitly, so those "mappings"
133  * are for completeness only */
134  { MFX_ERR_MORE_DATA, AVERROR_UNKNOWN, "expect more data at input" },
135  { MFX_ERR_MORE_SURFACE, AVERROR_UNKNOWN, "expect more surface at output" },
136  { MFX_ERR_MORE_BITSTREAM, AVERROR_UNKNOWN, "expect more bitstream at output" },
137  { MFX_ERR_ABORTED, AVERROR_UNKNOWN, "operation aborted" },
138  { MFX_ERR_DEVICE_LOST, AVERROR(EIO), "device lost" },
139  { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters" },
140  { MFX_ERR_INVALID_VIDEO_PARAM, AVERROR(EINVAL), "invalid video parameters" },
141  { MFX_ERR_UNDEFINED_BEHAVIOR, AVERROR_BUG, "undefined behavior" },
142  { MFX_ERR_DEVICE_FAILED, AVERROR(EIO), "device failed" },
143  { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters" },
144  { MFX_ERR_INVALID_AUDIO_PARAM, AVERROR(EINVAL), "invalid audio parameters" },
145 
146  { MFX_WRN_IN_EXECUTION, 0, "operation in execution" },
147  { MFX_WRN_DEVICE_BUSY, 0, "device busy" },
148  { MFX_WRN_VIDEO_PARAM_CHANGED, 0, "video parameters changed" },
149  { MFX_WRN_PARTIAL_ACCELERATION, 0, "partial acceleration" },
150  { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0, "incompatible video parameters" },
151  { MFX_WRN_VALUE_NOT_CHANGED, 0, "value is saturated" },
152  { MFX_WRN_OUT_OF_RANGE, 0, "value out of range" },
153  { MFX_WRN_FILTER_SKIPPED, 0, "filter skipped" },
154  { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0, "incompatible audio parameters" },
155 };
156 
157 int ff_qsv_map_error(mfxStatus mfx_err, const char **desc)
158 {
159  int i;
160  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
161  if (qsv_errors[i].mfxerr == mfx_err) {
162  if (desc)
163  *desc = qsv_errors[i].desc;
164  return qsv_errors[i].averr;
165  }
166  }
167  if (desc)
168  *desc = "unknown error";
169  return AVERROR_UNKNOWN;
170 }
171 
172 int ff_qsv_print_error(void *log_ctx, mfxStatus err,
173  const char *error_string)
174 {
175  const char *desc;
176  int ret;
177  ret = ff_qsv_map_error(err, &desc);
178  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
179  return ret;
180 }
181 
182 int ff_qsv_print_warning(void *log_ctx, mfxStatus err,
183  const char *warning_string)
184 {
185  const char *desc;
186  int ret;
187  ret = ff_qsv_map_error(err, &desc);
188  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
189  return ret;
190 }
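/* Illustrative sketch (editorial, not part of the original qsv.c): a caller that
 * just received an mfxStatus usually logs and translates it in one step, e.g.
 *
 *     sts = MFXVideoCORE_SyncOperation(session, sync, 1000);
 *     if (sts < 0)
 *         return ff_qsv_print_error(avctx, sts, "Error synchronizing the operation");
 *
 * Warnings (positive mfxStatus values) go through ff_qsv_print_warning() instead
 * and map to 0, so processing can continue. */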
191 
192 enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
193 {
194  switch (fourcc) {
195  case MFX_FOURCC_NV12: return AV_PIX_FMT_NV12;
196  case MFX_FOURCC_P010: return AV_PIX_FMT_P010;
197  case MFX_FOURCC_P8: return AV_PIX_FMT_PAL8;
198 #if CONFIG_VAAPI
199  case MFX_FOURCC_YUY2: return AV_PIX_FMT_YUYV422;
200 #if QSV_VERSION_ATLEAST(1, 27)
201  case MFX_FOURCC_Y210: return AV_PIX_FMT_Y210;
202 #endif
203 #endif
204  }
205  return AV_PIX_FMT_NONE;
206 }
207 
208 int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc)
209 {
210  switch (format) {
211  case AV_PIX_FMT_YUV420P:
212  case AV_PIX_FMT_YUVJ420P:
213  case AV_PIX_FMT_NV12:
214  *fourcc = MFX_FOURCC_NV12;
215  return AV_PIX_FMT_NV12;
216  case AV_PIX_FMT_YUV420P10:
217  case AV_PIX_FMT_P010:
218  *fourcc = MFX_FOURCC_P010;
219  return AV_PIX_FMT_P010;
220 #if CONFIG_VAAPI
221  case AV_PIX_FMT_YUV422P:
222  case AV_PIX_FMT_YUYV422:
223  *fourcc = MFX_FOURCC_YUY2;
224  return AV_PIX_FMT_YUYV422;
225 #if QSV_VERSION_ATLEAST(1, 27)
226  case AV_PIX_FMT_YUV422P10:
227  case AV_PIX_FMT_Y210:
228  *fourcc = MFX_FOURCC_Y210;
229  return AV_PIX_FMT_Y210;
230 #endif
231 #endif
232  default:
233  return AVERROR(ENOSYS);
234  }
235 }
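/* Descriptive note (editorial): ff_qsv_map_pixfmt() reports the MFX FourCC through
 * *fourcc and returns the pixel format the hardware will actually use, which may
 * differ from the requested one, e.g.
 *
 *     uint32_t fourcc;
 *     int fmt = ff_qsv_map_pixfmt(AV_PIX_FMT_YUV420P, &fourcc);
 *     // fmt == AV_PIX_FMT_NV12, fourcc == MFX_FOURCC_NV12
 */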
236 
237 int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
238 {
239  int i;
240  for (i = 0; i < ctx->nb_mids; i++) {
241  QSVMid *mid = &ctx->mids[i];
242  if (mid->handle == frame->surface.Data.MemId)
243  return i;
244  }
245  return AVERROR_BUG;
246 }
247 
248 enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
249 {
250  enum AVFieldOrder field = AV_FIELD_UNKNOWN;
251  switch (mfx_pic_struct & 0xF) {
252  case MFX_PICSTRUCT_PROGRESSIVE:
253  field = AV_FIELD_PROGRESSIVE;
254  break;
255  case MFX_PICSTRUCT_FIELD_TFF:
256  field = AV_FIELD_TT;
257  break;
258  case MFX_PICSTRUCT_FIELD_BFF:
259  field = AV_FIELD_BB;
260  break;
261  }
262 
263  return field;
264 }
265 
266 enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
267 {
268  enum AVPictureType type;
269  switch (mfx_pic_type & 0x7) {
270  case MFX_FRAMETYPE_I:
271  if (mfx_pic_type & MFX_FRAMETYPE_S)
272  type = AV_PICTURE_TYPE_SI;
273  else
274  type = AV_PICTURE_TYPE_I;
275  break;
276  case MFX_FRAMETYPE_B:
277  type = AV_PICTURE_TYPE_B;
278  break;
279  case MFX_FRAMETYPE_P:
280  if (mfx_pic_type & MFX_FRAMETYPE_S)
281  type = AV_PICTURE_TYPE_SP;
282  else
283  type = AV_PICTURE_TYPE_P;
284  break;
285  case MFX_FRAMETYPE_UNKNOWN:
286  type = AV_PICTURE_TYPE_NONE;
287  break;
288  default:
289  av_assert0(0);
290  }
291 
292  return type;
293 }
294 
295 static int qsv_load_plugins(mfxSession session, const char *load_plugins,
296  void *logctx)
297 {
298  if (!load_plugins || !*load_plugins)
299  return 0;
300 
301  while (*load_plugins) {
302  mfxPluginUID uid;
303  mfxStatus ret;
304  int i, err = 0;
305 
306  char *plugin = av_get_token(&load_plugins, ":");
307  if (!plugin)
308  return AVERROR(ENOMEM);
309  if (strlen(plugin) != 2 * sizeof(uid.Data)) {
310  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
311  err = AVERROR(EINVAL);
312  goto load_plugin_fail;
313  }
314 
315  for (i = 0; i < sizeof(uid.Data); i++) {
316  err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
317  if (err != 1) {
318  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID\n");
319  err = AVERROR(EINVAL);
320  goto load_plugin_fail;
321  }
322 
323  }
324 
325  ret = MFXVideoUSER_Load(session, &uid, 1);
326  if (ret < 0) {
327  char errorbuf[128];
328  snprintf(errorbuf, sizeof(errorbuf),
329  "Could not load the requested plugin '%s'", plugin);
330  err = ff_qsv_print_error(logctx, ret, errorbuf);
331  goto load_plugin_fail;
332  }
333 
334  if (*load_plugins)
335  load_plugins++;
336 load_plugin_fail:
337  av_freep(&plugin);
338  if (err < 0)
339  return err;
340  }
341 
342  return 0;
343 
344 }
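/* Descriptive note (editorial): load_plugins is a ':'-separated list of plugin UIDs,
 * each spelled as 2 * sizeof(mfxPluginUID.Data) hexadecimal characters; any other
 * length or a non-hex digit fails with AVERROR(EINVAL) before MFXVideoUSER_Load()
 * is attempted. */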
345 
346 //This code is only required for Linux since a display handle is required.
347 //For Windows the session is complete and ready to use.
348 
349 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
350 static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
351 {
352  AVDictionary *child_device_opts = NULL;
353  AVVAAPIDeviceContext *hwctx;
354  int ret;
355 
356  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
357  av_dict_set(&child_device_opts, "driver", "iHD", 0);
358 
359  ret = av_hwdevice_ctx_create(&qs->va_device_ref, AV_HWDEVICE_TYPE_VAAPI, NULL, child_device_opts, 0);
360  if (ret < 0) {
361  av_log(avctx, AV_LOG_ERROR, "Failed to create a VAAPI device.\n");
362  return ret;
363  } else {
364  qs->va_device_ctx = (AVHWDeviceContext*)qs->va_device_ref->data;
365  hwctx = qs->va_device_ctx->hwctx;
366 
367  ret = MFXVideoCORE_SetHandle(qs->session,
368  (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)hwctx->display);
369  if (ret < 0) {
370  return ff_qsv_print_error(avctx, ret, "Error during set display handle\n");
371  }
372  }
373 
374  av_dict_free(&child_device_opts);
375 
376  return 0;
377 }
378 #endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
379 
380 int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
381  const char *load_plugins, int gpu_copy)
382 {
383  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
384  mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
385  mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
386 
387  const char *desc;
388  int ret;
389 
390 #if QSV_VERSION_ATLEAST(1, 16)
391  init_par.GPUCopy = gpu_copy;
392 #endif
393  init_par.Implementation = impl;
394  init_par.Version = ver;
395  ret = MFXInitEx(init_par, &qs->session);
396  if (ret < 0)
397  return ff_qsv_print_error(avctx, ret,
398  "Error initializing an internal MFX session");
399 
400 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
401  ret = ff_qsv_set_display_handle(avctx, qs);
402  if (ret < 0)
403  return ret;
404 #endif
405 
406  ret = qsv_load_plugins(qs->session, load_plugins, avctx);
407  if (ret < 0) {
408  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
409  return ret;
410  }
411 
412  MFXQueryIMPL(qs->session, &impl);
413 
414  switch (MFX_IMPL_BASETYPE(impl)) {
415  case MFX_IMPL_SOFTWARE:
416  desc = "software";
417  break;
418  case MFX_IMPL_HARDWARE:
419  case MFX_IMPL_HARDWARE2:
420  case MFX_IMPL_HARDWARE3:
421  case MFX_IMPL_HARDWARE4:
422  desc = "hardware accelerated";
423  break;
424  default:
425  desc = "unknown";
426  }
427 
428  av_log(avctx, AV_LOG_VERBOSE,
429  "Initialized an internal MFX session using %s implementation\n",
430  desc);
431 
432  return 0;
433 }
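/* Illustrative sketch (editorial, not part of the original qsv.c); hypothetical
 * caller showing the expected pairing with ff_qsv_close_internal_session():
 *
 *     QSVSession qs = { 0 };
 *     int ret = ff_qsv_init_internal_session(avctx, &qs, load_plugins, gpu_copy);
 *     if (ret < 0)
 *         return ret;
 *     // ... use qs.session for MFXVideoDECODE / MFXVideoENCODE calls ...
 *     ff_qsv_close_internal_session(&qs);
 */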
434 
435 static void mids_buf_free(void *opaque, uint8_t *data)
436 {
437  AVBufferRef *hw_frames_ref = opaque;
438  av_buffer_unref(&hw_frames_ref);
439  av_freep(&data);
440 }
441 
442 static AVBufferRef *qsv_create_mids(AVBufferRef *hw_frames_ref)
443 {
444  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
445  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
446  int nb_surfaces = frames_hwctx->nb_surfaces;
447 
448  AVBufferRef *mids_buf, *hw_frames_ref1;
449  QSVMid *mids;
450  int i;
451 
452  hw_frames_ref1 = av_buffer_ref(hw_frames_ref);
453  if (!hw_frames_ref1)
454  return NULL;
455 
456  mids = av_mallocz_array(nb_surfaces, sizeof(*mids));
457  if (!mids) {
458  av_buffer_unref(&hw_frames_ref1);
459  return NULL;
460  }
461 
462  mids_buf = av_buffer_create((uint8_t*)mids, nb_surfaces * sizeof(*mids),
463  mids_buf_free, hw_frames_ref1, 0);
464  if (!mids_buf) {
465  av_buffer_unref(&hw_frames_ref1);
466  av_freep(&mids);
467  return NULL;
468  }
469 
470  for (i = 0; i < nb_surfaces; i++) {
471  QSVMid *mid = &mids[i];
472  mid->handle = frames_hwctx->surfaces[i].Data.MemId;
473  mid->hw_frames_ref = hw_frames_ref1;
474  }
475 
476  return mids_buf;
477 }
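/* Descriptive note (editorial): the returned AVBufferRef owns both the QSVMid array
 * and the extra frames-context reference taken above; mids_buf_free() releases them
 * together, so each QSVMid.hw_frames_ref is only a borrowed pointer into that shared
 * reference. */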
478 
479 static int qsv_setup_mids(mfxFrameAllocResponse *resp, AVBufferRef *hw_frames_ref,
480  AVBufferRef *mids_buf)
481 {
482  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
483  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
484  QSVMid *mids = (QSVMid*)mids_buf->data;
485  int nb_surfaces = frames_hwctx->nb_surfaces;
486  int i;
487 
488  // the allocated size of the array is two larger than the number of
489  // surfaces, we store the references to the frames context and the
490  // QSVMid array there
491  resp->mids = av_mallocz_array(nb_surfaces + 2, sizeof(*resp->mids));
492  if (!resp->mids)
493  return AVERROR(ENOMEM);
494 
495  for (i = 0; i < nb_surfaces; i++)
496  resp->mids[i] = &mids[i];
497  resp->NumFrameActual = nb_surfaces;
498 
499  resp->mids[resp->NumFrameActual] = (mfxMemId)av_buffer_ref(hw_frames_ref);
500  if (!resp->mids[resp->NumFrameActual]) {
501  av_freep(&resp->mids);
502  return AVERROR(ENOMEM);
503  }
504 
505  resp->mids[resp->NumFrameActual + 1] = av_buffer_ref(mids_buf);
506  if (!resp->mids[resp->NumFrameActual + 1]) {
507  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
508  av_freep(&resp->mids);
509  return AVERROR(ENOMEM);
510  }
511 
512  return 0;
513 }
514 
515 static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
516  mfxFrameAllocResponse *resp)
517 {
518  QSVFramesContext *ctx = pthis;
519  int ret;
520 
521  /* this should only be called from an encoder or decoder and
522  * only allocates video memory frames */
523  if (!(req->Type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
524  MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)) ||
525  !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)))
526  return MFX_ERR_UNSUPPORTED;
527 
528  if (req->Type & MFX_MEMTYPE_EXTERNAL_FRAME) {
529  /* external frames -- fill from the caller-supplied frames context */
530  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
531  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
532  mfxFrameInfo *i = &req->Info;
533  mfxFrameInfo *i1 = &frames_hwctx->surfaces[0].Info;
534 
535  if (i->Width > i1->Width || i->Height > i1->Height ||
536  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
537  av_log(ctx->logctx, AV_LOG_ERROR, "Mismatching surface properties in an "
538  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
539  i->Width, i->Height, i->FourCC, i->ChromaFormat,
540  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
541  return MFX_ERR_UNSUPPORTED;
542  }
543 
544  ret = qsv_setup_mids(resp, ctx->hw_frames_ctx, ctx->mids_buf);
545  if (ret < 0) {
546  av_log(ctx->logctx, AV_LOG_ERROR,
547  "Error filling an external frame allocation request\n");
548  return MFX_ERR_MEMORY_ALLOC;
549  }
550  } else if (req->Type & MFX_MEMTYPE_INTERNAL_FRAME) {
551  /* internal frames -- allocate a new hw frames context */
552  AVHWFramesContext *ext_frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
553  mfxFrameInfo *i = &req->Info;
554 
555  AVBufferRef *frames_ref, *mids_buf;
556  AVHWFramesContext *frames_ctx;
557  AVQSVFramesContext *frames_hwctx;
558 
559  frames_ref = av_hwframe_ctx_alloc(ext_frames_ctx->device_ref);
560  if (!frames_ref)
561  return MFX_ERR_MEMORY_ALLOC;
562 
563  frames_ctx = (AVHWFramesContext*)frames_ref->data;
564  frames_hwctx = frames_ctx->hwctx;
565 
566  frames_ctx->format = AV_PIX_FMT_QSV;
567  frames_ctx->sw_format = ff_qsv_map_fourcc(i->FourCC);
568  frames_ctx->width = i->Width;
569  frames_ctx->height = i->Height;
570  frames_ctx->initial_pool_size = req->NumFrameSuggested;
571 
572  frames_hwctx->frame_type = req->Type;
573 
574  ret = av_hwframe_ctx_init(frames_ref);
575  if (ret < 0) {
576  av_log(ctx->logctx, AV_LOG_ERROR,
577  "Error initializing a frames context for an internal frame "
578  "allocation request\n");
579  av_buffer_unref(&frames_ref);
580  return MFX_ERR_MEMORY_ALLOC;
581  }
582 
583  mids_buf = qsv_create_mids(frames_ref);
584  if (!mids_buf) {
585  av_buffer_unref(&frames_ref);
586  return MFX_ERR_MEMORY_ALLOC;
587  }
588 
589  ret = qsv_setup_mids(resp, frames_ref, mids_buf);
590  av_buffer_unref(&mids_buf);
591  av_buffer_unref(&frames_ref);
592  if (ret < 0) {
593  av_log(ctx->logctx, AV_LOG_ERROR,
594  "Error filling an internal frame allocation request\n");
595  return MFX_ERR_MEMORY_ALLOC;
596  }
597  } else {
598  return MFX_ERR_UNSUPPORTED;
599  }
600 
601  return MFX_ERR_NONE;
602 }
603 
604 static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
605 {
606  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
607  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual + 1]);
608  av_freep(&resp->mids);
609  return MFX_ERR_NONE;
610 }
611 
612 static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
613 {
614  QSVMid *qsv_mid = mid;
615  AVHWFramesContext *hw_frames_ctx = (AVHWFramesContext*)qsv_mid->hw_frames_ref->data;
616  AVQSVFramesContext *hw_frames_hwctx = hw_frames_ctx->hwctx;
617  int ret;
618 
619  if (qsv_mid->locked_frame)
620  return MFX_ERR_UNDEFINED_BEHAVIOR;
621 
622  /* Allocate a system memory frame that will hold the mapped data. */
623  qsv_mid->locked_frame = av_frame_alloc();
624  if (!qsv_mid->locked_frame)
625  return MFX_ERR_MEMORY_ALLOC;
626  qsv_mid->locked_frame->format = hw_frames_ctx->sw_format;
627 
628  /* wrap the provided handle in a hwaccel AVFrame */
629  qsv_mid->hw_frame = av_frame_alloc();
630  if (!qsv_mid->hw_frame)
631  goto fail;
632 
633  qsv_mid->hw_frame->data[3] = (uint8_t*)&qsv_mid->surf;
634  qsv_mid->hw_frame->format = AV_PIX_FMT_QSV;
635 
636  // doesn't really matter what buffer is used here
637  qsv_mid->hw_frame->buf[0] = av_buffer_alloc(1);
638  if (!qsv_mid->hw_frame->buf[0])
639  goto fail;
640 
641  qsv_mid->hw_frame->width = hw_frames_ctx->width;
642  qsv_mid->hw_frame->height = hw_frames_ctx->height;
643 
644  qsv_mid->hw_frame->hw_frames_ctx = av_buffer_ref(qsv_mid->hw_frames_ref);
645  if (!qsv_mid->hw_frame->hw_frames_ctx)
646  goto fail;
647 
648  qsv_mid->surf.Info = hw_frames_hwctx->surfaces[0].Info;
649  qsv_mid->surf.Data.MemId = qsv_mid->handle;
650 
651  /* map the data to the system memory */
652  ret = av_hwframe_map(qsv_mid->locked_frame, qsv_mid->hw_frame,
653  AV_HWFRAME_MAP_DIRECT);
654  if (ret < 0)
655  goto fail;
656 
657  ptr->Pitch = qsv_mid->locked_frame->linesize[0];
658  ptr->Y = qsv_mid->locked_frame->data[0];
659  ptr->U = qsv_mid->locked_frame->data[1];
660  ptr->V = qsv_mid->locked_frame->data[1] + 1;
661 
662  return MFX_ERR_NONE;
663 fail:
664  av_frame_free(&qsv_mid->hw_frame);
665  av_frame_free(&qsv_mid->locked_frame);
666  return MFX_ERR_MEMORY_ALLOC;
667 }
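/* Descriptive note (editorial): for the NV12/P010 layouts typically used here the
 * chroma samples are interleaved in a single plane, which is why ptr->V is set to
 * ptr->U + 1 rather than to a separate plane pointer. */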
668 
669 static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
670 {
671  QSVMid *qsv_mid = mid;
672 
673  av_frame_free(&qsv_mid->locked_frame);
674  av_frame_free(&qsv_mid->hw_frame);
675 
676  return MFX_ERR_NONE;
677 }
678 
679 static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
680 {
681  QSVMid *qsv_mid = (QSVMid*)mid;
682  *hdl = qsv_mid->handle;
683  return MFX_ERR_NONE;
684 }
685 
686 int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
687  AVBufferRef *device_ref, const char *load_plugins,
688  int gpu_copy)
689 {
690  static const mfxHandleType handle_types[] = {
691  MFX_HANDLE_VA_DISPLAY,
692  MFX_HANDLE_D3D9_DEVICE_MANAGER,
693  MFX_HANDLE_D3D11_DEVICE,
694  };
695  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref->data;
696  AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
697  mfxSession parent_session = device_hwctx->session;
698  mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
699  mfxHDL handle = NULL;
700 
701  mfxSession session;
702  mfxVersion ver;
703  mfxIMPL impl;
704  mfxHandleType handle_type;
705  mfxStatus err;
706 
707  int i, ret;
708 
709  err = MFXQueryIMPL(parent_session, &impl);
710  if (err == MFX_ERR_NONE)
711  err = MFXQueryVersion(parent_session, &ver);
712  if (err != MFX_ERR_NONE)
713  return ff_qsv_print_error(avctx, err,
714  "Error querying the session attributes");
715 
716  for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
717  err = MFXVideoCORE_GetHandle(parent_session, handle_types[i], &handle);
718  if (err == MFX_ERR_NONE) {
719  handle_type = handle_types[i];
720  break;
721  }
722  handle = NULL;
723  }
724  if (!handle) {
725  av_log(avctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
726  "from the session\n");
727  }
728 
729 #if QSV_VERSION_ATLEAST(1, 16)
730  init_par.GPUCopy = gpu_copy;
731 #endif
732  init_par.Implementation = impl;
733  init_par.Version = ver;
734  err = MFXInitEx(init_par, &session);
735  if (err != MFX_ERR_NONE)
736  return ff_qsv_print_error(avctx, err,
737  "Error initializing a child MFX session");
738 
739  if (handle) {
740  err = MFXVideoCORE_SetHandle(session, handle_type, handle);
741  if (err != MFX_ERR_NONE)
742  return ff_qsv_print_error(avctx, err,
743  "Error setting a HW handle");
744  }
745 
746  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
747  err = MFXJoinSession(parent_session, session);
748  if (err != MFX_ERR_NONE)
749  return ff_qsv_print_error(avctx, err,
750  "Error joining session");
751  }
752 
753  ret = qsv_load_plugins(session, load_plugins, avctx);
754  if (ret < 0) {
755  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
756  return ret;
757  }
758 
759  *psession = session;
760  return 0;
761 }
762 
763 int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession,
764  QSVFramesContext *qsv_frames_ctx,
765  const char *load_plugins, int opaque, int gpu_copy)
766 {
767  mfxFrameAllocator frame_allocator = {
768  .pthis = qsv_frames_ctx,
769  .Alloc = qsv_frame_alloc,
770  .Lock = qsv_frame_lock,
771  .Unlock = qsv_frame_unlock,
772  .GetHDL = qsv_frame_get_hdl,
773  .Free = qsv_frame_free,
774  };
775 
776  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)qsv_frames_ctx->hw_frames_ctx->data;
777  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
778 
779  mfxSession session;
780  mfxStatus err;
781 
782  int ret;
783 
784  ret = ff_qsv_init_session_device(avctx, &session,
785  frames_ctx->device_ref, load_plugins, gpu_copy);
786  if (ret < 0)
787  return ret;
788 
789  if (!opaque) {
790  qsv_frames_ctx->logctx = avctx;
791 
792  /* allocate the memory ids for the external frames */
793  av_buffer_unref(&qsv_frames_ctx->mids_buf);
794  qsv_frames_ctx->mids_buf = qsv_create_mids(qsv_frames_ctx->hw_frames_ctx);
795  if (!qsv_frames_ctx->mids_buf)
796  return AVERROR(ENOMEM);
797  qsv_frames_ctx->mids = (QSVMid*)qsv_frames_ctx->mids_buf->data;
798  qsv_frames_ctx->nb_mids = frames_hwctx->nb_surfaces;
799 
800  err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
801  if (err != MFX_ERR_NONE)
802  return ff_qsv_print_error(avctx, err,
803  "Error setting a frame allocator");
804  }
805 
806  *psession = session;
807  return 0;
808 }
809 
810 int ff_qsv_close_internal_session(QSVSession *qs)
811 {
812  if (qs->session) {
813  MFXClose(qs->session);
814  qs->session = NULL;
815  }
816 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
817  av_buffer_unref(&qs->va_device_ref);
818 #endif
819  return 0;
820 }