/*
 * Intel MediaSDK QSV encoder/decoder shared code
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <mfx/mfxvideo.h>
#include <mfx/mfxplugin.h>
#include <mfx/mfxjpeg.h>

#include <stdio.h>
#include <string.h>

#include "libavutil/avstring.h"
#include "libavutil/common.h"
#include "libavutil/error.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/imgutils.h"
#include "libavutil/avassert.h"

#include "avcodec.h"
#include "qsv_internal.h"

40 
41 #include "mfx/mfxvp8.h"
42 
int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
{
    switch (codec_id) {
    case AV_CODEC_ID_H264:
        return MFX_CODEC_AVC;
    case AV_CODEC_ID_HEVC:
        return MFX_CODEC_HEVC;
    case AV_CODEC_ID_MPEG1VIDEO:
    case AV_CODEC_ID_MPEG2VIDEO:
        return MFX_CODEC_MPEG2;
    case AV_CODEC_ID_VC1:
        return MFX_CODEC_VC1;
    case AV_CODEC_ID_VP8:
        return MFX_CODEC_VP8;
    case AV_CODEC_ID_MJPEG:
        return MFX_CODEC_JPEG;
    case AV_CODEC_ID_VP9:
        return MFX_CODEC_VP9;
#if QSV_VERSION_ATLEAST(1, 34)
    case AV_CODEC_ID_AV1:
        return MFX_CODEC_AV1;
#endif

    default:
        break;
    }

    return AVERROR(ENOSYS);
}

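/* Textual descriptions of the libmfx IOPattern flags, used by
 * ff_qsv_print_iopattern() below. */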
static const struct {
    int mfx_iopattern;
    const char *desc;
} qsv_iopatterns[] = {
    {MFX_IOPATTERN_IN_VIDEO_MEMORY,   "input is video memory surface"   },
    {MFX_IOPATTERN_IN_SYSTEM_MEMORY,  "input is system memory surface"  },
    {MFX_IOPATTERN_IN_OPAQUE_MEMORY,  "input is opaque memory surface"  },
    {MFX_IOPATTERN_OUT_VIDEO_MEMORY,  "output is video memory surface"  },
    {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
    {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
};

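/* Log a human-readable description of an mfxIOPattern value at verbose level. */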
int ff_qsv_print_iopattern(void *log_ctx, int mfx_iopattern,
                           const char *extra_string)
{
    const char *desc = NULL;

    for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
        if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
            desc = qsv_iopatterns[i].desc;
        }
    }
    if (!desc)
        desc = "unknown iopattern";

    av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
    return 0;
}

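/* Mapping of libmfx status codes to FFmpeg error codes and descriptions. */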
static const struct {
    mfxStatus   mfxerr;
    int         averr;
    const char *desc;
} qsv_errors[] = {
    { MFX_ERR_NONE,                     0,               "success"                          },
    { MFX_ERR_UNKNOWN,                  AVERROR_UNKNOWN, "unknown error"                    },
    { MFX_ERR_NULL_PTR,                 AVERROR(EINVAL), "NULL pointer"                     },
    { MFX_ERR_UNSUPPORTED,              AVERROR(ENOSYS), "unsupported"                      },
    { MFX_ERR_MEMORY_ALLOC,             AVERROR(ENOMEM), "failed to allocate memory"        },
    { MFX_ERR_NOT_ENOUGH_BUFFER,        AVERROR(ENOMEM), "insufficient input/output buffer" },
    { MFX_ERR_INVALID_HANDLE,           AVERROR(EINVAL), "invalid handle"                   },
    { MFX_ERR_LOCK_MEMORY,              AVERROR(EIO),    "failed to lock the memory block"  },
    { MFX_ERR_NOT_INITIALIZED,          AVERROR_BUG,     "not initialized"                  },
    { MFX_ERR_NOT_FOUND,                AVERROR(ENOSYS), "specified object was not found"   },
    /* the following 3 errors should always be handled explicitly, so those "mappings"
     * are for completeness only */
    { MFX_ERR_MORE_DATA,                AVERROR_UNKNOWN, "expect more data at input"        },
    { MFX_ERR_MORE_SURFACE,             AVERROR_UNKNOWN, "expect more surface at output"    },
    { MFX_ERR_MORE_BITSTREAM,           AVERROR_UNKNOWN, "expect more bitstream at output"  },
    { MFX_ERR_ABORTED,                  AVERROR_UNKNOWN, "operation aborted"                },
    { MFX_ERR_DEVICE_LOST,              AVERROR(EIO),    "device lost"                      },
    { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters"    },
    { MFX_ERR_INVALID_VIDEO_PARAM,      AVERROR(EINVAL), "invalid video parameters"         },
    { MFX_ERR_UNDEFINED_BEHAVIOR,       AVERROR_BUG,     "undefined behavior"               },
    { MFX_ERR_DEVICE_FAILED,            AVERROR(EIO),    "device failed"                    },
    { MFX_ERR_GPU_HANG,                 AVERROR(EIO),    "GPU Hang"                         },
    { MFX_ERR_REALLOC_SURFACE,          AVERROR_UNKNOWN, "need bigger surface for output"   },
    { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters"    },
    { MFX_ERR_INVALID_AUDIO_PARAM,      AVERROR(EINVAL), "invalid audio parameters"         },

    { MFX_WRN_IN_EXECUTION,             0,               "operation in execution"           },
    { MFX_WRN_DEVICE_BUSY,              0,               "device busy"                      },
    { MFX_WRN_VIDEO_PARAM_CHANGED,      0,               "video parameters changed"         },
    { MFX_WRN_PARTIAL_ACCELERATION,     0,               "partial acceleration"             },
    { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0,               "incompatible video parameters"    },
    { MFX_WRN_VALUE_NOT_CHANGED,        0,               "value is saturated"               },
    { MFX_WRN_OUT_OF_RANGE,             0,               "value out of range"               },
    { MFX_WRN_FILTER_SKIPPED,           0,               "filter skipped"                   },
    { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0,               "incompatible audio parameters"    },

#if QSV_VERSION_ATLEAST(1, 31)
    { MFX_ERR_NONE_PARTIAL_OUTPUT,      0,               "partial output"                   },
#endif
};

/**
 * Convert a libmfx error code into an FFmpeg error code.
 */
static int qsv_map_error(mfxStatus mfx_err, const char **desc)
{
    int i;
    for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
        if (qsv_errors[i].mfxerr == mfx_err) {
            if (desc)
                *desc = qsv_errors[i].desc;
            return qsv_errors[i].averr;
        }
    }
    if (desc)
        *desc = "unknown error";
    return AVERROR_UNKNOWN;
}

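/* Log a libmfx error/warning together with its description and return the
 * mapped FFmpeg error code. */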
int ff_qsv_print_error(void *log_ctx, mfxStatus err,
                       const char *error_string)
{
    const char *desc;
    int ret = qsv_map_error(err, &desc);
    av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
    return ret;
}

int ff_qsv_print_warning(void *log_ctx, mfxStatus err,
                         const char *warning_string)
{
    const char *desc;
    int ret = qsv_map_error(err, &desc);
    av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
    return ret;
}

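/* Map a libmfx FourCC to the corresponding FFmpeg pixel format. */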
enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
{
    switch (fourcc) {
    case MFX_FOURCC_NV12:    return AV_PIX_FMT_NV12;
    case MFX_FOURCC_P010:    return AV_PIX_FMT_P010;
    case MFX_FOURCC_P8:      return AV_PIX_FMT_PAL8;
    case MFX_FOURCC_A2RGB10: return AV_PIX_FMT_X2RGB10;
    case MFX_FOURCC_RGB4:    return AV_PIX_FMT_BGRA;
#if CONFIG_VAAPI
    case MFX_FOURCC_YUY2:    return AV_PIX_FMT_YUYV422;
    case MFX_FOURCC_Y210:    return AV_PIX_FMT_Y210;
#endif
    }
    return AV_PIX_FMT_NONE;
}

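/* Map an FFmpeg pixel format to the libmfx FourCC written to *fourcc and
 * return the pixel format actually used for the surface. */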
int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc)
{
    switch (format) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_NV12:
        *fourcc = MFX_FOURCC_NV12;
        return AV_PIX_FMT_NV12;
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_P010:
        *fourcc = MFX_FOURCC_P010;
        return AV_PIX_FMT_P010;
    case AV_PIX_FMT_X2RGB10:
        *fourcc = MFX_FOURCC_A2RGB10;
        return AV_PIX_FMT_X2RGB10;
    case AV_PIX_FMT_BGRA:
        *fourcc = MFX_FOURCC_RGB4;
        return AV_PIX_FMT_BGRA;
#if CONFIG_VAAPI
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUYV422:
        *fourcc = MFX_FOURCC_YUY2;
        return AV_PIX_FMT_YUYV422;
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_Y210:
        *fourcc = MFX_FOURCC_Y210;
        return AV_PIX_FMT_Y210;
#endif
    default:
        return AVERROR(ENOSYS);
    }
}

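/* Fill the plane pointers of an mfxFrameSurface1 from a system-memory AVFrame. */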
int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_P010:
        surface->Data.Y  = frame->data[0];
        surface->Data.UV = frame->data[1];
        /* The SDK checks Data.V when using system memory for VP9 encoding */
        surface->Data.V  = surface->Data.UV + 1;
        break;

    case AV_PIX_FMT_BGRA:
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
    case AV_PIX_FMT_YUYV422:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.V = frame->data[0] + 3;
        break;

    case AV_PIX_FMT_Y210:
        surface->Data.Y16 = (mfxU16 *)frame->data[0];
        surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
        surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
        break;
    default:
        return AVERROR(ENOSYS);
    }
    surface->Data.PitchLow = frame->linesize[0];

    return 0;
}

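/* Return the index of the given frame's surface in the frames context's QSVMid
 * array, matched by the MemId handle pair. */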
int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
{
    int i;
    for (i = 0; i < ctx->nb_mids; i++) {
        QSVMid *mid = &ctx->mids[i];
        mfxHDLPair *pair = (mfxHDLPair*)frame->surface.Data.MemId;
        if ((mid->handle_pair->first == pair->first) &&
            (mid->handle_pair->second == pair->second))
            return i;
    }
    return AVERROR_BUG;
}

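/* Translate the libmfx PicStruct interlacing flags to an AVFieldOrder. */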
enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
{
    enum AVFieldOrder field = AV_FIELD_UNKNOWN;
    switch (mfx_pic_struct & 0xF) {
    case MFX_PICSTRUCT_PROGRESSIVE:
        field = AV_FIELD_PROGRESSIVE;
        break;
    case MFX_PICSTRUCT_FIELD_TFF:
        field = AV_FIELD_TT;
        break;
    case MFX_PICSTRUCT_FIELD_BFF:
        field = AV_FIELD_BB;
        break;
    }

    return field;
}

enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
{
    enum AVPictureType type;
    switch (mfx_pic_type & 0x7) {
    case MFX_FRAMETYPE_I:
        if (mfx_pic_type & MFX_FRAMETYPE_S)
            type = AV_PICTURE_TYPE_SI;
        else
            type = AV_PICTURE_TYPE_I;
        break;
    case MFX_FRAMETYPE_B:
        type = AV_PICTURE_TYPE_B;
        break;
    case MFX_FRAMETYPE_P:
        if (mfx_pic_type & MFX_FRAMETYPE_S)
            type = AV_PICTURE_TYPE_SP;
        else
            type = AV_PICTURE_TYPE_P;
        break;
    case MFX_FRAMETYPE_UNKNOWN:
        type = AV_PICTURE_TYPE_NONE;
        break;
    default:
        av_assert0(0);
    }

    return type;
}

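/* Parse a ':'-separated list of hex-encoded plugin UIDs and load each plugin
 * into the given libmfx session. */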
static int qsv_load_plugins(mfxSession session, const char *load_plugins,
                            void *logctx)
{
    if (!load_plugins || !*load_plugins)
        return 0;

    while (*load_plugins) {
        mfxPluginUID uid;
        mfxStatus ret;
        int i, err = 0;

        char *plugin = av_get_token(&load_plugins, ":");
        if (!plugin)
            return AVERROR(ENOMEM);
        if (strlen(plugin) != 2 * sizeof(uid.Data)) {
            av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
            err = AVERROR(EINVAL);
            goto load_plugin_fail;
        }

        for (i = 0; i < sizeof(uid.Data); i++) {
            err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
            if (err != 1) {
                av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID\n");
                err = AVERROR(EINVAL);
                goto load_plugin_fail;
            }

        }

        ret = MFXVideoUSER_Load(session, &uid, 1);
        if (ret < 0) {
            char errorbuf[128];
            snprintf(errorbuf, sizeof(errorbuf),
                     "Could not load the requested plugin '%s'", plugin);
            err = ff_qsv_print_error(logctx, ret, errorbuf);
            goto load_plugin_fail;
        }

        if (*load_plugins)
            load_plugins++;
load_plugin_fail:
        av_freep(&plugin);
        if (err < 0)
            return err;
    }

    return 0;

}

// This code is only required for Linux, since a display handle is needed there.
// For Windows the session is complete and ready to use.

#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
{
    AVDictionary *child_device_opts = NULL;
    AVVAAPIDeviceContext *hwctx;
    int ret;

    av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
    av_dict_set(&child_device_opts, "driver",        "iHD",  0);

    ret = av_hwdevice_ctx_create(&qs->va_device_ref, AV_HWDEVICE_TYPE_VAAPI, NULL, child_device_opts, 0);
    av_dict_free(&child_device_opts);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create a VAAPI device.\n");
        return ret;
    } else {
        qs->va_device_ctx = (AVHWDeviceContext*)qs->va_device_ref->data;
        hwctx = qs->va_device_ctx->hwctx;

        ret = MFXVideoCORE_SetHandle(qs->session,
                (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)hwctx->display);
        if (ret < 0) {
            return ff_qsv_print_error(avctx, ret, "Error during set display handle\n");
        }
    }

    return 0;
}
#endif //AVCODEC_QSV_LINUX_SESSION_HANDLE

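/* Create a self-contained MFX session (with its own VAAPI device on Linux) and
 * log which libmfx implementation was selected. */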
int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
                                 const char *load_plugins, int gpu_copy)
{
#if CONFIG_D3D11VA
    mfxIMPL impl = MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11;
#else
    mfxIMPL impl = MFX_IMPL_AUTO_ANY;
#endif
    mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
    mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };

    const char *desc;
    int ret;

    init_par.GPUCopy        = gpu_copy;
    init_par.Implementation = impl;
    init_par.Version        = ver;
    ret = MFXInitEx(init_par, &qs->session);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error initializing an internal MFX session");

#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
    ret = ff_qsv_set_display_handle(avctx, qs);
    if (ret < 0)
        return ret;
#endif

    ret = qsv_load_plugins(qs->session, load_plugins, avctx);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
        return ret;
    }

    ret = MFXQueryIMPL(qs->session, &impl);
    if (ret != MFX_ERR_NONE)
        return ff_qsv_print_error(avctx, ret,
                                  "Error querying the session attributes");

    switch (MFX_IMPL_BASETYPE(impl)) {
    case MFX_IMPL_SOFTWARE:
        desc = "software";
        break;
    case MFX_IMPL_HARDWARE:
    case MFX_IMPL_HARDWARE2:
    case MFX_IMPL_HARDWARE3:
    case MFX_IMPL_HARDWARE4:
        desc = "hardware accelerated";
        break;
    default:
        desc = "unknown";
    }

    av_log(avctx, AV_LOG_VERBOSE,
           "Initialized an internal MFX session using %s implementation\n",
           desc);

    return 0;
}

static void mids_buf_free(void *opaque, uint8_t *data)
{
    AVBufferRef *hw_frames_ref = opaque;
    av_buffer_unref(&hw_frames_ref);
    av_freep(&data);
}

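/* Allocate a QSVMid descriptor for every surface in the frames context; the
 * returned buffer keeps a reference to the frames context alive. */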
static AVBufferRef *qsv_create_mids(AVBufferRef *hw_frames_ref)
{
    AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
    AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
    int nb_surfaces = frames_hwctx->nb_surfaces;

    AVBufferRef *mids_buf, *hw_frames_ref1;
    QSVMid *mids;
    int i;

    hw_frames_ref1 = av_buffer_ref(hw_frames_ref);
    if (!hw_frames_ref1)
        return NULL;

    mids = av_calloc(nb_surfaces, sizeof(*mids));
    if (!mids) {
        av_buffer_unref(&hw_frames_ref1);
        return NULL;
    }

    mids_buf = av_buffer_create((uint8_t*)mids, nb_surfaces * sizeof(*mids),
                                mids_buf_free, hw_frames_ref1, 0);
    if (!mids_buf) {
        av_buffer_unref(&hw_frames_ref1);
        av_freep(&mids);
        return NULL;
    }

    for (i = 0; i < nb_surfaces; i++) {
        QSVMid *mid = &mids[i];
        mid->handle_pair   = (mfxHDLPair*)frames_hwctx->surfaces[i].Data.MemId;
        mid->hw_frames_ref = hw_frames_ref1;
    }

    return mids_buf;
}

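/* Fill an mfxFrameAllocResponse from an existing QSVMid array; two extra
 * (hidden) entries at the end of resp->mids hold references to the frames
 * context and to the mids buffer so they outlive the response. */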
static int qsv_setup_mids(mfxFrameAllocResponse *resp, AVBufferRef *hw_frames_ref,
                          AVBufferRef *mids_buf)
{
    AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
    AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
    QSVMid *mids = (QSVMid*)mids_buf->data;
    int nb_surfaces = frames_hwctx->nb_surfaces;
    int i;

    // the allocated size of the array is two larger than the number of
    // surfaces, we store the references to the frames context and the
    // QSVMid array there
    resp->mids = av_calloc(nb_surfaces + 2, sizeof(*resp->mids));
    if (!resp->mids)
        return AVERROR(ENOMEM);

    for (i = 0; i < nb_surfaces; i++)
        resp->mids[i] = &mids[i];
    resp->NumFrameActual = nb_surfaces;

    resp->mids[resp->NumFrameActual] = (mfxMemId)av_buffer_ref(hw_frames_ref);
    if (!resp->mids[resp->NumFrameActual]) {
        av_freep(&resp->mids);
        return AVERROR(ENOMEM);
    }

    resp->mids[resp->NumFrameActual + 1] = av_buffer_ref(mids_buf);
    if (!resp->mids[resp->NumFrameActual + 1]) {
        av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
        av_freep(&resp->mids);
        return AVERROR(ENOMEM);
    }

    return 0;
}

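/* mfxFrameAllocator.Alloc callback: external requests are served from the
 * caller-supplied frames context, internal requests get a newly created one. */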
static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                                 mfxFrameAllocResponse *resp)
{
    QSVFramesContext *ctx = pthis;
    int ret;

    /* this should only be called from an encoder or decoder and
     * only allocates video memory frames */
    if (!(req->Type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
                       MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)) ||
        !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)))
        return MFX_ERR_UNSUPPORTED;

    if (req->Type & MFX_MEMTYPE_EXTERNAL_FRAME) {
        /* external frames -- fill from the caller-supplied frames context */
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
        mfxFrameInfo *i  = &req->Info;
        mfxFrameInfo *i1 = &frames_hwctx->surfaces[0].Info;

        if (i->Width  > i1->Width  || i->Height > i1->Height ||
            i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
            av_log(ctx->logctx, AV_LOG_ERROR, "Mismatching surface properties in an "
                   "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
                   i->Width,  i->Height,  i->FourCC,  i->ChromaFormat,
                   i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
            return MFX_ERR_UNSUPPORTED;
        }

        ret = qsv_setup_mids(resp, ctx->hw_frames_ctx, ctx->mids_buf);
        if (ret < 0) {
            av_log(ctx->logctx, AV_LOG_ERROR,
                   "Error filling an external frame allocation request\n");
            return MFX_ERR_MEMORY_ALLOC;
        }
    } else if (req->Type & MFX_MEMTYPE_INTERNAL_FRAME) {
        /* internal frames -- allocate a new hw frames context */
        AVHWFramesContext *ext_frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
        mfxFrameInfo *i = &req->Info;

        AVBufferRef *frames_ref, *mids_buf;
        AVHWFramesContext *frames_ctx;
        AVQSVFramesContext *frames_hwctx;

        frames_ref = av_hwframe_ctx_alloc(ext_frames_ctx->device_ref);
        if (!frames_ref)
            return MFX_ERR_MEMORY_ALLOC;

        frames_ctx   = (AVHWFramesContext*)frames_ref->data;
        frames_hwctx = frames_ctx->hwctx;

        frames_ctx->format            = AV_PIX_FMT_QSV;
        frames_ctx->sw_format         = ff_qsv_map_fourcc(i->FourCC);
        frames_ctx->width             = i->Width;
        frames_ctx->height            = i->Height;
        frames_ctx->initial_pool_size = req->NumFrameSuggested;

        frames_hwctx->frame_type      = req->Type;

        ret = av_hwframe_ctx_init(frames_ref);
        if (ret < 0) {
            av_log(ctx->logctx, AV_LOG_ERROR,
                   "Error initializing a frames context for an internal frame "
                   "allocation request\n");
            av_buffer_unref(&frames_ref);
            return MFX_ERR_MEMORY_ALLOC;
        }

        mids_buf = qsv_create_mids(frames_ref);
        if (!mids_buf) {
            av_buffer_unref(&frames_ref);
            return MFX_ERR_MEMORY_ALLOC;
        }

        ret = qsv_setup_mids(resp, frames_ref, mids_buf);
        av_buffer_unref(&mids_buf);
        av_buffer_unref(&frames_ref);
        if (ret < 0) {
            av_log(ctx->logctx, AV_LOG_ERROR,
                   "Error filling an internal frame allocation request\n");
            return MFX_ERR_MEMORY_ALLOC;
        }
    } else {
        return MFX_ERR_UNSUPPORTED;
    }

    return MFX_ERR_NONE;
}

static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
    av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual + 1]);
    av_freep(&resp->mids);
    return MFX_ERR_NONE;
}

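/* mfxFrameAllocator.Lock callback: map the hardware surface into a newly
 * allocated system-memory frame so the SDK can access its data. */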
static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    QSVMid *qsv_mid = mid;
    AVHWFramesContext *hw_frames_ctx = (AVHWFramesContext*)qsv_mid->hw_frames_ref->data;
    AVQSVFramesContext *hw_frames_hwctx = hw_frames_ctx->hwctx;
    int ret;

    if (qsv_mid->locked_frame)
        return MFX_ERR_UNDEFINED_BEHAVIOR;

    /* Allocate a system memory frame that will hold the mapped data. */
    qsv_mid->locked_frame = av_frame_alloc();
    if (!qsv_mid->locked_frame)
        return MFX_ERR_MEMORY_ALLOC;
    qsv_mid->locked_frame->format = hw_frames_ctx->sw_format;

    /* wrap the provided handle in a hwaccel AVFrame */
    qsv_mid->hw_frame = av_frame_alloc();
    if (!qsv_mid->hw_frame)
        goto fail;

    qsv_mid->hw_frame->data[3] = (uint8_t*)&qsv_mid->surf;
    qsv_mid->hw_frame->format  = AV_PIX_FMT_QSV;

    // doesn't really matter what buffer is used here
    qsv_mid->hw_frame->buf[0]  = av_buffer_alloc(1);
    if (!qsv_mid->hw_frame->buf[0])
        goto fail;

    qsv_mid->hw_frame->width   = hw_frames_ctx->width;
    qsv_mid->hw_frame->height  = hw_frames_ctx->height;

    qsv_mid->hw_frame->hw_frames_ctx = av_buffer_ref(qsv_mid->hw_frames_ref);
    if (!qsv_mid->hw_frame->hw_frames_ctx)
        goto fail;

    qsv_mid->surf.Info       = hw_frames_hwctx->surfaces[0].Info;
    qsv_mid->surf.Data.MemId = qsv_mid->handle_pair;

    /* map the data to the system memory */
    ret = av_hwframe_map(qsv_mid->locked_frame, qsv_mid->hw_frame,
                         AV_HWFRAME_MAP_DIRECT);
    if (ret < 0)
        goto fail;

    ptr->Pitch = qsv_mid->locked_frame->linesize[0];
    ptr->Y     = qsv_mid->locked_frame->data[0];
    ptr->U     = qsv_mid->locked_frame->data[1];
    ptr->V     = qsv_mid->locked_frame->data[1] + 1;

    return MFX_ERR_NONE;
fail:
    av_frame_free(&qsv_mid->hw_frame);
    av_frame_free(&qsv_mid->locked_frame);
    return MFX_ERR_MEMORY_ALLOC;
}

static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    QSVMid *qsv_mid = mid;

    av_frame_free(&qsv_mid->locked_frame);
    av_frame_free(&qsv_mid->hw_frame);

    return MFX_ERR_NONE;
}

static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    QSVMid *qsv_mid = (QSVMid*)mid;
    mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
    mfxHDLPair *pair_src = (mfxHDLPair*)qsv_mid->handle_pair;

    pair_dst->first = pair_src->first;

    if (pair_src->second != (mfxMemId)MFX_INFINITE)
        pair_dst->second = pair_src->second;
    return MFX_ERR_NONE;
}

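/* Create a child MFX session bound to the same hardware device handle
 * (VA display, D3D9 or D3D11 device) as the session in the device context. */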
int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
                               AVBufferRef *device_ref, const char *load_plugins,
                               int gpu_copy)
{
    AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref->data;
    AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
    mfxSession parent_session = device_hwctx->session;
    mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
    mfxHDL handle = NULL;
    int hw_handle_supported = 0;

    mfxSession session;
    mfxVersion ver;
    mfxIMPL impl;
    mfxHandleType handle_type;
    mfxStatus err;
    int ret;

    err = MFXQueryIMPL(parent_session, &impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(parent_session, &ver);
    if (err != MFX_ERR_NONE)
        return ff_qsv_print_error(avctx, err,
                                  "Error querying the session attributes");

    if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_VA_DISPLAY;
        hw_handle_supported = 1;
    } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_D3D11_DEVICE;
        hw_handle_supported = 1;
    } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
        hw_handle_supported = 1;
    }

    if (hw_handle_supported) {
        err = MFXVideoCORE_GetHandle(parent_session, handle_type, &handle);
        if (err != MFX_ERR_NONE) {
            return ff_qsv_print_error(avctx, err,
                                      "Error getting handle session");
        }
    }
    if (!handle) {
        av_log(avctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
               "from the session\n");
    }

    init_par.GPUCopy        = gpu_copy;
    init_par.Implementation = impl;
    init_par.Version        = ver;
    err = MFXInitEx(init_par, &session);
    if (err != MFX_ERR_NONE)
        return ff_qsv_print_error(avctx, err,
                                  "Error initializing a child MFX session");

    if (handle) {
        err = MFXVideoCORE_SetHandle(session, handle_type, handle);
        if (err != MFX_ERR_NONE)
            return ff_qsv_print_error(avctx, err,
                                      "Error setting a HW handle");
    }

    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        err = MFXJoinSession(parent_session, session);
        if (err != MFX_ERR_NONE)
            return ff_qsv_print_error(avctx, err,
                                      "Error joining session");
    }

    ret = qsv_load_plugins(session, load_plugins, avctx);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
        return ret;
    }

    *psession = session;
    return 0;
}

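/* Create a session for the given frames context and, unless opaque memory is
 * used, install the custom frame allocator defined above on it. */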
int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession,
                               QSVFramesContext *qsv_frames_ctx,
                               const char *load_plugins, int opaque, int gpu_copy)
{
    mfxFrameAllocator frame_allocator = {
        .pthis  = qsv_frames_ctx,
        .Alloc  = qsv_frame_alloc,
        .Lock   = qsv_frame_lock,
        .Unlock = qsv_frame_unlock,
        .GetHDL = qsv_frame_get_hdl,
        .Free   = qsv_frame_free,
    };

    AVHWFramesContext *frames_ctx = (AVHWFramesContext*)qsv_frames_ctx->hw_frames_ctx->data;
    AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

    mfxSession session;
    mfxStatus err;

    int ret;

    ret = ff_qsv_init_session_device(avctx, &session,
                                     frames_ctx->device_ref, load_plugins, gpu_copy);
    if (ret < 0)
        return ret;

    if (!opaque) {
        qsv_frames_ctx->logctx = avctx;

        /* allocate the memory ids for the external frames */
        av_buffer_unref(&qsv_frames_ctx->mids_buf);
        qsv_frames_ctx->mids_buf = qsv_create_mids(qsv_frames_ctx->hw_frames_ctx);
        if (!qsv_frames_ctx->mids_buf)
            return AVERROR(ENOMEM);
        qsv_frames_ctx->mids    = (QSVMid*)qsv_frames_ctx->mids_buf->data;
        qsv_frames_ctx->nb_mids = frames_hwctx->nb_surfaces;

        err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return ff_qsv_print_error(avctx, err,
                                      "Error setting a frame allocator");
    }

    *psession = session;
    return 0;
}

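/* Close a session created by ff_qsv_init_internal_session() and release the
 * VAAPI device reference on Linux. */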
int ff_qsv_close_internal_session(QSVSession *qs)
{
    if (qs->session) {
        MFXClose(qs->session);
        qs->session = NULL;
    }
#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
    av_buffer_unref(&qs->va_device_ref);
#endif
    return 0;
}

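/* Attach an extra buffer to the frame's surface, ignoring duplicates of the
 * same BufferId and requests beyond QSV_MAX_FRAME_EXT_PARAMS. */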
void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame,
                                mfxExtBuffer *param)
{
    int i;

    for (i = 0; i < frame->num_ext_params; i++) {
        mfxExtBuffer *ext_buffer = frame->ext_param[i];

        if (ext_buffer->BufferId == param->BufferId) {
            av_log(avctx, AV_LOG_WARNING, "A buffer with the same type has been "
                   "added\n");
            return;
        }
    }

    if (frame->num_ext_params < QSV_MAX_FRAME_EXT_PARAMS) {
        frame->ext_param[frame->num_ext_params] = param;
        frame->num_ext_params++;
        frame->surface.Data.NumExtParam = frame->num_ext_params;
    } else {
        av_log(avctx, AV_LOG_WARNING, "Ignore this extra buffer because do not "
               "have enough space\n");
    }
}