FFmpeg
qsv.c
1 /*
2  * Intel MediaSDK QSV encoder/decoder shared code
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include <mfx/mfxvideo.h>
22 #include <mfx/mfxplugin.h>
23 #include <mfx/mfxjpeg.h>
24 
25 #include <stdio.h>
26 #include <string.h>
27 
28 #include "libavutil/avstring.h"
29 #include "libavutil/common.h"
30 #include "libavutil/error.h"
31 #include "libavutil/hwcontext.h"
32 #include "libavutil/hwcontext_qsv.h"
33 #include "libavutil/imgutils.h"
34 #include "libavutil/avassert.h"
35 
36 #include "avcodec.h"
37 #include "qsv_internal.h"
38 
39 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
40 
41 #include "mfx/mfxvp8.h"
42 
42 
43 int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
44 {
45  switch (codec_id) {
46  case AV_CODEC_ID_H264:
47  return MFX_CODEC_AVC;
48  case AV_CODEC_ID_HEVC:
49  return MFX_CODEC_HEVC;
50  case AV_CODEC_ID_MPEG1VIDEO:
51  case AV_CODEC_ID_MPEG2VIDEO:
52  return MFX_CODEC_MPEG2;
53  case AV_CODEC_ID_VC1:
54  return MFX_CODEC_VC1;
55  case AV_CODEC_ID_VP8:
56  return MFX_CODEC_VP8;
57  case AV_CODEC_ID_MJPEG:
58  return MFX_CODEC_JPEG;
59  case AV_CODEC_ID_VP9:
60  return MFX_CODEC_VP9;
61 #if QSV_VERSION_ATLEAST(1, 34)
62  case AV_CODEC_ID_AV1:
63  return MFX_CODEC_AV1;
64 #endif
65 
66  default:
67  break;
68  }
69 
70  return AVERROR(ENOSYS);
71 }
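/*
 * Usage sketch (hypothetical caller, not taken from this file): an encoder or
 * decoder resolves the libmfx codec ID before filling its mfxVideoParam:
 *
 *     int codec_id = ff_qsv_codec_id_to_mfx(avctx->codec_id);
 *     if (codec_id < 0)
 *         return codec_id;          // AVERROR(ENOSYS) for unsupported codecs
 *     param.mfx.CodecId = codec_id;
 */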
72 
73 static const struct {
74  int mfx_iopattern;
75  const char *desc;
76 } qsv_iopatterns[] = {
77  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
78  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
79  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
80  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
81  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
82  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
83 };
84 
85 int ff_qsv_print_iopattern(void *log_ctx, int mfx_iopattern,
86  const char *extra_string)
87 {
88  const char *desc = NULL;
89 
90  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
91  if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
92  desc = qsv_iopatterns[i].desc;
93  }
94  }
95  if (!desc)
96  desc = "unknown iopattern";
97 
98  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
99  return 0;
100 }
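/*
 * Usage sketch (assumed caller): log the IOPattern carried by an
 * mfxVideoParam after Query/Init, e.g.
 *
 *     ff_qsv_print_iopattern(avctx, param.IOPattern, "Decoder IOPattern");
 */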
101 
102 static const struct {
103  mfxStatus mfxerr;
104  int averr;
105  const char *desc;
106 } qsv_errors[] = {
107  { MFX_ERR_NONE, 0, "success" },
108  { MFX_ERR_UNKNOWN, AVERROR_UNKNOWN, "unknown error" },
109  { MFX_ERR_NULL_PTR, AVERROR(EINVAL), "NULL pointer" },
110  { MFX_ERR_UNSUPPORTED, AVERROR(ENOSYS), "unsupported" },
111  { MFX_ERR_MEMORY_ALLOC, AVERROR(ENOMEM), "failed to allocate memory" },
112  { MFX_ERR_NOT_ENOUGH_BUFFER, AVERROR(ENOMEM), "insufficient input/output buffer" },
113  { MFX_ERR_INVALID_HANDLE, AVERROR(EINVAL), "invalid handle" },
114  { MFX_ERR_LOCK_MEMORY, AVERROR(EIO), "failed to lock the memory block" },
115  { MFX_ERR_NOT_INITIALIZED, AVERROR_BUG, "not initialized" },
116  { MFX_ERR_NOT_FOUND, AVERROR(ENOSYS), "specified object was not found" },
117  /* the following 3 errors should always be handled explicitly, so those "mappings"
118  * are for completeness only */
119  { MFX_ERR_MORE_DATA, AVERROR_UNKNOWN, "expect more data at input" },
120  { MFX_ERR_MORE_SURFACE, AVERROR_UNKNOWN, "expect more surface at output" },
121  { MFX_ERR_MORE_BITSTREAM, AVERROR_UNKNOWN, "expect more bitstream at output" },
122  { MFX_ERR_ABORTED, AVERROR_UNKNOWN, "operation aborted" },
123  { MFX_ERR_DEVICE_LOST, AVERROR(EIO), "device lost" },
124  { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters" },
125  { MFX_ERR_INVALID_VIDEO_PARAM, AVERROR(EINVAL), "invalid video parameters" },
126  { MFX_ERR_UNDEFINED_BEHAVIOR, AVERROR_BUG, "undefined behavior" },
127  { MFX_ERR_DEVICE_FAILED, AVERROR(EIO), "device failed" },
128  { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters" },
129  { MFX_ERR_INVALID_AUDIO_PARAM, AVERROR(EINVAL), "invalid audio parameters" },
130 
131  { MFX_WRN_IN_EXECUTION, 0, "operation in execution" },
132  { MFX_WRN_DEVICE_BUSY, 0, "device busy" },
133  { MFX_WRN_VIDEO_PARAM_CHANGED, 0, "video parameters changed" },
134  { MFX_WRN_PARTIAL_ACCELERATION, 0, "partial acceleration" },
135  { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0, "incompatible video parameters" },
136  { MFX_WRN_VALUE_NOT_CHANGED, 0, "value is saturated" },
137  { MFX_WRN_OUT_OF_RANGE, 0, "value out of range" },
138  { MFX_WRN_FILTER_SKIPPED, 0, "filter skipped" },
139  { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0, "incompatible audio parameters" },
140 };
141 
142 /**
143  * Convert a libmfx error code into an FFmpeg error code.
144  */
145 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
146 {
147  int i;
148  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
149  if (qsv_errors[i].mfxerr == mfx_err) {
150  if (desc)
151  *desc = qsv_errors[i].desc;
152  return qsv_errors[i].averr;
153  }
154  }
155  if (desc)
156  *desc = "unknown error";
157  return AVERROR_UNKNOWN;
158 }
159 
160 int ff_qsv_print_error(void *log_ctx, mfxStatus err,
161  const char *error_string)
162 {
163  const char *desc;
164  int ret = qsv_map_error(err, &desc);
165  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
166  return ret;
167 }
168 
169 int ff_qsv_print_warning(void *log_ctx, mfxStatus err,
170  const char *warning_string)
171 {
172  const char *desc;
173  int ret = qsv_map_error(err, &desc);
174  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
175  return ret;
176 }
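/*
 * Usage sketch (assumed caller): a libmfx status is mapped to an AVERROR and
 * logged in one step, so call sites can return the result directly. Positive
 * mfxStatus values are warnings, negative values are errors:
 *
 *     mfxStatus err = MFXVideoDECODE_Init(session, &param);
 *     if (err < 0)
 *         return ff_qsv_print_error(avctx, err, "Error initializing the decoder");
 *     else if (err > 0)
 *         ff_qsv_print_warning(avctx, err, "Warning during decoder initialization");
 */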
177 
178 enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
179 {
180  switch (fourcc) {
181  case MFX_FOURCC_NV12: return AV_PIX_FMT_NV12;
182  case MFX_FOURCC_P010: return AV_PIX_FMT_P010;
183  case MFX_FOURCC_P8: return AV_PIX_FMT_PAL8;
184  case MFX_FOURCC_A2RGB10: return AV_PIX_FMT_X2RGB10;
185  case MFX_FOURCC_RGB4: return AV_PIX_FMT_BGRA;
186 #if CONFIG_VAAPI
187  case MFX_FOURCC_YUY2: return AV_PIX_FMT_YUYV422;
188  case MFX_FOURCC_Y210: return AV_PIX_FMT_Y210;
189 #endif
190  }
191  return AV_PIX_FMT_NONE;
192 }
193 
194 int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc)
195 {
196  switch (format) {
197  case AV_PIX_FMT_YUV420P:
198  case AV_PIX_FMT_YUVJ420P:
199  case AV_PIX_FMT_NV12:
200  *fourcc = MFX_FOURCC_NV12;
201  return AV_PIX_FMT_NV12;
202  case AV_PIX_FMT_YUV420P10:
203  case AV_PIX_FMT_P010:
204  *fourcc = MFX_FOURCC_P010;
205  return AV_PIX_FMT_P010;
206  case AV_PIX_FMT_X2RGB10:
207  *fourcc = MFX_FOURCC_A2RGB10;
208  return AV_PIX_FMT_X2RGB10;
209  case AV_PIX_FMT_BGRA:
210  *fourcc = MFX_FOURCC_RGB4;
211  return AV_PIX_FMT_BGRA;
212 #if CONFIG_VAAPI
213  case AV_PIX_FMT_YUV422P:
214  case AV_PIX_FMT_YUYV422:
215  *fourcc = MFX_FOURCC_YUY2;
216  return AV_PIX_FMT_YUYV422;
217  case AV_PIX_FMT_YUV422P10:
218  case AV_PIX_FMT_Y210:
219  *fourcc = MFX_FOURCC_Y210;
220  return AV_PIX_FMT_Y210;
221 #endif
222  default:
223  return AVERROR(ENOSYS);
224  }
225 }
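/*
 * Usage sketch (assumed caller): the function selects the libmfx FourCC and
 * returns the software pixel format the surfaces should use:
 *
 *     uint32_t fourcc;
 *     int fmt = ff_qsv_map_pixfmt(avctx->sw_pix_fmt, &fourcc);
 *     if (fmt < 0)
 *         return fmt;                           // AVERROR(ENOSYS)
 *     param.mfx.FrameInfo.FourCC = fourcc;
 */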
226 
227 int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
228 {
229  switch (frame->format) {
230  case AV_PIX_FMT_NV12:
231  case AV_PIX_FMT_P010:
232  surface->Data.Y = frame->data[0];
233  surface->Data.UV = frame->data[1];
234  /* The SDK checks Data.V when using system memory for VP9 encoding */
235  surface->Data.V = surface->Data.UV + 1;
236  break;
238  case AV_PIX_FMT_BGRA:
239  surface->Data.B = frame->data[0];
240  surface->Data.G = frame->data[0] + 1;
241  surface->Data.R = frame->data[0] + 2;
242  surface->Data.A = frame->data[0] + 3;
243  break;
244  case AV_PIX_FMT_YUYV422:
245  surface->Data.Y = frame->data[0];
246  surface->Data.U = frame->data[0] + 1;
247  surface->Data.V = frame->data[0] + 3;
248  break;
249 
250  case AV_PIX_FMT_Y210:
251  surface->Data.Y16 = (mfxU16 *)frame->data[0];
252  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
253  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
254  break;
255  default:
256  return AVERROR(ENOSYS);
257  }
258  surface->Data.PitchLow = frame->linesize[0];
259 
260  return 0;
261 }
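/*
 * Usage sketch (assumed caller): with system-memory input, the AVFrame planes
 * are wrapped into an mfxFrameSurface1 before being submitted to libmfx:
 *
 *     mfxFrameSurface1 surf = { 0 };
 *     surf.Info = param.mfx.FrameInfo;          // assumed to be filled already
 *     ret = ff_qsv_map_frame_to_surface(frame, &surf);
 *     if (ret < 0)
 *         return ret;
 */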
262 
263 int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
264 {
265  int i;
266  for (i = 0; i < ctx->nb_mids; i++) {
267  QSVMid *mid = &ctx->mids[i];
268  mfxHDLPair *pair = (mfxHDLPair*)frame->surface.Data.MemId;
269  if ((mid->handle_pair->first == pair->first) &&
270  (mid->handle_pair->second == pair->second))
271  return i;
272  }
273  return AVERROR_BUG;
274 }
275 
276 enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
277 {
278  enum AVFieldOrder field = AV_FIELD_UNKNOWN;
279  switch (mfx_pic_struct & 0xF) {
280  case MFX_PICSTRUCT_PROGRESSIVE:
281  field = AV_FIELD_PROGRESSIVE;
282  break;
283  case MFX_PICSTRUCT_FIELD_TFF:
284  field = AV_FIELD_TT;
285  break;
286  case MFX_PICSTRUCT_FIELD_BFF:
287  field = AV_FIELD_BB;
288  break;
289  }
290 
291  return field;
292 }
293 
294 enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
295 {
296  enum AVPictureType type;
297  switch (mfx_pic_type & 0x7) {
298  case MFX_FRAMETYPE_I:
299  if (mfx_pic_type & MFX_FRAMETYPE_S)
300  type = AV_PICTURE_TYPE_SI;
301  else
302  type = AV_PICTURE_TYPE_I;
303  break;
304  case MFX_FRAMETYPE_B:
305  type = AV_PICTURE_TYPE_B;
306  break;
307  case MFX_FRAMETYPE_P:
308  if (mfx_pic_type & MFX_FRAMETYPE_S)
309  type = AV_PICTURE_TYPE_SP;
310  else
311  type = AV_PICTURE_TYPE_P;
312  break;
313  case MFX_FRAMETYPE_UNKNOWN:
314  type = AV_PICTURE_TYPE_NONE;
315  break;
316  default:
317  av_assert0(0);
318  }
319 
320  return type;
321 }
322 
323 static int qsv_load_plugins(mfxSession session, const char *load_plugins,
324  void *logctx)
325 {
326  if (!load_plugins || !*load_plugins)
327  return 0;
328 
329  while (*load_plugins) {
330  mfxPluginUID uid;
331  mfxStatus ret;
332  int i, err = 0;
333 
334  char *plugin = av_get_token(&load_plugins, ":");
335  if (!plugin)
336  return AVERROR(ENOMEM);
337  if (strlen(plugin) != 2 * sizeof(uid.Data)) {
338  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
339  err = AVERROR(EINVAL);
340  goto load_plugin_fail;
341  }
342 
343  for (i = 0; i < sizeof(uid.Data); i++) {
344  err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
345  if (err != 1) {
346  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID\n");
347  err = AVERROR(EINVAL);
348  goto load_plugin_fail;
349  }
350 
351  }
352 
353  ret = MFXVideoUSER_Load(session, &uid, 1);
354  if (ret < 0) {
355  char errorbuf[128];
356  snprintf(errorbuf, sizeof(errorbuf),
357  "Could not load the requested plugin '%s'", plugin);
358  err = ff_qsv_print_error(logctx, ret, errorbuf);
359  goto load_plugin_fail;
360  }
361 
362  if (*load_plugins)
363  load_plugins++;
364 load_plugin_fail:
365  av_freep(&plugin);
366  if (err < 0)
367  return err;
368  }
369 
370  return 0;
371 
372 }
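/*
 * The load_plugins string is a ':'-separated list of plugin UIDs, each written
 * as 32 hexadecimal characters (the 16 bytes of mfxPluginUID.Data), e.g. a
 * hypothetical value:
 *
 *     "0123456789abcdef0123456789abcdef:fedcba9876543210fedcba9876543210"
 */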
373 
374 //This code is only required on Linux, where a display handle is needed.
375 //For Windows the session is complete and ready to use.
376 
377 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
378 static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
379 {
380  AVDictionary *child_device_opts = NULL;
381  AVVAAPIDeviceContext *hwctx;
382  int ret;
383 
384  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
385  av_dict_set(&child_device_opts, "driver", "iHD", 0);
386 
387  ret = av_hwdevice_ctx_create(&qs->va_device_ref, AV_HWDEVICE_TYPE_VAAPI, NULL, child_device_opts, 0);
388  av_dict_free(&child_device_opts);
389  if (ret < 0) {
390  av_log(avctx, AV_LOG_ERROR, "Failed to create a VAAPI device.\n");
391  return ret;
392  } else {
393  qs->va_device_ctx = (AVHWDeviceContext*)qs->va_device_ref->data;
394  hwctx = qs->va_device_ctx->hwctx;
395 
396  ret = MFXVideoCORE_SetHandle(qs->session,
397  (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)hwctx->display);
398  if (ret < 0) {
399  return ff_qsv_print_error(avctx, ret, "Error setting the display handle\n");
400  }
401  }
402 
403  return 0;
404 }
405 #endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
406 
407 int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
408  const char *load_plugins, int gpu_copy)
409 {
410 #if CONFIG_D3D11VA
411  mfxIMPL impl = MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11;
412 #else
413  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
414 #endif
415  mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
416  mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
417 
418  const char *desc;
419  int ret;
420 
421  init_par.GPUCopy = gpu_copy;
422  init_par.Implementation = impl;
423  init_par.Version = ver;
424  ret = MFXInitEx(init_par, &qs->session);
425  if (ret < 0)
426  return ff_qsv_print_error(avctx, ret,
427  "Error initializing an internal MFX session");
428 
429 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
430  ret = ff_qsv_set_display_handle(avctx, qs);
431  if (ret < 0)
432  return ret;
433 #endif
434 
435  ret = qsv_load_plugins(qs->session, load_plugins, avctx);
436  if (ret < 0) {
437  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
438  return ret;
439  }
440 
441  ret = MFXQueryIMPL(qs->session, &impl);
442  if (ret != MFX_ERR_NONE)
443  return ff_qsv_print_error(avctx, ret,
444  "Error querying the session attributes");
445 
446  switch (MFX_IMPL_BASETYPE(impl)) {
447  case MFX_IMPL_SOFTWARE:
448  desc = "software";
449  break;
450  case MFX_IMPL_HARDWARE:
451  case MFX_IMPL_HARDWARE2:
452  case MFX_IMPL_HARDWARE3:
453  case MFX_IMPL_HARDWARE4:
454  desc = "hardware accelerated";
455  break;
456  default:
457  desc = "unknown";
458  }
459 
460  av_log(avctx, AV_LOG_VERBOSE,
461  "Initialized an internal MFX session using %s implementation\n",
462  desc);
463 
464  return 0;
465 }
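/*
 * Usage sketch (assumed caller): codecs that do not receive a session through
 * an AVHWDeviceContext create an internal one and tear it down on close:
 *
 *     QSVSession qs = { 0 };
 *     ret = ff_qsv_init_internal_session(avctx, &qs, load_plugins, gpu_copy);
 *     if (ret < 0)
 *         return ret;
 *     ...
 *     ff_qsv_close_internal_session(&qs);
 */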
466 
467 static void mids_buf_free(void *opaque, uint8_t *data)
468 {
469  AVBufferRef *hw_frames_ref = opaque;
470  av_buffer_unref(&hw_frames_ref);
471  av_freep(&data);
472 }
473 
474 static AVBufferRef *qsv_create_mids(AVBufferRef *hw_frames_ref)
475 {
476  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
477  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
478  int nb_surfaces = frames_hwctx->nb_surfaces;
479 
480  AVBufferRef *mids_buf, *hw_frames_ref1;
481  QSVMid *mids;
482  int i;
483 
484  hw_frames_ref1 = av_buffer_ref(hw_frames_ref);
485  if (!hw_frames_ref1)
486  return NULL;
487 
488  mids = av_calloc(nb_surfaces, sizeof(*mids));
489  if (!mids) {
490  av_buffer_unref(&hw_frames_ref1);
491  return NULL;
492  }
493 
494  mids_buf = av_buffer_create((uint8_t*)mids, nb_surfaces * sizeof(*mids),
495  mids_buf_free, hw_frames_ref1, 0);
496  if (!mids_buf) {
497  av_buffer_unref(&hw_frames_ref1);
498  av_freep(&mids);
499  return NULL;
500  }
501 
502  for (i = 0; i < nb_surfaces; i++) {
503  QSVMid *mid = &mids[i];
504  mid->handle_pair = (mfxHDLPair*)frames_hwctx->surfaces[i].Data.MemId;
505  mid->hw_frames_ref = hw_frames_ref1;
506  }
507 
508  return mids_buf;
509 }
510 
511 static int qsv_setup_mids(mfxFrameAllocResponse *resp, AVBufferRef *hw_frames_ref,
512  AVBufferRef *mids_buf)
513 {
514  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
515  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
516  QSVMid *mids = (QSVMid*)mids_buf->data;
517  int nb_surfaces = frames_hwctx->nb_surfaces;
518  int i;
519 
520  // the allocated size of the array is two larger than the number of
521  // surfaces; the last two entries store the references to the frames
522  // context and to the QSVMid array
523  resp->mids = av_calloc(nb_surfaces + 2, sizeof(*resp->mids));
524  if (!resp->mids)
525  return AVERROR(ENOMEM);
526 
527  for (i = 0; i < nb_surfaces; i++)
528  resp->mids[i] = &mids[i];
529  resp->NumFrameActual = nb_surfaces;
530 
531  resp->mids[resp->NumFrameActual] = (mfxMemId)av_buffer_ref(hw_frames_ref);
532  if (!resp->mids[resp->NumFrameActual]) {
533  av_freep(&resp->mids);
534  return AVERROR(ENOMEM);
535  }
536 
537  resp->mids[resp->NumFrameActual + 1] = av_buffer_ref(mids_buf);
538  if (!resp->mids[resp->NumFrameActual + 1]) {
539  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
540  av_freep(&resp->mids);
541  return AVERROR(ENOMEM);
542  }
543 
544  return 0;
545 }
546 
547 static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
548  mfxFrameAllocResponse *resp)
549 {
550  QSVFramesContext *ctx = pthis;
551  int ret;
552 
553  /* this should only be called from an encoder or decoder and
554  * only allocates video memory frames */
555  if (!(req->Type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
556  MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)) ||
557  !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)))
558  return MFX_ERR_UNSUPPORTED;
559 
560  if (req->Type & MFX_MEMTYPE_EXTERNAL_FRAME) {
561  /* external frames -- fill from the caller-supplied frames context */
562  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
563  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
564  mfxFrameInfo *i = &req->Info;
565  mfxFrameInfo *i1 = &frames_hwctx->surfaces[0].Info;
566 
567  if (i->Width > i1->Width || i->Height > i1->Height ||
568  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
569  av_log(ctx->logctx, AV_LOG_ERROR, "Mismatching surface properties in an "
570  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
571  i->Width, i->Height, i->FourCC, i->ChromaFormat,
572  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
573  return MFX_ERR_UNSUPPORTED;
574  }
575 
576  ret = qsv_setup_mids(resp, ctx->hw_frames_ctx, ctx->mids_buf);
577  if (ret < 0) {
578  av_log(ctx->logctx, AV_LOG_ERROR,
579  "Error filling an external frame allocation request\n");
580  return MFX_ERR_MEMORY_ALLOC;
581  }
582  } else if (req->Type & MFX_MEMTYPE_INTERNAL_FRAME) {
583  /* internal frames -- allocate a new hw frames context */
584  AVHWFramesContext *ext_frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
585  mfxFrameInfo *i = &req->Info;
586 
587  AVBufferRef *frames_ref, *mids_buf;
588  AVHWFramesContext *frames_ctx;
589  AVQSVFramesContext *frames_hwctx;
590 
591  frames_ref = av_hwframe_ctx_alloc(ext_frames_ctx->device_ref);
592  if (!frames_ref)
593  return MFX_ERR_MEMORY_ALLOC;
594 
595  frames_ctx = (AVHWFramesContext*)frames_ref->data;
596  frames_hwctx = frames_ctx->hwctx;
597 
598  frames_ctx->format = AV_PIX_FMT_QSV;
599  frames_ctx->sw_format = ff_qsv_map_fourcc(i->FourCC);
600  frames_ctx->width = i->Width;
601  frames_ctx->height = i->Height;
602  frames_ctx->initial_pool_size = req->NumFrameSuggested;
603 
604  frames_hwctx->frame_type = req->Type;
605 
606  ret = av_hwframe_ctx_init(frames_ref);
607  if (ret < 0) {
608  av_log(ctx->logctx, AV_LOG_ERROR,
609  "Error initializing a frames context for an internal frame "
610  "allocation request\n");
611  av_buffer_unref(&frames_ref);
612  return MFX_ERR_MEMORY_ALLOC;
613  }
614 
615  mids_buf = qsv_create_mids(frames_ref);
616  if (!mids_buf) {
617  av_buffer_unref(&frames_ref);
618  return MFX_ERR_MEMORY_ALLOC;
619  }
620 
621  ret = qsv_setup_mids(resp, frames_ref, mids_buf);
622  av_buffer_unref(&mids_buf);
623  av_buffer_unref(&frames_ref);
624  if (ret < 0) {
625  av_log(ctx->logctx, AV_LOG_ERROR,
626  "Error filling an internal frame allocation request\n");
627  return MFX_ERR_MEMORY_ALLOC;
628  }
629  } else {
630  return MFX_ERR_UNSUPPORTED;
631  }
632 
633  return MFX_ERR_NONE;
634 }
635 
636 static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
637 {
638  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
639  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual + 1]);
640  av_freep(&resp->mids);
641  return MFX_ERR_NONE;
642 }
643 
644 static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
645 {
646  QSVMid *qsv_mid = mid;
647  AVHWFramesContext *hw_frames_ctx = (AVHWFramesContext*)qsv_mid->hw_frames_ref->data;
648  AVQSVFramesContext *hw_frames_hwctx = hw_frames_ctx->hwctx;
649  int ret;
650 
651  if (qsv_mid->locked_frame)
652  return MFX_ERR_UNDEFINED_BEHAVIOR;
653 
654  /* Allocate a system memory frame that will hold the mapped data. */
655  qsv_mid->locked_frame = av_frame_alloc();
656  if (!qsv_mid->locked_frame)
657  return MFX_ERR_MEMORY_ALLOC;
658  qsv_mid->locked_frame->format = hw_frames_ctx->sw_format;
659 
660  /* wrap the provided handle in a hwaccel AVFrame */
661  qsv_mid->hw_frame = av_frame_alloc();
662  if (!qsv_mid->hw_frame)
663  goto fail;
664 
665  qsv_mid->hw_frame->data[3] = (uint8_t*)&qsv_mid->surf;
666  qsv_mid->hw_frame->format = AV_PIX_FMT_QSV;
667 
668  // doesn't really matter what buffer is used here
669  qsv_mid->hw_frame->buf[0] = av_buffer_alloc(1);
670  if (!qsv_mid->hw_frame->buf[0])
671  goto fail;
672 
673  qsv_mid->hw_frame->width = hw_frames_ctx->width;
674  qsv_mid->hw_frame->height = hw_frames_ctx->height;
675 
676  qsv_mid->hw_frame->hw_frames_ctx = av_buffer_ref(qsv_mid->hw_frames_ref);
677  if (!qsv_mid->hw_frame->hw_frames_ctx)
678  goto fail;
679 
680  qsv_mid->surf.Info = hw_frames_hwctx->surfaces[0].Info;
681  qsv_mid->surf.Data.MemId = qsv_mid->handle_pair;
682 
683  /* map the data to the system memory */
684  ret = av_hwframe_map(qsv_mid->locked_frame, qsv_mid->hw_frame,
685  AV_HWFRAME_MAP_DIRECT);
686  if (ret < 0)
687  goto fail;
688 
689  ptr->Pitch = qsv_mid->locked_frame->linesize[0];
690  ptr->Y = qsv_mid->locked_frame->data[0];
691  ptr->U = qsv_mid->locked_frame->data[1];
692  ptr->V = qsv_mid->locked_frame->data[1] + 1;
693 
694  return MFX_ERR_NONE;
695 fail:
696  av_frame_free(&qsv_mid->hw_frame);
697  av_frame_free(&qsv_mid->locked_frame);
698  return MFX_ERR_MEMORY_ALLOC;
699 }
700 
701 static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
702 {
703  QSVMid *qsv_mid = mid;
704 
705  av_frame_free(&qsv_mid->locked_frame);
706  av_frame_free(&qsv_mid->hw_frame);
707 
708  return MFX_ERR_NONE;
709 }
710 
711 static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
712 {
713  QSVMid *qsv_mid = (QSVMid*)mid;
714  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
715  mfxHDLPair *pair_src = (mfxHDLPair*)qsv_mid->handle_pair;
716 
717  pair_dst->first = pair_src->first;
718 
719  if (pair_src->second != (mfxMemId)MFX_INFINITE)
720  pair_dst->second = pair_src->second;
721  return MFX_ERR_NONE;
722 }
723 
724 int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
725  AVBufferRef *device_ref, const char *load_plugins,
726  int gpu_copy)
727 {
728  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref->data;
729  AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
730  mfxSession parent_session = device_hwctx->session;
731  mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
732  mfxHDL handle = NULL;
733  int hw_handle_supported = 0;
734 
735  mfxSession session;
736  mfxVersion ver;
737  mfxIMPL impl;
738  mfxHandleType handle_type;
739  mfxStatus err;
740  int ret;
741 
742  err = MFXQueryIMPL(parent_session, &impl);
743  if (err == MFX_ERR_NONE)
744  err = MFXQueryVersion(parent_session, &ver);
745  if (err != MFX_ERR_NONE)
746  return ff_qsv_print_error(avctx, err,
747  "Error querying the session attributes");
748 
749  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
750  handle_type = MFX_HANDLE_VA_DISPLAY;
751  hw_handle_supported = 1;
752  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
753  handle_type = MFX_HANDLE_D3D11_DEVICE;
754  hw_handle_supported = 1;
755  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
756  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
757  hw_handle_supported = 1;
758  }
759 
760  if (hw_handle_supported) {
761  err = MFXVideoCORE_GetHandle(parent_session, handle_type, &handle);
762  if (err != MFX_ERR_NONE) {
763  return ff_qsv_print_error(avctx, err,
764  "Error getting handle session");
765  }
766  }
767  if (!handle) {
768  av_log(avctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
769  "from the session\n");
770  }
771 
772  init_par.GPUCopy = gpu_copy;
773  init_par.Implementation = impl;
774  init_par.Version = ver;
775  err = MFXInitEx(init_par, &session);
776  if (err != MFX_ERR_NONE)
777  return ff_qsv_print_error(avctx, err,
778  "Error initializing a child MFX session");
779 
780  if (handle) {
781  err = MFXVideoCORE_SetHandle(session, handle_type, handle);
782  if (err != MFX_ERR_NONE)
783  return ff_qsv_print_error(avctx, err,
784  "Error setting a HW handle");
785  }
786 
787  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
788  err = MFXJoinSession(parent_session, session);
789  if (err != MFX_ERR_NONE)
790  return ff_qsv_print_error(avctx, err,
791  "Error joining session");
792  }
793 
794  ret = qsv_load_plugins(session, load_plugins, avctx);
795  if (ret < 0) {
796  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
797  return ret;
798  }
799 
800  *psession = session;
801  return 0;
802 }
803 
804 int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession,
805  QSVFramesContext *qsv_frames_ctx,
806  const char *load_plugins, int opaque, int gpu_copy)
807 {
808  mfxFrameAllocator frame_allocator = {
809  .pthis = qsv_frames_ctx,
810  .Alloc = qsv_frame_alloc,
811  .Lock = qsv_frame_lock,
812  .Unlock = qsv_frame_unlock,
813  .GetHDL = qsv_frame_get_hdl,
814  .Free = qsv_frame_free,
815  };
816 
817  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)qsv_frames_ctx->hw_frames_ctx->data;
818  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
819 
820  mfxSession session;
821  mfxStatus err;
822 
823  int ret;
824 
825  ret = ff_qsv_init_session_device(avctx, &session,
826  frames_ctx->device_ref, load_plugins, gpu_copy);
827  if (ret < 0)
828  return ret;
829 
830  if (!opaque) {
831  qsv_frames_ctx->logctx = avctx;
832 
833  /* allocate the memory ids for the external frames */
834  av_buffer_unref(&qsv_frames_ctx->mids_buf);
835  qsv_frames_ctx->mids_buf = qsv_create_mids(qsv_frames_ctx->hw_frames_ctx);
836  if (!qsv_frames_ctx->mids_buf)
837  return AVERROR(ENOMEM);
838  qsv_frames_ctx->mids = (QSVMid*)qsv_frames_ctx->mids_buf->data;
839  qsv_frames_ctx->nb_mids = frames_hwctx->nb_surfaces;
840 
841  err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
842  if (err != MFX_ERR_NONE)
843  return ff_qsv_print_error(avctx, err,
844  "Error setting a frame allocator");
845  }
846 
847  *psession = session;
848  return 0;
849 }
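/*
 * Usage sketch (assumed caller, names are illustrative): when the caller
 * provides an AV_PIX_FMT_QSV hw_frames_ctx, a child session is created on the
 * same device and, unless opaque memory is used, the mfxFrameAllocator
 * callbacks above are installed so libmfx can address the caller's surfaces
 * through the QSVMid array:
 *
 *     ret = ff_qsv_init_session_frames(avctx, &session, &qsv_frames_ctx,
 *                                      load_plugins, opaque, gpu_copy);
 */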
850 
851 int ff_qsv_close_internal_session(QSVSession *qs)
852 {
853  if (qs->session) {
854  MFXClose(qs->session);
855  qs->session = NULL;
856  }
857 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
858  av_buffer_unref(&qs->va_device_ref);
859 #endif
860  return 0;
861 }
862 
863 void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame,
864  mfxExtBuffer * param)
865 {
866  int i;
867 
868  for (i = 0; i < frame->num_ext_params; i++) {
869  mfxExtBuffer *ext_buffer = frame->ext_param[i];
870 
871  if (ext_buffer->BufferId == param->BufferId) {
872  av_log(avctx, AV_LOG_WARNING, "A buffer with the same type has already "
873  "been added\n");
874  return;
875  }
876  }
877 
878  if (frame->num_ext_params < QSV_MAX_FRAME_EXT_PARAMS) {
879  frame->ext_param[frame->num_ext_params] = param;
880  frame->num_ext_params++;
881  frame->surface.Data.NumExtParam = frame->num_ext_params;
882  } else {
883  av_log(avctx, AV_LOG_WARNING, "Ignoring this extra buffer because there is "
884  "not enough space\n");
885  }
886 
887 
888 }
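/*
 * Usage sketch (assumed caller, some_ext is hypothetical): per-frame extension
 * buffers are attached before the surface is submitted; a buffer with a
 * BufferId that is already present is ignored with a warning:
 *
 *     ff_qsv_frame_add_ext_param(avctx, qsv_frame, (mfxExtBuffer *)&some_ext);
 */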