FFmpeg
qsvvpp.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 /**
20  * @file
21  * Intel Quick Sync Video VPP base function
22  */
23 
24 #include "libavutil/common.h"
25 #include "libavutil/mathematics.h"
26 #include "libavutil/hwcontext.h"
27 #include "libavutil/hwcontext_qsv.h"
28 #include "libavutil/time.h"
29 #include "libavutil/pixdesc.h"
30 
31 #include "internal.h"
32 #include "qsvvpp.h"
33 #include "video.h"
34 
35 #define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
36  MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
37 #define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
38 #define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
39 
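/*
 * Illustrative sketch (not part of the original source): these macros classify
 * an MFX_MEMTYPE_* bitmask, e.g. AVQSVFramesContext.frame_type, and the same
 * tests later select the mfxVideoParam IOPattern in ff_qsvvpp_create().
 * The helper name below is hypothetical.
 */
static av_unused int example_iopattern_for_input(int mem_mode)
{
    if (IS_SYSTEM_MEMORY(mem_mode))
        return MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(mem_mode))
        return MFX_IOPATTERN_IN_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(mem_mode))
        return MFX_IOPATTERN_IN_OPAQUE_MEMORY;
    return 0;
}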
40 typedef struct QSVFrame {
41  AVFrame *frame;
42  mfxFrameSurface1 *surface;
43  mfxFrameSurface1 surface_internal; /* for system memory */
44  struct QSVFrame *next;
45 } QSVFrame;
46 
47 /* abstract struct for all QSV filters */
48 struct QSVVPPContext {
49  mfxSession session;
50  int (*filter_frame) (AVFilterLink *outlink, AVFrame *frame);/* callback */
51  enum AVPixelFormat out_sw_format; /* Real output format */
52  mfxVideoParam vpp_param;
53  mfxFrameInfo *frame_infos; /* frame info for each input */
54 
55  /* members related to the input/output surface */
56  int in_mem_mode;
57  int out_mem_mode;
58  QSVFrame *in_frame_list;
59  QSVFrame *out_frame_list;
60  int nb_surface_ptrs_in;
61  int nb_surface_ptrs_out;
62  mfxFrameSurface1 **surface_ptrs_in;
63  mfxFrameSurface1 **surface_ptrs_out;
64 
65  /* MFXVPP extern parameters */
66  mfxExtOpaqueSurfaceAlloc opaque_alloc;
67  mfxExtBuffer **ext_buffers;
68  int nb_ext_buffers;
69 };
70 
71 static const mfxHandleType handle_types[] = {
72  MFX_HANDLE_VA_DISPLAY,
73  MFX_HANDLE_D3D9_DEVICE_MANAGER,
74  MFX_HANDLE_D3D11_DEVICE,
75 };
76 
77 static const AVRational default_tb = { 1, 90000 };
78 
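/*
 * Example sketch (not in the original source): libmfx timestamps use the fixed
 * 90 kHz clock above, so pts values are rescaled between the filter-link time
 * base and default_tb when frames enter and leave the VPP; e.g. pts 40 in a
 * 1/25 time base becomes 144000 ticks. The helper name is hypothetical.
 */
static av_unused mfxU64 example_link_pts_to_mfx(int64_t pts, AVRational link_tb)
{
    return av_rescale_q(pts, link_tb, default_tb);
}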
79 /* functions for frameAlloc */
80 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
81  mfxFrameAllocResponse *resp)
82 {
83  QSVVPPContext *s = pthis;
84  int i;
85 
86  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
87  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
88  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
89  return MFX_ERR_UNSUPPORTED;
90 
91  if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
92  resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
93  if (!resp->mids)
94  return AVERROR(ENOMEM);
95 
96  for (i = 0; i < s->nb_surface_ptrs_in; i++)
97  resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;
98 
99  resp->NumFrameActual = s->nb_surface_ptrs_in;
100  } else {
101  resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
102  if (!resp->mids)
103  return AVERROR(ENOMEM);
104 
105  for (i = 0; i < s->nb_surface_ptrs_out; i++)
106  resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;
107 
108  resp->NumFrameActual = s->nb_surface_ptrs_out;
109  }
110 
111  return MFX_ERR_NONE;
112 }
113 
114 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
115 {
116  av_freep(&resp->mids);
117  return MFX_ERR_NONE;
118 }
119 
120 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
121 {
122  return MFX_ERR_UNSUPPORTED;
123 }
124 
125 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
126 {
127  return MFX_ERR_UNSUPPORTED;
128 }
129 
130 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
131 {
132  *hdl = mid;
133  return MFX_ERR_NONE;
134 }
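/*
 * Note on the callbacks above: this allocator never allocates real surfaces;
 * frame_alloc() only hands back the MemId values of surfaces the caller
 * already owns, which is why Lock/Unlock are unsupported and GetHDL simply
 * returns the mid unchanged.
 */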
135 
136 static int pix_fmt_to_mfx_fourcc(int format)
137 {
138  switch (format) {
139  case AV_PIX_FMT_YUV420P:
140  return MFX_FOURCC_YV12;
141  case AV_PIX_FMT_NV12:
142  return MFX_FOURCC_NV12;
143  case AV_PIX_FMT_YUYV422:
144  return MFX_FOURCC_YUY2;
145  case AV_PIX_FMT_BGRA:
146  return MFX_FOURCC_RGB4;
147  }
148 
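    /* formats without an explicit case above (e.g. AV_PIX_FMT_P010) fall back to the NV12 FourCC */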
149  return MFX_FOURCC_NV12;
150 }
151 
152 static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
153 {
154  switch (frame->format) {
155  case AV_PIX_FMT_NV12:
156  case AV_PIX_FMT_P010:
157  surface->Data.Y = frame->data[0];
158  surface->Data.UV = frame->data[1];
159  break;
160  case AV_PIX_FMT_YUV420P:
161  surface->Data.Y = frame->data[0];
162  surface->Data.U = frame->data[1];
163  surface->Data.V = frame->data[2];
164  break;
165  case AV_PIX_FMT_YUYV422:
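        /* packed Y0 Cb Y1 Cr: Cb and Cr sit at byte offsets 1 and 3 of plane 0 */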
166  surface->Data.Y = frame->data[0];
167  surface->Data.U = frame->data[0] + 1;
168  surface->Data.V = frame->data[0] + 3;
169  break;
170  case AV_PIX_FMT_RGB32:
171  surface->Data.B = frame->data[0];
172  surface->Data.G = frame->data[0] + 1;
173  surface->Data.R = frame->data[0] + 2;
174  surface->Data.A = frame->data[0] + 3;
175  break;
176  default:
177  return MFX_ERR_UNSUPPORTED;
178  }
179  surface->Data.Pitch = frame->linesize[0];
180 
181  return 0;
182 }
183 
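/*
 * Usage sketch (illustrative, not in the original source): in system-memory
 * mode a software AVFrame is bound to an mfxFrameSurface1 by pointing the
 * surface's plane pointers at the frame's data planes; surface->Info is then
 * filled separately, as submit_frame() and query_frame() below do. The helper
 * name is hypothetical.
 */
static av_unused int example_wrap_sw_frame(AVFrame *frame, mfxFrameSurface1 *surf)
{
    memset(surf, 0, sizeof(*surf));
    return map_frame_to_surface(frame, surf); /* fills Data.Y/U/V/UV and Data.Pitch */
}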
184 /* fill the surface info */
185 static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
186 {
187  enum AVPixelFormat pix_fmt;
188  AVHWFramesContext *frames_ctx;
189  AVQSVFramesContext *frames_hwctx;
190  const AVPixFmtDescriptor *desc;
191 
192  if (link->format == AV_PIX_FMT_QSV) {
193  if (!link->hw_frames_ctx)
194  return AVERROR(EINVAL);
195 
196  frames_ctx = (AVHWFramesContext *)link->hw_frames_ctx->data;
197  frames_hwctx = frames_ctx->hwctx;
198  *frameinfo = frames_hwctx->surfaces[0].Info;
199  } else {
200  pix_fmt = link->format;
201  desc = av_pix_fmt_desc_get(pix_fmt);
202  if (!desc)
203  return AVERROR_BUG;
204 
205  frameinfo->CropX = 0;
206  frameinfo->CropY = 0;
207  frameinfo->Width = FFALIGN(link->w, 32);
208  frameinfo->Height = FFALIGN(link->h, 32);
209  frameinfo->PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
210  frameinfo->FourCC = pix_fmt_to_mfx_fourcc(pix_fmt);
211  frameinfo->BitDepthLuma = desc->comp[0].depth;
212  frameinfo->BitDepthChroma = desc->comp[0].depth;
213  frameinfo->Shift = desc->comp[0].depth > 8;
214  if (desc->log2_chroma_w && desc->log2_chroma_h)
215  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
216  else if (desc->log2_chroma_w)
217  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
218  else
219  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
220  }
221 
222  frameinfo->CropW = link->w;
223  frameinfo->CropH = link->h;
224  frameinfo->FrameRateExtN = link->frame_rate.num;
225  frameinfo->FrameRateExtD = link->frame_rate.den;
226  frameinfo->AspectRatioW = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
227  frameinfo->AspectRatioH = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;
228 
229  return 0;
230 }
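/*
 * Worked example (illustrative): for a 1920x1080, 25 fps NV12 software link
 * the function above produces Width/Height padded to 1920x1088, CropW/CropH
 * of 1920x1080, FourCC NV12, 8-bit luma/chroma depth, Shift 0,
 * ChromaFormat MFX_CHROMAFORMAT_YUV420 and FrameRateExtN/D = 25/1.
 */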
231 
232 static void clear_unused_frames(QSVFrame *list)
233 {
234  while (list) {
235  if (list->surface && !list->surface->Data.Locked) {
236  list->surface = NULL;
237  av_frame_free(&list->frame);
238  }
239  list = list->next;
240  }
241 }
242 
243 static void clear_frame_list(QSVFrame **list)
244 {
245  while (*list) {
246  QSVFrame *frame;
247 
248  frame = *list;
249  *list = (*list)->next;
250  av_frame_free(&frame->frame);
251  av_freep(&frame);
252  }
253 }
254 
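/*
 * Note: together with clear_unused_frames(), the function below implements a
 * small recycling pool; entries whose surface libmfx no longer locks have
 * their AVFrame released and are reused before a new node is allocated and
 * pushed onto the head of the list.
 */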
255 static QSVFrame *get_free_frame(QSVFrame **list)
256 {
257  QSVFrame *out = *list;
258 
259  for (; out; out = out->next) {
260  if (!out->surface)
261  break;
262  }
263 
264  if (!out) {
265  out = av_mallocz(sizeof(*out));
266  if (!out) {
267  av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
268  return NULL;
269  }
270  out->next = *list;
271  *list = out;
272  }
273 
274  return out;
275 }
276 
277 /* get the input surface */
278 static QSVFrame *submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
279 {
280  QSVFrame *qsv_frame;
281  AVFilterContext *ctx = inlink->dst;
282 
283  clear_unused_frames(s->in_frame_list);
284 
285  qsv_frame = get_free_frame(&s->in_frame_list);
286  if (!qsv_frame)
287  return NULL;
288 
289  /* Turn AVFrame into mfxFrameSurface1.
290  * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
291  * mfxFrameSurface1 is stored in AVFrame->data[3];
292  * for system memory mode, raw video data is stored in
293  * AVFrame, we should map it into mfxFrameSurface1.
294  */
295  if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
296  if (picref->format != AV_PIX_FMT_QSV) {
297  av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
298  return NULL;
299  }
300  qsv_frame->frame = av_frame_clone(picref);
301  qsv_frame->surface = (mfxFrameSurface1 *)qsv_frame->frame->data[3];
302  } else {
303  /* make a copy if the input is not padded as libmfx requires */
304  if (picref->height & 31 || picref->linesize[0] & 31) {
305  qsv_frame->frame = ff_get_video_buffer(inlink,
306  FFALIGN(inlink->w, 32),
307  FFALIGN(inlink->h, 32));
308  if (!qsv_frame->frame)
309  return NULL;
310 
311  qsv_frame->frame->width = picref->width;
312  qsv_frame->frame->height = picref->height;
313 
314  if (av_frame_copy(qsv_frame->frame, picref) < 0) {
315  av_frame_free(&qsv_frame->frame);
316  return NULL;
317  }
318 
319  av_frame_copy_props(qsv_frame->frame, picref);
320  } else
321  qsv_frame->frame = av_frame_clone(picref);
322 
323  if (map_frame_to_surface(qsv_frame->frame,
324  &qsv_frame->surface_internal) < 0) {
325  av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
326  return NULL;
327  }
328  qsv_frame->surface = &qsv_frame->surface_internal;
329  }
330 
331  qsv_frame->surface->Info = s->frame_infos[FF_INLINK_IDX(inlink)];
332  qsv_frame->surface->Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
333  inlink->time_base, default_tb);
334 
335  qsv_frame->surface->Info.PicStruct =
336  !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
337  (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
338  MFX_PICSTRUCT_FIELD_BFF);
339  if (qsv_frame->frame->repeat_pict == 1)
340  qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
341  else if (qsv_frame->frame->repeat_pict == 2)
342  qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
343  else if (qsv_frame->frame->repeat_pict == 4)
344  qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;
345 
346  return qsv_frame;
347 }
348 
349 /* get the output surface */
350 static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink)
351 {
352  AVFilterContext *ctx = outlink->src;
353  QSVFrame *out_frame;
354  int ret;
355 
356  clear_unused_frames(s->out_frame_list);
357 
358  out_frame = get_free_frame(&s->out_frame_list);
359  if (!out_frame)
360  return NULL;
361 
362  /* For video memory, get a hw frame;
363  * For system memory, get a sw frame and map it into a mfx_surface. */
364  if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
365  out_frame->frame = av_frame_alloc();
366  if (!out_frame->frame)
367  return NULL;
368 
369  ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
370  if (ret < 0) {
371  av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
372  return NULL;
373  }
374 
375  out_frame->surface = (mfxFrameSurface1 *)out_frame->frame->data[3];
376  } else {
377  /* Get a frame with aligned dimensions.
378  * libmfx requires system memory to be 128x64 aligned. */
379  out_frame->frame = ff_get_video_buffer(outlink,
380  FFALIGN(outlink->w, 128),
381  FFALIGN(outlink->h, 64));
382  if (!out_frame->frame)
383  return NULL;
384 
385  out_frame->frame->width = outlink->w;
386  out_frame->frame->height = outlink->h;
387 
388  ret = map_frame_to_surface(out_frame->frame,
389  &out_frame->surface_internal);
390  if (ret < 0)
391  return NULL;
392 
393  out_frame->surface = &out_frame->surface_internal;
394  }
395 
396  out_frame->surface->Info = s->vpp_param.vpp.Out;
397 
398  return out_frame;
399 }
400 
401 /* create the QSV session */
402 static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
403 {
404  AVFilterLink *inlink = avctx->inputs[0];
405  AVFilterLink *outlink = avctx->outputs[0];
406  AVQSVFramesContext *in_frames_hwctx = NULL;
407  AVQSVFramesContext *out_frames_hwctx = NULL;
408 
409  AVBufferRef *device_ref;
410  AVHWDeviceContext *device_ctx;
411  AVQSVDeviceContext *device_hwctx;
412  mfxHDL handle;
413  mfxHandleType handle_type;
414  mfxVersion ver;
415  mfxIMPL impl;
416  int ret, i;
417 
418  if (inlink->hw_frames_ctx) {
419  AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;
420 
421  device_ref = frames_ctx->device_ref;
422  in_frames_hwctx = frames_ctx->hwctx;
423 
424  s->in_mem_mode = in_frames_hwctx->frame_type;
425 
426  s->surface_ptrs_in = av_mallocz_array(in_frames_hwctx->nb_surfaces,
427  sizeof(*s->surface_ptrs_in));
428  if (!s->surface_ptrs_in)
429  return AVERROR(ENOMEM);
430 
431  for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
432  s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;
433 
434  s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
435  } else if (avctx->hw_device_ctx) {
436  device_ref = avctx->hw_device_ctx;
437  s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
438  } else {
439  av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
440  return AVERROR(EINVAL);
441  }
442 
443  device_ctx = (AVHWDeviceContext *)device_ref->data;
444  device_hwctx = device_ctx->hwctx;
445 
446  if (outlink->format == AV_PIX_FMT_QSV) {
447  AVHWFramesContext *out_frames_ctx;
448  AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
449  if (!out_frames_ref)
450  return AVERROR(ENOMEM);
451 
452  s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
453  MFX_MEMTYPE_OPAQUE_FRAME :
454  MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
455 
456  out_frames_ctx = (AVHWFramesContext *)out_frames_ref->data;
457  out_frames_hwctx = out_frames_ctx->hwctx;
458 
459  out_frames_ctx->format = AV_PIX_FMT_QSV;
460  out_frames_ctx->width = FFALIGN(outlink->w, 32);
461  out_frames_ctx->height = FFALIGN(outlink->h, 32);
462  out_frames_ctx->sw_format = s->out_sw_format;
463  out_frames_ctx->initial_pool_size = 64;
464  if (avctx->extra_hw_frames > 0)
465  out_frames_ctx->initial_pool_size += avctx->extra_hw_frames;
466  out_frames_hwctx->frame_type = s->out_mem_mode;
467 
468  ret = av_hwframe_ctx_init(out_frames_ref);
469  if (ret < 0) {
470  av_buffer_unref(&out_frames_ref);
471  av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
472  return ret;
473  }
474 
475  s->surface_ptrs_out = av_mallocz_array(out_frames_hwctx->nb_surfaces,
476  sizeof(*s->surface_ptrs_out));
477  if (!s->surface_ptrs_out) {
478  av_buffer_unref(&out_frames_ref);
479  return AVERROR(ENOMEM);
480  }
481 
482  for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
483  s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
484  s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;
485 
486  av_buffer_unref(&outlink->hw_frames_ctx);
487  outlink->hw_frames_ctx = out_frames_ref;
488  } else
489  s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
490 
491  /* extract the properties of the "master" session given to us */
492  ret = MFXQueryIMPL(device_hwctx->session, &impl);
493  if (ret == MFX_ERR_NONE)
494  ret = MFXQueryVersion(device_hwctx->session, &ver);
495  if (ret != MFX_ERR_NONE) {
496  av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
497  return AVERROR_UNKNOWN;
498  }
499 
500  for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
501  ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
502  if (ret == MFX_ERR_NONE) {
503  handle_type = handle_types[i];
504  break;
505  }
506  }
507 
508  if (ret != MFX_ERR_NONE) {
509  av_log(avctx, AV_LOG_ERROR, "Error getting the session handle\n");
510  return AVERROR_UNKNOWN;
511  }
512 
513  /* create a "slave" session with those same properties, to be used for vpp */
514  ret = MFXInit(impl, &ver, &s->session);
515  if (ret != MFX_ERR_NONE) {
516  av_log(avctx, AV_LOG_ERROR, "Error initializing a session for scaling\n");
517  return AVERROR_UNKNOWN;
518  }
519 
520  if (handle) {
521  ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
522  if (ret != MFX_ERR_NONE)
523  return AVERROR_UNKNOWN;
524  }
525 
526  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
527  ret = MFXJoinSession(device_hwctx->session, s->session);
528  if (ret != MFX_ERR_NONE)
529  return AVERROR_UNKNOWN;
530  }
531 
532  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
533  s->opaque_alloc.In.Surfaces = s->surface_ptrs_in;
534  s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
535  s->opaque_alloc.In.Type = s->in_mem_mode;
536 
537  s->opaque_alloc.Out.Surfaces = s->surface_ptrs_out;
538  s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
539  s->opaque_alloc.Out.Type = s->out_mem_mode;
540 
541  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
542  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
543  } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
544  mfxFrameAllocator frame_allocator = {
545  .pthis = s,
546  .Alloc = frame_alloc,
547  .Lock = frame_lock,
548  .Unlock = frame_unlock,
549  .GetHDL = frame_get_hdl,
550  .Free = frame_free,
551  };
552 
553  ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
554  if (ret != MFX_ERR_NONE)
555  return AVERROR_UNKNOWN;
556  }
557 
558  return 0;
559 }
560 
561 int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
562 {
563  int i;
564  int ret;
565  QSVVPPContext *s;
566 
567  s = av_mallocz(sizeof(*s));
568  if (!s)
569  return AVERROR(ENOMEM);
570 
571  s->filter_frame = param->filter_frame;
572  if (!s->filter_frame)
573  s->filter_frame = ff_filter_frame;
574  s->out_sw_format = param->out_sw_format;
575 
576  /* create the vpp session */
577  ret = init_vpp_session(avctx, s);
578  if (ret < 0)
579  goto failed;
580 
581  s->frame_infos = av_mallocz_array(avctx->nb_inputs, sizeof(*s->frame_infos));
582  if (!s->frame_infos) {
583  ret = AVERROR(ENOMEM);
584  goto failed;
585  }
586 
587  /* Init each input's information */
588  for (i = 0; i < avctx->nb_inputs; i++) {
589  ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
590  if (ret < 0)
591  goto failed;
592  }
593 
594  /* Update input's frame info according to crop */
595  for (i = 0; i < param->num_crop; i++) {
596  QSVVPPCrop *crop = param->crop + i;
597  if (crop->in_idx >= avctx->nb_inputs) {
598  ret = AVERROR(EINVAL);
599  goto failed;
600  }
601  s->frame_infos[crop->in_idx].CropX = crop->x;
602  s->frame_infos[crop->in_idx].CropY = crop->y;
603  s->frame_infos[crop->in_idx].CropW = crop->w;
604  s->frame_infos[crop->in_idx].CropH = crop->h;
605  }
606 
607  s->vpp_param.vpp.In = s->frame_infos[0];
608 
609  ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
610  if (ret < 0) {
611  av_log(avctx, AV_LOG_ERROR, "Failed to get frame info from link.\n");
612  goto failed;
613  }
614 
615  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
616  s->nb_ext_buffers = param->num_ext_buf + 1;
617  s->ext_buffers = av_mallocz_array(s->nb_ext_buffers, sizeof(*s->ext_buffers));
618  if (!s->ext_buffers) {
619  ret = AVERROR(ENOMEM);
620  goto failed;
621  }
622 
623  s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
624  for (i = 1; i < s->nb_ext_buffers; i++)
625  s->ext_buffers[i] = param->ext_buf[i - 1];
626  s->vpp_param.ExtParam = s->ext_buffers;
627  s->vpp_param.NumExtParam = s->nb_ext_buffers;
628  } else {
629  s->vpp_param.NumExtParam = param->num_ext_buf;
630  s->vpp_param.ExtParam = param->ext_buf;
631  }
632 
633  s->vpp_param.AsyncDepth = 1;
634 
635  if (IS_SYSTEM_MEMORY(s->in_mem_mode))
636  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
637  else if (IS_VIDEO_MEMORY(s->in_mem_mode))
638  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
639  else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
640  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;
641 
642  if (IS_SYSTEM_MEMORY(s->out_mem_mode))
643  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
644  else if (IS_VIDEO_MEMORY(s->out_mem_mode))
645  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
646  else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
647  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
648 
649  ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
650  if (ret < 0) {
651  av_log(avctx, AV_LOG_ERROR, "Failed to create a qsvvpp, ret = %d.\n", ret);
652  goto failed;
653  }
654 
655  *vpp = s;
656  return 0;
657 
658 failed:
659  ff_qsvvpp_free(&s);
660 
661  return ret;
662 }
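/*
 * Usage sketch (hypothetical, not part of this file): a QSV filter typically
 * creates its VPP context from the output link's config_props callback. The
 * ExampleQSVFilterContext struct and function name below are illustrative
 * only; see vf_vpp_qsv.c or vf_overlay_qsv.c for real callers.
 */
typedef struct ExampleQSVFilterContext {
    QSVVPPContext *qsv; /* owned VPP instance */
} ExampleQSVFilterContext;

static av_unused int example_config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    ExampleQSVFilterContext *priv = ctx->priv; /* hypothetical filter state */
    QSVVPPParam param = { NULL };

    param.filter_frame  = NULL;            /* NULL selects ff_filter_frame() */
    param.out_sw_format = AV_PIX_FMT_NV12; /* real layout behind AV_PIX_FMT_QSV */

    return ff_qsvvpp_create(ctx, &priv->qsv, &param);
}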
663 
664 int ff_qsvvpp_free(QSVVPPContext **vpp)
665 {
666  QSVVPPContext *s = *vpp;
667 
668  if (!s)
669  return 0;
670 
671  if (s->session) {
672  MFXVideoVPP_Close(s->session);
673  MFXClose(s->session);
674  }
675 
676  /* release all the resources */
677  clear_frame_list(&s->in_frame_list);
678  clear_frame_list(&s->out_frame_list);
679  av_freep(&s->surface_ptrs_in);
680  av_freep(&s->surface_ptrs_out);
681  av_freep(&s->ext_buffers);
682  av_freep(&s->frame_infos);
683  av_freep(vpp);
684 
685  return 0;
686 }
687 
688 int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
689 {
690  AVFilterContext *ctx = inlink->dst;
691  AVFilterLink *outlink = ctx->outputs[0];
692  mfxSyncPoint sync;
693  QSVFrame *in_frame, *out_frame;
694  int ret, filter_ret;
695 
696  in_frame = submit_frame(s, inlink, picref);
697  if (!in_frame) {
698  av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
699  FF_INLINK_IDX(inlink));
700  return AVERROR(ENOMEM);
701  }
702 
703  do {
704  out_frame = query_frame(s, outlink);
705  if (!out_frame) {
706  av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
707  return AVERROR(ENOMEM);
708  }
709 
710  do {
711  ret = MFXVideoVPP_RunFrameVPPAsync(s->session, in_frame->surface,
712  out_frame->surface, NULL, &sync);
713  if (ret == MFX_WRN_DEVICE_BUSY)
714  av_usleep(500);
715  } while (ret == MFX_WRN_DEVICE_BUSY);
716 
717  if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
718  /* Ignore more_data error */
719  if (ret == MFX_ERR_MORE_DATA)
720  ret = AVERROR(EAGAIN);
721  break;
722  }
723 
724  if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0)
725  av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");
726 
727  out_frame->frame->pts = av_rescale_q(out_frame->surface->Data.TimeStamp,
728  default_tb, outlink->time_base);
729 
730  filter_ret = s->filter_frame(outlink, out_frame->frame);
731  if (filter_ret < 0) {
732  av_frame_free(&out_frame->frame);
733  ret = filter_ret;
734  break;
735  }
736  out_frame->frame = NULL;
737  } while(ret == MFX_ERR_MORE_SURFACE);
738 
739  return ret;
740 }
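/*
 * Usage sketch (hypothetical, not part of this file): a filter's input-pad
 * filter_frame callback pushes frames through the VPP; every produced output
 * is delivered via the filter_frame callback given in QSVVPPParam (or
 * ff_filter_frame() when that callback was NULL). ExampleQSVFilterContext is
 * the illustrative struct from the sketch after ff_qsvvpp_create() above.
 */
static av_unused int example_input_filter_frame(AVFilterLink *inlink, AVFrame *frame)
{
    AVFilterContext *ctx = inlink->dst;
    ExampleQSVFilterContext *priv = ctx->priv; /* hypothetical filter state */
    int ret;

    /* runs the VPP and forwards each produced output frame downstream */
    ret = ff_qsvvpp_filter_frame(priv->qsv, inlink, frame);
    /* the input frame is cloned internally, so the caller releases it */
    av_frame_free(&frame);

    return ret;
}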