FFmpeg
qsvvpp.c

/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Intel Quick Sync Video VPP base function
 */

#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/time.h"
#include "libavutil/pixdesc.h"

#include "internal.h"
#include "qsvvpp.h"
#include "video.h"

#define IS_VIDEO_MEMORY(mode)  (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                        MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)

typedef struct QSVFrame {
    AVFrame          *frame;
    mfxFrameSurface1 *surface;
    mfxFrameSurface1  surface_internal;  /* for system memory */
    struct QSVFrame  *next;
} QSVFrame;

/* abstract struct for all QSV filters */
struct QSVVPPContext {
    mfxSession          session;
    int (*filter_frame) (AVFilterLink *outlink, AVFrame *frame); /* callback */
    enum AVPixelFormat  out_sw_format;   /* real output format        */
    mfxVideoParam       vpp_param;
    mfxFrameInfo       *frame_infos;     /* frame info for each input */

    /* members related to the input/output surface */
    int                 in_mem_mode;
    int                 out_mem_mode;
    QSVFrame           *in_frame_list;
    QSVFrame           *out_frame_list;
    int                 nb_surface_ptrs_in;
    int                 nb_surface_ptrs_out;
    mfxFrameSurface1  **surface_ptrs_in;
    mfxFrameSurface1  **surface_ptrs_out;

    /* MFXVPP extension parameters */
    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer      **ext_buffers;
    int                 nb_ext_buffers;
};

static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};

static const AVRational default_tb = { 1, 90000 };

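The 90 kHz default_tb above is the MPEG system clock that libmfx expects in mfxFrameData.TimeStamp: submit_frame() rescales each input pts into it, and ff_qsvvpp_filter_frame() rescales the result back into the output link's time_base. A small worked example, given as an illustrative fragment rather than part of this file:

    /* A pts of 40 on a 1/25 s link time base is 1.6 s, i.e.
     * 40 * 90000 / 25 = 144000 ticks on the 90 kHz clock. */
    const AVRational example_link_tb = { 1, 25 };
    int64_t mfx_ts = av_rescale_q(40, example_link_tb, default_tb); /* == 144000 */
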
/* functions for frameAlloc */
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    QSVVPPContext *s = pthis;
    int            i;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;

    if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
        resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
        if (!resp->mids)
            return AVERROR(ENOMEM);

        for (i = 0; i < s->nb_surface_ptrs_in; i++)
            resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;

        resp->NumFrameActual = s->nb_surface_ptrs_in;
    } else {
        resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
        if (!resp->mids)
            return AVERROR(ENOMEM);

        for (i = 0; i < s->nb_surface_ptrs_out; i++)
            resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;

        resp->NumFrameActual = s->nb_surface_ptrs_out;
    }

    return MFX_ERR_NONE;
}

static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    av_freep(&resp->mids);
    return MFX_ERR_NONE;
}

static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}

static int pix_fmt_to_mfx_fourcc(int format)
{
    switch (format) {
    case AV_PIX_FMT_YUV420P:
        return MFX_FOURCC_YV12;
    case AV_PIX_FMT_NV12:
        return MFX_FOURCC_NV12;
    case AV_PIX_FMT_YUYV422:
        return MFX_FOURCC_YUY2;
    case AV_PIX_FMT_BGRA:
        return MFX_FOURCC_RGB4;
    }

    return MFX_FOURCC_NV12;
}

static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
        surface->Data.Y  = frame->data[0];
        surface->Data.UV = frame->data[1];
        break;
    case AV_PIX_FMT_YUV420P:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[1];
        surface->Data.V = frame->data[2];
        break;
    case AV_PIX_FMT_YUYV422:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.V = frame->data[0] + 3;
        break;
    case AV_PIX_FMT_RGB32:
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
    default:
        return MFX_ERR_UNSUPPORTED;
    }
    surface->Data.Pitch = frame->linesize[0];

    return 0;
}

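In the system-memory path, map_frame_to_surface() above only aliases the AVFrame's planes into the mfxFrameData pointers; no pixel data is copied, so the AVFrame has to outlive the surface. A minimal sketch of the same idea for NV12 (illustrative only, not part of this file; the helper name is made up):

static int wrap_nv12_frame(const AVFrame *frame, mfxFrameSurface1 *surf)
{
    if (frame->format != AV_PIX_FMT_NV12)
        return AVERROR(EINVAL);

    *surf = (mfxFrameSurface1){ 0 };
    surf->Data.Y     = frame->data[0];     /* luma plane                 */
    surf->Data.UV    = frame->data[1];     /* interleaved CbCr plane     */
    surf->Data.Pitch = frame->linesize[0]; /* bytes per row, both planes */
    return 0;
}
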
/* fill the surface info */
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
{
    enum AVPixelFormat        pix_fmt;
    AVHWFramesContext        *frames_ctx;
    AVQSVFramesContext       *frames_hwctx;
    const AVPixFmtDescriptor *desc;

    if (link->format == AV_PIX_FMT_QSV) {
        if (!link->hw_frames_ctx)
            return AVERROR(EINVAL);

        frames_ctx   = (AVHWFramesContext *)link->hw_frames_ctx->data;
        frames_hwctx = frames_ctx->hwctx;
        *frameinfo   = frames_hwctx->surfaces[0].Info;
    } else {
        pix_fmt = link->format;
        desc = av_pix_fmt_desc_get(pix_fmt);
        if (!desc)
            return AVERROR_BUG;

        frameinfo->CropX          = 0;
        frameinfo->CropY          = 0;
        frameinfo->Width          = FFALIGN(link->w, 32);
        frameinfo->Height         = FFALIGN(link->h, 32);
        frameinfo->PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
        frameinfo->FourCC         = pix_fmt_to_mfx_fourcc(pix_fmt);
        frameinfo->BitDepthLuma   = desc->comp[0].depth;
        frameinfo->BitDepthChroma = desc->comp[0].depth;
        frameinfo->Shift          = desc->comp[0].depth > 8;
        if (desc->log2_chroma_w && desc->log2_chroma_h)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
        else if (desc->log2_chroma_w)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
        else
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
    }

    frameinfo->CropW         = link->w;
    frameinfo->CropH         = link->h;
    frameinfo->FrameRateExtN = link->frame_rate.num;
    frameinfo->FrameRateExtD = link->frame_rate.den;
    frameinfo->AspectRatioW  = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
    frameinfo->AspectRatioH  = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;

    return 0;
}

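The chroma-format mapping in fill_frameinfo_by_link() comes straight from the pixel format descriptor: both chroma dimensions subsampled means 4:2:0, only the width subsampled means 4:2:2, and no subsampling (which includes packed RGB) falls through to 4:4:4. For the formats this file handles, the descriptor values work out as in the illustrative fragment below:

    /* log2_chroma_w / log2_chroma_h per supported format:
     *   AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P : 1 / 1 -> MFX_CHROMAFORMAT_YUV420
     *   AV_PIX_FMT_YUYV422                  : 1 / 0 -> MFX_CHROMAFORMAT_YUV422
     *   AV_PIX_FMT_BGRA                     : 0 / 0 -> MFX_CHROMAFORMAT_YUV444 */
    const AVPixFmtDescriptor *d = av_pix_fmt_desc_get(AV_PIX_FMT_NV12);
    av_log(NULL, AV_LOG_DEBUG, "chroma shift %d/%d\n",
           d->log2_chroma_w, d->log2_chroma_h); /* prints "chroma shift 1/1" */
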
static void clear_unused_frames(QSVFrame *list)
{
    while (list) {
        if (list->surface && !list->surface->Data.Locked) {
            list->surface = NULL;
            av_frame_free(&list->frame);
        }
        list = list->next;
    }
}

static void clear_frame_list(QSVFrame **list)
{
    while (*list) {
        QSVFrame *frame;

        frame = *list;
        *list = (*list)->next;
        av_frame_free(&frame->frame);
        av_freep(&frame);
    }
}

static QSVFrame *get_free_frame(QSVFrame **list)
{
    QSVFrame *out = *list;

    for (; out; out = out->next) {
        if (!out->surface)
            break;
    }

    if (!out) {
        out = av_mallocz(sizeof(*out));
        if (!out) {
            av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
            return NULL;
        }
        out->next = *list;
        *list     = out;
    }

    return out;
}

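in_frame_list and out_frame_list form a simple recycling pool: clear_unused_frames() drops the AVFrame behind every surface whose Data.Locked counter libmfx has released, and get_free_frame() hands back the first empty node, allocating a new one only while every surface is still in flight. The per-frame pattern, shown as a fragment (assuming s is the QSVVPPContext, as in submit_frame() below):

    /* Recycle whatever libmfx has finished with, then take a free slot. */
    clear_unused_frames(s->in_frame_list);
    QSVFrame *slot = get_free_frame(&s->in_frame_list);
    if (!slot)
        return NULL; /* allocation failure */
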
/* get the input surface */
static QSVFrame *submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    QSVFrame        *qsv_frame;
    AVFilterContext *ctx = inlink->dst;

    clear_unused_frames(s->in_frame_list);

    qsv_frame = get_free_frame(&s->in_frame_list);
    if (!qsv_frame)
        return NULL;

    /* Turn AVFrame into mfxFrameSurface1.
     * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
     * mfxFrameSurface1 is stored in AVFrame->data[3];
     * for system memory mode, raw video data is stored in the
     * AVFrame, so we map it into mfxFrameSurface1.
     */
    if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
        if (picref->format != AV_PIX_FMT_QSV) {
            av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
            return NULL;
        }
        qsv_frame->frame   = av_frame_clone(picref);
        qsv_frame->surface = (mfxFrameSurface1 *)qsv_frame->frame->data[3];
    } else {
        /* make a copy if the input is not padded as libmfx requires */
        if (picref->height & 31 || picref->linesize[0] & 31) {
            qsv_frame->frame = ff_get_video_buffer(inlink,
                                                   FFALIGN(inlink->w, 32),
                                                   FFALIGN(inlink->h, 32));
            if (!qsv_frame->frame)
                return NULL;

            qsv_frame->frame->width  = picref->width;
            qsv_frame->frame->height = picref->height;

            if (av_frame_copy(qsv_frame->frame, picref) < 0) {
                av_frame_free(&qsv_frame->frame);
                return NULL;
            }

            av_frame_copy_props(qsv_frame->frame, picref);
            av_frame_free(&picref);
        } else
            qsv_frame->frame = av_frame_clone(picref);

        if (map_frame_to_surface(qsv_frame->frame,
                                 &qsv_frame->surface_internal) < 0) {
            av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
            return NULL;
        }
        qsv_frame->surface = &qsv_frame->surface_internal;
    }

    qsv_frame->surface->Info           = s->frame_infos[FF_INLINK_IDX(inlink)];
    qsv_frame->surface->Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
                                                      inlink->time_base, default_tb);

    qsv_frame->surface->Info.PicStruct =
            !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                                 MFX_PICSTRUCT_FIELD_BFF);
    if (qsv_frame->frame->repeat_pict == 1)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
    else if (qsv_frame->frame->repeat_pict == 2)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
    else if (qsv_frame->frame->repeat_pict == 4)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

    return qsv_frame;
}

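The `picref->height & 31 || picref->linesize[0] & 31` test in submit_frame() is a power-of-two remainder check: a value is 32-aligned exactly when its low five bits are clear. For example, a 1080-line frame gives 1080 & 31 == 24, so it is copied into an FFALIGN(1080, 32) == 1088-line padded buffer, while a frame that is already 1088 lines high is passed through untouched. The equivalent, more explicit form, as an illustrative fragment:

    /* h & 31 behaves like h % 32 for the non-negative sizes used here. */
    int needs_padded_copy = (picref->height % 32) || (picref->linesize[0] % 32);
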
/* get the output surface */
static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    QSVFrame        *out_frame;
    int              ret;

    clear_unused_frames(s->out_frame_list);

    out_frame = get_free_frame(&s->out_frame_list);
    if (!out_frame)
        return NULL;

    /* For video memory, get a hw frame;
     * for system memory, get a sw frame and map it into an mfx surface. */
    if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
        out_frame->frame = av_frame_alloc();
        if (!out_frame->frame)
            return NULL;

        ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
            return NULL;
        }

        out_frame->surface = (mfxFrameSurface1 *)out_frame->frame->data[3];
    } else {
        /* Get a frame with aligned dimensions;
         * libmfx needs system memory to be 128x64 aligned. */
        out_frame->frame = ff_get_video_buffer(outlink,
                                               FFALIGN(outlink->w, 128),
                                               FFALIGN(outlink->h, 64));
        if (!out_frame->frame)
            return NULL;

        out_frame->frame->width  = outlink->w;
        out_frame->frame->height = outlink->h;

        ret = map_frame_to_surface(out_frame->frame,
                                   &out_frame->surface_internal);
        if (ret < 0)
            return NULL;

        out_frame->surface = &out_frame->surface_internal;
    }

    out_frame->surface->Info = s->vpp_param.vpp.Out;

    return out_frame;
}

/* create the QSV session */
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
{
    AVFilterLink                 *inlink = avctx->inputs[0];
    AVFilterLink                *outlink = avctx->outputs[0];
    AVQSVFramesContext  *in_frames_hwctx = NULL;
    AVQSVFramesContext *out_frames_hwctx = NULL;

    AVBufferRef *device_ref;
    AVHWDeviceContext *device_ctx;
    AVQSVDeviceContext *device_hwctx;
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;
    int ret, i;

    if (inlink->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;

        device_ref      = frames_ctx->device_ref;
        in_frames_hwctx = frames_ctx->hwctx;

        s->in_mem_mode = in_frames_hwctx->frame_type;

        s->surface_ptrs_in = av_mallocz_array(in_frames_hwctx->nb_surfaces,
                                              sizeof(*s->surface_ptrs_in));
        if (!s->surface_ptrs_in)
            return AVERROR(ENOMEM);

        for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;

        s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
    } else if (avctx->hw_device_ctx) {
        device_ref     = avctx->hw_device_ctx;
        s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
    } else {
        av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
        return AVERROR(EINVAL);
    }

    device_ctx   = (AVHWDeviceContext *)device_ref->data;
    device_hwctx = device_ctx->hwctx;

    if (outlink->format == AV_PIX_FMT_QSV) {
        AVHWFramesContext *out_frames_ctx;
        AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
        if (!out_frames_ref)
            return AVERROR(ENOMEM);

        s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
                          MFX_MEMTYPE_OPAQUE_FRAME :
                          MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

        out_frames_ctx   = (AVHWFramesContext *)out_frames_ref->data;
        out_frames_hwctx = out_frames_ctx->hwctx;

        out_frames_ctx->format            = AV_PIX_FMT_QSV;
        out_frames_ctx->width             = FFALIGN(outlink->w, 32);
        out_frames_ctx->height            = FFALIGN(outlink->h, 32);
        out_frames_ctx->sw_format         = s->out_sw_format;
        out_frames_ctx->initial_pool_size = 64;
        out_frames_hwctx->frame_type      = s->out_mem_mode;

        ret = av_hwframe_ctx_init(out_frames_ref);
        if (ret < 0) {
            av_buffer_unref(&out_frames_ref);
            av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
            return ret;
        }

        s->surface_ptrs_out = av_mallocz_array(out_frames_hwctx->nb_surfaces,
                                               sizeof(*s->surface_ptrs_out));
        if (!s->surface_ptrs_out) {
            av_buffer_unref(&out_frames_ref);
            return AVERROR(ENOMEM);
        }

        for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;

        av_buffer_unref(&outlink->hw_frames_ctx);
        outlink->hw_frames_ctx = out_frames_ref;
    } else
        s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;

    /* extract the properties of the "master" session given to us */
    ret = MFXQueryIMPL(device_hwctx->session, &impl);
    if (ret == MFX_ERR_NONE)
        ret = MFXQueryVersion(device_hwctx->session, &ver);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
        if (ret == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }

    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error getting the session handle\n");
        return AVERROR_UNKNOWN;
    }

    /* create a "slave" session with those same properties, to be used for vpp */
    ret = MFXInit(impl, &ver, &s->session);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing a session for scaling\n");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        ret = MFXJoinSession(device_hwctx->session, s->session);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->opaque_alloc.In.Surfaces   = s->surface_ptrs_in;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
        s->opaque_alloc.In.Type       = s->in_mem_mode;

        s->opaque_alloc.Out.Surfaces   = s->surface_ptrs_out;
        s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
        s->opaque_alloc.Out.Type       = s->out_mem_mode;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
    } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
        mfxFrameAllocator frame_allocator = {
            .pthis  = s,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    return 0;
}

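init_vpp_session() never creates a device of its own; it only queries and joins the "master" session that reaches the filter through inlink->hw_frames_ctx or avctx->hw_device_ctx. A hedged sketch of how a caller typically creates that device with the public libavutil API (not part of this file; the helper name is made up):

/* Create the QSV device whose MFX session init_vpp_session() will
 * query with MFXQueryIMPL()/MFXQueryVersion() and then join. */
static int create_qsv_device(AVBufferRef **hw_device_ctx)
{
    /* A NULL device string lets libavutil pick a default adapter. */
    return av_hwdevice_ctx_create(hw_device_ctx, AV_HWDEVICE_TYPE_QSV,
                                  NULL, NULL, 0);
}
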
int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
{
    int i;
    int ret;
    QSVVPPContext *s;

    s = av_mallocz(sizeof(*s));
    if (!s)
        return AVERROR(ENOMEM);

    s->filter_frame  = param->filter_frame;
    if (!s->filter_frame)
        s->filter_frame = ff_filter_frame;
    s->out_sw_format = param->out_sw_format;

    /* create the vpp session */
    ret = init_vpp_session(avctx, s);
    if (ret < 0)
        goto failed;

    s->frame_infos = av_mallocz_array(avctx->nb_inputs, sizeof(*s->frame_infos));
    if (!s->frame_infos) {
        ret = AVERROR(ENOMEM);
        goto failed;
    }

    /* Init each input's information */
    for (i = 0; i < avctx->nb_inputs; i++) {
        ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
        if (ret < 0)
            goto failed;
    }

    /* Update input's frame info according to crop */
    for (i = 0; i < param->num_crop; i++) {
        QSVVPPCrop *crop = param->crop + i;
        if (crop->in_idx > avctx->nb_inputs) {
            ret = AVERROR(EINVAL);
            goto failed;
        }
        s->frame_infos[crop->in_idx].CropX = crop->x;
        s->frame_infos[crop->in_idx].CropY = crop->y;
        s->frame_infos[crop->in_idx].CropW = crop->w;
        s->frame_infos[crop->in_idx].CropH = crop->h;
    }

    s->vpp_param.vpp.In = s->frame_infos[0];

    ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Fail to get frame info from link.\n");
        goto failed;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->nb_ext_buffers = param->num_ext_buf + 1;
        s->ext_buffers = av_mallocz_array(s->nb_ext_buffers, sizeof(*s->ext_buffers));
        if (!s->ext_buffers) {
            ret = AVERROR(ENOMEM);
            goto failed;
        }

        s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
        for (i = 1; i < param->num_ext_buf; i++)
            s->ext_buffers[i]    = param->ext_buf[i - 1];
        s->vpp_param.ExtParam    = s->ext_buffers;
        s->vpp_param.NumExtParam = s->nb_ext_buffers;
    } else {
        s->vpp_param.NumExtParam = param->num_ext_buf;
        s->vpp_param.ExtParam    = param->ext_buf;
    }

    s->vpp_param.AsyncDepth = 1;

    if (IS_SYSTEM_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;

    if (IS_SYSTEM_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;

    ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create a qsvvpp, ret = %d.\n", ret);
        goto failed;
    }

    *vpp = s;
    return 0;

failed:
    ff_qsvvpp_free(&s);

    return ret;
}

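A QSV filter drives this helper by filling a QSVVPPParam (declared in qsvvpp.h) and calling ff_qsvvpp_create() once the output link is configured. A hedged sketch of that setup; MyQSVFilterContext, my_ext_buf and my_config_output are hypothetical names for this example, and the extension buffer stands in for whatever mfxExtBuffer the concrete filter needs:

typedef struct MyQSVFilterContext {
    QSVVPPContext *qsv;        /* created by ff_qsvvpp_create()         */
    mfxExtBuffer  *my_ext_buf; /* e.g. a scaling/deinterlace ext buffer */
} MyQSVFilterContext;

static int my_config_output(AVFilterLink *outlink)
{
    AVFilterContext    *ctx   = outlink->src;
    MyQSVFilterContext *s     = ctx->priv;
    QSVVPPParam         param = { NULL };

    param.filter_frame  = NULL;            /* NULL selects ff_filter_frame()    */
    param.out_sw_format = AV_PIX_FMT_NV12; /* real layout behind AV_PIX_FMT_QSV */
    param.num_ext_buf   = 1;
    param.ext_buf       = &s->my_ext_buf;

    return ff_qsvvpp_create(ctx, &s->qsv, &param);
}
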
int ff_qsvvpp_free(QSVVPPContext **vpp)
{
    QSVVPPContext *s = *vpp;

    if (!s)
        return 0;

    if (s->session) {
        MFXVideoVPP_Close(s->session);
        MFXClose(s->session);
    }

    /* release all the resources */
    clear_frame_list(&s->in_frame_list);
    clear_frame_list(&s->out_frame_list);
    av_freep(&s->surface_ptrs_in);
    av_freep(&s->surface_ptrs_out);
    av_freep(&s->ext_buffers);
    av_freep(&s->frame_infos);
    av_freep(vpp);

    return 0;
}

int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    AVFilterContext  *ctx     = inlink->dst;
    AVFilterLink     *outlink = ctx->outputs[0];
    mfxSyncPoint      sync;
    QSVFrame         *in_frame, *out_frame;
    int               ret, filter_ret;

    in_frame = submit_frame(s, inlink, picref);
    if (!in_frame) {
        av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
               FF_INLINK_IDX(inlink));
        return AVERROR(ENOMEM);
    }

    do {
        out_frame = query_frame(s, outlink);
        if (!out_frame) {
            av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
            return AVERROR(ENOMEM);
        }

        do {
            ret = MFXVideoVPP_RunFrameVPPAsync(s->session, in_frame->surface,
                                               out_frame->surface, NULL, &sync);
            if (ret == MFX_WRN_DEVICE_BUSY)
                av_usleep(500);
        } while (ret == MFX_WRN_DEVICE_BUSY);

        if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
            /* Ignore more_data error */
            if (ret == MFX_ERR_MORE_DATA)
                ret = AVERROR(EAGAIN);
            break;
        }

        if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0)
            av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");

        out_frame->frame->pts = av_rescale_q(out_frame->surface->Data.TimeStamp,
                                             default_tb, outlink->time_base);

        filter_ret = s->filter_frame(outlink, out_frame->frame);
        if (filter_ret < 0) {
            av_frame_free(&out_frame->frame);
            ret = filter_ret;
            break;
        }
        out_frame->frame = NULL;
    } while (ret == MFX_ERR_MORE_SURFACE);

    return ret;
}
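
Seen from the filter, the whole per-frame path is a single call: ff_qsvvpp_filter_frame() submits the input, drains as many output surfaces as VPP produces, and forwards each one through the filter_frame callback; teardown is one ff_qsvvpp_free(). A hedged sketch reusing the hypothetical MyQSVFilterContext from the previous example:

static int my_filter_frame(AVFilterLink *inlink, AVFrame *picref)
{
    MyQSVFilterContext *s   = inlink->dst->priv;
    int                 ret = ff_qsvvpp_filter_frame(s->qsv, inlink, picref);

    /* Mirror the in-tree QSV filters, which release their own reference
     * after the call; the helper keeps a clone of what it still needs. */
    av_frame_free(&picref);
    return ret;
}

static void my_uninit(AVFilterContext *ctx)
{
    MyQSVFilterContext *s = ctx->priv;
    ff_qsvvpp_free(&s->qsv);
}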