qsvvpp.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Intel Quick Sync Video VPP base function
 */

#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/time.h"
#include "libavutil/pixdesc.h"

#include "internal.h"
#include "qsvvpp.h"
#include "video.h"

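/* libmfx can hold frames in three kinds of memory: plain system memory,
 * video (VA-API/D3D) memory, or opaque surfaces managed by the runtime.
 * These helpers classify a MFX_MEMTYPE_* bitmask into one of those modes. */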
#define IS_VIDEO_MEMORY(mode)  (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                        MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)

typedef struct QSVFrame {
    AVFrame          *frame;
    mfxFrameSurface1 *surface;
    mfxFrameSurface1  surface_internal;  /* for system memory */
    struct QSVFrame  *next;
} QSVFrame;

/* abstract struct for all QSV filters */
struct QSVVPPContext {
    mfxSession          session;
    int (*filter_frame) (AVFilterLink *outlink, AVFrame *frame); /* callback */
    enum AVPixelFormat  out_sw_format;   /* real output format */
    mfxVideoParam       vpp_param;
    mfxFrameInfo       *frame_infos;     /* frame info for each input */

    /* members related to the input/output surface */
    int                 in_mem_mode;
    int                 out_mem_mode;
    QSVFrame           *in_frame_list;
    QSVFrame           *out_frame_list;
    int                 nb_surface_ptrs_in;
    int                 nb_surface_ptrs_out;
    mfxFrameSurface1  **surface_ptrs_in;
    mfxFrameSurface1  **surface_ptrs_out;

    /* MFXVPP extern parameters */
    mfxExtOpaqueSurfaceAlloc    opaque_alloc;
    mfxExtBuffer      **ext_buffers;
    int                 nb_ext_buffers;
};

static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};

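/* libmfx timestamps are expressed in the 90 kHz MPEG clock; frame pts are
 * rescaled from the link time base into default_tb on submission and back
 * into the output link time base once processing is done. */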
static const AVRational default_tb = { 1, 90000 };

/* functions for frameAlloc */
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    QSVVPPContext *s = pthis;
    int i;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;

    if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
        resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
        if (!resp->mids)
            return AVERROR(ENOMEM);

        for (i = 0; i < s->nb_surface_ptrs_in; i++)
            resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;

        resp->NumFrameActual = s->nb_surface_ptrs_in;
    } else {
        resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
        if (!resp->mids)
            return AVERROR(ENOMEM);

        for (i = 0; i < s->nb_surface_ptrs_out; i++)
            resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;

        resp->NumFrameActual = s->nb_surface_ptrs_out;
    }

    return MFX_ERR_NONE;
}

static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    av_freep(&resp->mids);
    return MFX_ERR_NONE;
}

static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}

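/* map an AVPixelFormat to the libmfx FourCC used in mfxFrameInfo;
 * unknown formats fall back to NV12 */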
static int pix_fmt_to_mfx_fourcc(int format)
{
    switch (format) {
    case AV_PIX_FMT_YUV420P:
        return MFX_FOURCC_YV12;
    case AV_PIX_FMT_NV12:
        return MFX_FOURCC_NV12;
    case AV_PIX_FMT_YUYV422:
        return MFX_FOURCC_YUY2;
    case AV_PIX_FMT_RGB32:
        return MFX_FOURCC_RGB4;
    }

    return MFX_FOURCC_NV12;
}

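/* point an mfxFrameSurface1 at the data planes of a system-memory AVFrame
 * without copying; libmfx keeps a single pitch, taken from plane 0 */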
static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
        surface->Data.Y  = frame->data[0];
        surface->Data.UV = frame->data[1];
        break;
    case AV_PIX_FMT_YUV420P:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[1];
        surface->Data.V = frame->data[2];
        break;
    case AV_PIX_FMT_YUYV422:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.V = frame->data[0] + 3;
        break;
    case AV_PIX_FMT_RGB32:
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
    default:
        return MFX_ERR_UNSUPPORTED;
    }
    surface->Data.Pitch = frame->linesize[0];

    return 0;
}

/* fill the surface info */
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
{
    enum AVPixelFormat        pix_fmt;
    AVHWFramesContext        *frames_ctx;
    AVQSVFramesContext       *frames_hwctx;
    const AVPixFmtDescriptor *desc;

    if (link->format == AV_PIX_FMT_QSV) {
        if (!link->hw_frames_ctx)
            return AVERROR(EINVAL);

        frames_ctx   = (AVHWFramesContext *)link->hw_frames_ctx->data;
        frames_hwctx = frames_ctx->hwctx;
        *frameinfo   = frames_hwctx->surfaces[0].Info;
    } else {
        pix_fmt = link->format;
        desc    = av_pix_fmt_desc_get(pix_fmt);
        if (!desc)
            return AVERROR_BUG;

        frameinfo->CropX          = 0;
        frameinfo->CropY          = 0;
        frameinfo->Width          = FFALIGN(link->w, 32);
        frameinfo->Height         = FFALIGN(link->h, 32);
        frameinfo->PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
        frameinfo->FourCC         = pix_fmt_to_mfx_fourcc(pix_fmt);
        frameinfo->BitDepthLuma   = desc->comp[0].depth;
        frameinfo->BitDepthChroma = desc->comp[0].depth;
        frameinfo->Shift          = desc->comp[0].depth > 8;
        if (desc->log2_chroma_w && desc->log2_chroma_h)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
        else if (desc->log2_chroma_w)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
        else
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
    }

    frameinfo->CropW         = link->w;
    frameinfo->CropH         = link->h;
    frameinfo->FrameRateExtN = link->frame_rate.num;
    frameinfo->FrameRateExtD = link->frame_rate.den;
    frameinfo->AspectRatioW  = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
    frameinfo->AspectRatioH  = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;

    return 0;
}

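/* release AVFrames whose surfaces libmfx no longer holds locked;
 * the list nodes themselves are kept for reuse */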
static void clear_unused_frames(QSVFrame *list)
{
    while (list) {
        if (list->surface && !list->surface->Data.Locked) {
            list->surface = NULL;
            av_frame_free(&list->frame);
        }
        list = list->next;
    }
}

static void clear_frame_list(QSVFrame **list)
{
    while (*list) {
        QSVFrame *frame;

        frame = *list;
        *list = (*list)->next;
        av_frame_free(&frame->frame);
        av_freep(&frame);
    }
}

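/* return the first list entry with no surface in flight,
 * allocating and linking a new node when the list is exhausted */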
static QSVFrame *get_free_frame(QSVFrame **list)
{
    QSVFrame *out = *list;

    for (; out; out = out->next) {
        if (!out->surface)
            break;
    }

    if (!out) {
        out = av_mallocz(sizeof(*out));
        if (!out) {
            av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
            return NULL;
        }
        out->next = *list;
        *list     = out;
    }

    return out;
}

/* get the input surface */
static QSVFrame *submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    QSVFrame        *qsv_frame;
    AVFilterContext *ctx = inlink->dst;

    clear_unused_frames(s->in_frame_list);

    qsv_frame = get_free_frame(&s->in_frame_list);
    if (!qsv_frame)
        return NULL;

    /* Turn AVFrame into mfxFrameSurface1.
     * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
     * mfxFrameSurface1 is stored in AVFrame->data[3];
     * for system memory mode, raw video data is stored in
     * AVFrame, so we map it into mfxFrameSurface1.
     */
    if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
        if (picref->format != AV_PIX_FMT_QSV) {
            av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
            return NULL;
        }
        qsv_frame->frame   = av_frame_clone(picref);
        qsv_frame->surface = (mfxFrameSurface1 *)qsv_frame->frame->data[3];
    } else {
        /* make a copy if the input is not padded as libmfx requires */
        if (picref->height & 31 || picref->linesize[0] & 31) {
            qsv_frame->frame = ff_get_video_buffer(inlink,
                                                   FFALIGN(inlink->w, 32),
                                                   FFALIGN(inlink->h, 32));
            if (!qsv_frame->frame)
                return NULL;

            qsv_frame->frame->width  = picref->width;
            qsv_frame->frame->height = picref->height;

            if (av_frame_copy(qsv_frame->frame, picref) < 0) {
                av_frame_free(&qsv_frame->frame);
                return NULL;
            }

            av_frame_copy_props(qsv_frame->frame, picref);
            av_frame_free(&picref);
        } else
            qsv_frame->frame = av_frame_clone(picref);

        if (map_frame_to_surface(qsv_frame->frame,
                                 &qsv_frame->surface_internal) < 0) {
            av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
            return NULL;
        }
        qsv_frame->surface = &qsv_frame->surface_internal;
    }

    qsv_frame->surface->Info           = s->frame_infos[FF_INLINK_IDX(inlink)];
    qsv_frame->surface->Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
                                                      inlink->time_base, default_tb);

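    /* translate AVFrame interlacing and repeat flags into mfx PicStruct bits */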
    qsv_frame->surface->Info.PicStruct =
            !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                                 MFX_PICSTRUCT_FIELD_BFF);
    if (qsv_frame->frame->repeat_pict == 1)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
    else if (qsv_frame->frame->repeat_pict == 2)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
    else if (qsv_frame->frame->repeat_pict == 4)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

    return qsv_frame;
}

/* get the output surface */
static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    QSVFrame        *out_frame;
    int              ret;

    clear_unused_frames(s->out_frame_list);

    out_frame = get_free_frame(&s->out_frame_list);
    if (!out_frame)
        return NULL;

    /* For video memory, get a hw frame;
     * for system memory, get a sw frame and map it into a mfx_surface. */
    if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
        out_frame->frame = av_frame_alloc();
        if (!out_frame->frame)
            return NULL;

        ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
            return NULL;
        }

        out_frame->surface = (mfxFrameSurface1 *)out_frame->frame->data[3];
    } else {
        /* Get a frame with aligned dimensions.
         * Libmfx needs system memory to be 128x64 aligned. */
        out_frame->frame = ff_get_video_buffer(outlink,
                                               FFALIGN(outlink->w, 128),
                                               FFALIGN(outlink->h, 64));
        if (!out_frame->frame)
            return NULL;

        out_frame->frame->width  = outlink->w;
        out_frame->frame->height = outlink->h;

        ret = map_frame_to_surface(out_frame->frame,
                                   &out_frame->surface_internal);
        if (ret < 0)
            return NULL;

        out_frame->surface = &out_frame->surface_internal;
    }

    out_frame->surface->Info = s->vpp_param.vpp.Out;

    return out_frame;
}

/* create the QSV session */
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
{
    AVFilterLink                 *inlink = avctx->inputs[0];
    AVFilterLink                *outlink = avctx->outputs[0];
    AVQSVFramesContext  *in_frames_hwctx = NULL;
    AVQSVFramesContext *out_frames_hwctx = NULL;

    AVBufferRef        *device_ref;
    AVHWDeviceContext  *device_ctx;
    AVQSVDeviceContext *device_hwctx;
    mfxHDL              handle;
    mfxHandleType       handle_type;
    mfxVersion          ver;
    mfxIMPL             impl;
    int                 ret, i;

    if (inlink->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;

        device_ref      = frames_ctx->device_ref;
        in_frames_hwctx = frames_ctx->hwctx;

        s->in_mem_mode = in_frames_hwctx->frame_type;

        s->surface_ptrs_in = av_mallocz_array(in_frames_hwctx->nb_surfaces,
                                              sizeof(*s->surface_ptrs_in));
        if (!s->surface_ptrs_in)
            return AVERROR(ENOMEM);

        for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;

        s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
    } else if (avctx->hw_device_ctx) {
        device_ref     = avctx->hw_device_ctx;
        s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
    } else {
        av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
        return AVERROR(EINVAL);
    }

    device_ctx   = (AVHWDeviceContext *)device_ref->data;
    device_hwctx = device_ctx->hwctx;

    if (outlink->format == AV_PIX_FMT_QSV) {
        AVHWFramesContext *out_frames_ctx;
        AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
        if (!out_frames_ref)
            return AVERROR(ENOMEM);

        s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
                          MFX_MEMTYPE_OPAQUE_FRAME :
                          MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

        out_frames_ctx   = (AVHWFramesContext *)out_frames_ref->data;
        out_frames_hwctx = out_frames_ctx->hwctx;

        out_frames_ctx->format            = AV_PIX_FMT_QSV;
        out_frames_ctx->width             = FFALIGN(outlink->w, 32);
        out_frames_ctx->height            = FFALIGN(outlink->h, 32);
        out_frames_ctx->sw_format         = s->out_sw_format;
        out_frames_ctx->initial_pool_size = 64;
        out_frames_hwctx->frame_type      = s->out_mem_mode;

        ret = av_hwframe_ctx_init(out_frames_ref);
        if (ret < 0) {
            av_buffer_unref(&out_frames_ref);
            av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
            return ret;
        }

        s->surface_ptrs_out = av_mallocz_array(out_frames_hwctx->nb_surfaces,
                                               sizeof(*s->surface_ptrs_out));
        if (!s->surface_ptrs_out) {
            av_buffer_unref(&out_frames_ref);
            return AVERROR(ENOMEM);
        }

        for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;

        av_buffer_unref(&outlink->hw_frames_ctx);
        outlink->hw_frames_ctx = out_frames_ref;
    } else
        s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;

    /* extract the properties of the "master" session given to us */
    ret = MFXQueryIMPL(device_hwctx->session, &impl);
    if (ret == MFX_ERR_NONE)
        ret = MFXQueryVersion(device_hwctx->session, &ver);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
        if (ret == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }

    /* create a "slave" session with those same properties, to be used for vpp */
    ret = MFXInit(impl, &ver, &s->session);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing a session for scaling\n");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        ret = MFXJoinSession(device_hwctx->session, s->session);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

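    /* For opaque memory, the surface pool is described to libmfx through an
     * mfxExtOpaqueSurfaceAlloc buffer attached later to the VPP parameters;
     * for video memory, a custom allocator hands out the existing MemIds.
     * System memory needs neither. */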
    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->opaque_alloc.In.Surfaces   = s->surface_ptrs_in;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
        s->opaque_alloc.In.Type       = s->in_mem_mode;

        s->opaque_alloc.Out.Surfaces   = s->surface_ptrs_out;
        s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
        s->opaque_alloc.Out.Type       = s->out_mem_mode;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
    } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
        mfxFrameAllocator frame_allocator = {
            .pthis  = s,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    return 0;
}

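/* Create the VPP context for a filter.
 *
 * A rough usage sketch, with illustrative values only (every QSV filter
 * wires this up slightly differently in its own config/uninit callbacks):
 *
 *     QSVVPPParam param = { NULL };
 *
 *     param.filter_frame  = NULL;            // NULL selects ff_filter_frame()
 *     param.out_sw_format = AV_PIX_FMT_NV12;
 *     param.num_ext_buf   = 0;
 *     param.ext_buf       = NULL;
 *     ret = ff_qsvvpp_create(ctx, &s->qsv, &param);
 *
 * Each input frame is then forwarded to ff_qsvvpp_filter_frame(), and the
 * context is released with ff_qsvvpp_free().
 */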
int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
{
    int i;
    int ret;
    QSVVPPContext *s;

    s = av_mallocz(sizeof(*s));
    if (!s)
        return AVERROR(ENOMEM);

    s->filter_frame = param->filter_frame;
    if (!s->filter_frame)
        s->filter_frame = ff_filter_frame;
    s->out_sw_format = param->out_sw_format;

    /* create the vpp session */
    ret = init_vpp_session(avctx, s);
    if (ret < 0)
        goto failed;

    s->frame_infos = av_mallocz_array(avctx->nb_inputs, sizeof(*s->frame_infos));
    if (!s->frame_infos) {
        ret = AVERROR(ENOMEM);
        goto failed;
    }

    /* init each input's frame information */
    for (i = 0; i < avctx->nb_inputs; i++) {
        ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
        if (ret < 0)
            goto failed;
    }

    /* update the input's frame info according to the crop */
    for (i = 0; i < param->num_crop; i++) {
        QSVVPPCrop *crop = param->crop + i;
        if (crop->in_idx > avctx->nb_inputs) {
            ret = AVERROR(EINVAL);
            goto failed;
        }
        s->frame_infos[crop->in_idx].CropX = crop->x;
        s->frame_infos[crop->in_idx].CropY = crop->y;
        s->frame_infos[crop->in_idx].CropW = crop->w;
        s->frame_infos[crop->in_idx].CropH = crop->h;
    }

    s->vpp_param.vpp.In = s->frame_infos[0];

    ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to get frame info from link.\n");
        goto failed;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->nb_ext_buffers = param->num_ext_buf + 1;
        s->ext_buffers = av_mallocz_array(s->nb_ext_buffers, sizeof(*s->ext_buffers));
        if (!s->ext_buffers) {
            ret = AVERROR(ENOMEM);
            goto failed;
        }

        s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
        for (i = 1; i < param->num_ext_buf; i++)
            s->ext_buffers[i]    = param->ext_buf[i - 1];
        s->vpp_param.ExtParam    = s->ext_buffers;
        s->vpp_param.NumExtParam = s->nb_ext_buffers;
    } else {
        s->vpp_param.NumExtParam = param->num_ext_buf;
        s->vpp_param.ExtParam    = param->ext_buf;
    }

    s->vpp_param.AsyncDepth = 1;

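    /* tell libmfx where the input and output surfaces live */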
    if (IS_SYSTEM_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;

    if (IS_SYSTEM_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;

    ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create a qsvvpp, ret = %d.\n", ret);
        goto failed;
    }

    *vpp = s;
    return 0;

failed:
    ff_qsvvpp_free(&s);

    return ret;
}

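/* close the VPP and its session, then release the frame lists, surface
 * arrays and extension buffers */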
int ff_qsvvpp_free(QSVVPPContext **vpp)
{
    QSVVPPContext *s = *vpp;

    if (!s)
        return 0;

    if (s->session) {
        MFXVideoVPP_Close(s->session);
        MFXClose(s->session);
    }

    /* release all the resources */
    clear_frame_list(&s->in_frame_list);
    clear_frame_list(&s->out_frame_list);
    av_freep(&s->surface_ptrs_in);
    av_freep(&s->surface_ptrs_out);
    av_freep(&s->ext_buffers);
    av_freep(&s->frame_infos);
    av_freep(vpp);

    return 0;
}

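/* Run one input frame through the VPP.  A single input may produce several
 * outputs (libmfx returns MFX_ERR_MORE_SURFACE, e.g. when deinterlacing
 * doubles the frame rate), so the loop keeps querying output surfaces until
 * libmfx stops asking for more. */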
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    AVFilterContext *ctx     = inlink->dst;
    AVFilterLink    *outlink = ctx->outputs[0];
    mfxSyncPoint     sync;
    QSVFrame        *in_frame, *out_frame;
    int              ret, filter_ret;

    in_frame = submit_frame(s, inlink, picref);
    if (!in_frame) {
        av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
               FF_INLINK_IDX(inlink));
        return AVERROR(ENOMEM);
    }

    do {
        out_frame = query_frame(s, outlink);
        if (!out_frame) {
            av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
            return AVERROR(ENOMEM);
        }

        do {
            ret = MFXVideoVPP_RunFrameVPPAsync(s->session, in_frame->surface,
                                               out_frame->surface, NULL, &sync);
            if (ret == MFX_WRN_DEVICE_BUSY)
                av_usleep(500);
        } while (ret == MFX_WRN_DEVICE_BUSY);

        if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
            /* ignore MFX_ERR_MORE_DATA: more input is simply needed */
            if (ret == MFX_ERR_MORE_DATA)
                ret = AVERROR(EAGAIN);
            break;
        }

        if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0)
            av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");

        out_frame->frame->pts = av_rescale_q(out_frame->surface->Data.TimeStamp,
                                             default_tb, outlink->time_base);

        filter_ret = s->filter_frame(outlink, out_frame->frame);
        if (filter_ret < 0) {
            av_frame_free(&out_frame->frame);
            ret = filter_ret;
            break;
        }
        out_frame->frame = NULL;
    } while (ret == MFX_ERR_MORE_SURFACE);

    return ret;
}