FFmpeg
qsvvpp.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 /**
20  * @file
21  * Intel Quick Sync Video VPP base function
22  */
23 
24 #include "libavutil/common.h"
25 #include "libavutil/mathematics.h"
26 #include "libavutil/hwcontext.h"
27 #include "libavutil/hwcontext_qsv.h"
28 #include "libavutil/time.h"
29 #include "libavutil/pixdesc.h"
30 
31 #include "internal.h"
32 #include "qsvvpp.h"
33 #include "video.h"
34 
35 #define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
36  MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
37 #define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
38 #define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
39 
40 typedef struct QSVFrame {
41  AVFrame *frame;
42  mfxFrameSurface1 *surface;
43  mfxFrameSurface1 surface_internal; /* for system memory */
44  struct QSVFrame *next;
45 } QSVFrame;
46 
47 /* abstract struct for all QSV filters */
48 struct QSVVPPContext {
49  mfxSession session;
50  int (*filter_frame) (AVFilterLink *outlink, AVFrame *frame);/* callback */
51  enum AVPixelFormat out_sw_format; /* Real output format */
52  mfxVideoParam vpp_param;
53  mfxFrameInfo *frame_infos; /* frame info for each input */
54 
55  /* members related to the input/output surface */
 56  int in_mem_mode;
 57  int out_mem_mode;
 58  QSVFrame *in_frame_list;
 59  QSVFrame *out_frame_list;
 60  int nb_surface_ptrs_in;
 61  int nb_surface_ptrs_out;
 62  mfxFrameSurface1 **surface_ptrs_in;
63  mfxFrameSurface1 **surface_ptrs_out;
64 
65  /* MFXVPP extern parameters */
66  mfxExtOpaqueSurfaceAlloc opaque_alloc;
67  mfxExtBuffer **ext_buffers;
 68  int nb_ext_buffers;
 69 };
70 
71 static const mfxHandleType handle_types[] = {
72  MFX_HANDLE_VA_DISPLAY,
73  MFX_HANDLE_D3D9_DEVICE_MANAGER,
74  MFX_HANDLE_D3D11_DEVICE,
75 };
76 
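/* Note: libmfx expresses surface timestamps in a 90 kHz clock; frame pts are
 * rescaled to and from this time base around each VPP call (see submit_frame()
 * and ff_qsvvpp_filter_frame() below). */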
77 static const AVRational default_tb = { 1, 90000 };
78 
79 static const struct {
 80  int mfx_iopattern;
 81  const char *desc;
82 } qsv_iopatterns[] = {
83  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
84  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
85  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
86  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
87  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
88  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
89 };
90 
 91 int ff_qsvvpp_print_iopattern(void *log_ctx, int mfx_iopattern,
 92  const char *extra_string)
93 {
94  const char *desc = NULL;
95 
96  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
 97  if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
 98  desc = qsv_iopatterns[i].desc;
99  }
100  }
101  if (!desc)
102  desc = "unknown iopattern";
103 
104  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
105  return 0;
106 }
107 
108 static const struct {
109  mfxStatus mfxerr;
110  int averr;
111  const char *desc;
112 } qsv_errors[] = {
113  { MFX_ERR_NONE, 0, "success" },
114  { MFX_ERR_UNKNOWN, AVERROR_UNKNOWN, "unknown error" },
115  { MFX_ERR_NULL_PTR, AVERROR(EINVAL), "NULL pointer" },
116  { MFX_ERR_UNSUPPORTED, AVERROR(ENOSYS), "unsupported" },
117  { MFX_ERR_MEMORY_ALLOC, AVERROR(ENOMEM), "failed to allocate memory" },
118  { MFX_ERR_NOT_ENOUGH_BUFFER, AVERROR(ENOMEM), "insufficient input/output buffer" },
119  { MFX_ERR_INVALID_HANDLE, AVERROR(EINVAL), "invalid handle" },
120  { MFX_ERR_LOCK_MEMORY, AVERROR(EIO), "failed to lock the memory block" },
121  { MFX_ERR_NOT_INITIALIZED, AVERROR_BUG, "not initialized" },
122  { MFX_ERR_NOT_FOUND, AVERROR(ENOSYS), "specified object was not found" },
123  /* the following 3 errors should always be handled explicitly, so those "mappings"
124  * are for completeness only */
125  { MFX_ERR_MORE_DATA, AVERROR_UNKNOWN, "expect more data at input" },
126  { MFX_ERR_MORE_SURFACE, AVERROR_UNKNOWN, "expect more surface at output" },
127  { MFX_ERR_MORE_BITSTREAM, AVERROR_UNKNOWN, "expect more bitstream at output" },
128  { MFX_ERR_ABORTED, AVERROR_UNKNOWN, "operation aborted" },
129  { MFX_ERR_DEVICE_LOST, AVERROR(EIO), "device lost" },
130  { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters" },
131  { MFX_ERR_INVALID_VIDEO_PARAM, AVERROR(EINVAL), "invalid video parameters" },
132  { MFX_ERR_UNDEFINED_BEHAVIOR, AVERROR_BUG, "undefined behavior" },
133  { MFX_ERR_DEVICE_FAILED, AVERROR(EIO), "device failed" },
134  { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters" },
135  { MFX_ERR_INVALID_AUDIO_PARAM, AVERROR(EINVAL), "invalid audio parameters" },
136 
137  { MFX_WRN_IN_EXECUTION, 0, "operation in execution" },
138  { MFX_WRN_DEVICE_BUSY, 0, "device busy" },
139  { MFX_WRN_VIDEO_PARAM_CHANGED, 0, "video parameters changed" },
140  { MFX_WRN_PARTIAL_ACCELERATION, 0, "partial acceleration" },
141  { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0, "incompatible video parameters" },
142  { MFX_WRN_VALUE_NOT_CHANGED, 0, "value is saturated" },
143  { MFX_WRN_OUT_OF_RANGE, 0, "value out of range" },
144  { MFX_WRN_FILTER_SKIPPED, 0, "filter skipped" },
145  { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0, "incompatible audio parameters" },
146 };
147 
148 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
149 {
150  int i;
151  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
152  if (qsv_errors[i].mfxerr == mfx_err) {
153  if (desc)
154  *desc = qsv_errors[i].desc;
155  return qsv_errors[i].averr;
156  }
157  }
158  if (desc)
159  *desc = "unknown error";
160  return AVERROR_UNKNOWN;
161 }
162 
163 int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err,
164  const char *error_string)
165 {
166  const char *desc;
167  int ret;
168  ret = qsv_map_error(err, &desc);
169  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
170  return ret;
171 }
172 
173 int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err,
174  const char *warning_string)
175 {
176  const char *desc;
177  int ret;
178  ret = qsv_map_error(err, &desc);
179  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
180  return ret;
181 }
182 
183 /* functions for frameAlloc */
184 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
185  mfxFrameAllocResponse *resp)
186 {
187  QSVVPPContext *s = pthis;
188  int i;
189 
190  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
191  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
192  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
193  return MFX_ERR_UNSUPPORTED;
194 
195  if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
196  resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
197  if (!resp->mids)
198  return AVERROR(ENOMEM);
199 
200  for (i = 0; i < s->nb_surface_ptrs_in; i++)
201  resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;
202 
203  resp->NumFrameActual = s->nb_surface_ptrs_in;
204  } else {
205  resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
206  if (!resp->mids)
207  return AVERROR(ENOMEM);
208 
209  for (i = 0; i < s->nb_surface_ptrs_out; i++)
210  resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;
211 
212  resp->NumFrameActual = s->nb_surface_ptrs_out;
213  }
214 
215  return MFX_ERR_NONE;
216 }
217 
218 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
219 {
220  av_freep(&resp->mids);
221  return MFX_ERR_NONE;
222 }
223 
224 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
225 {
226  return MFX_ERR_UNSUPPORTED;
227 }
228 
229 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
230 {
231  return MFX_ERR_UNSUPPORTED;
232 }
233 
234 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
235 {
236  *hdl = mid;
237  return MFX_ERR_NONE;
238 }
239 
 240 static int pix_fmt_to_mfx_fourcc(int format)
 241 {
242  switch (format) {
243  case AV_PIX_FMT_YUV420P:
244  return MFX_FOURCC_YV12;
245  case AV_PIX_FMT_NV12:
246  return MFX_FOURCC_NV12;
247  case AV_PIX_FMT_YUYV422:
248  return MFX_FOURCC_YUY2;
249  case AV_PIX_FMT_BGRA:
250  return MFX_FOURCC_RGB4;
251  }
252 
253  return MFX_FOURCC_NV12;
254 }
255 
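/* Point the mfxFrameData plane pointers of a surface at the data planes of a
 * system-memory AVFrame. No copy is made, so the AVFrame must stay valid while
 * libmfx reads or writes the surface. */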
256 static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
257 {
258  switch (frame->format) {
259  case AV_PIX_FMT_NV12:
260  case AV_PIX_FMT_P010:
261  surface->Data.Y = frame->data[0];
262  surface->Data.UV = frame->data[1];
263  break;
264  case AV_PIX_FMT_YUV420P:
265  surface->Data.Y = frame->data[0];
266  surface->Data.U = frame->data[1];
267  surface->Data.V = frame->data[2];
268  break;
269  case AV_PIX_FMT_YUYV422:
270  surface->Data.Y = frame->data[0];
271  surface->Data.U = frame->data[0] + 1;
272  surface->Data.V = frame->data[0] + 3;
273  break;
274  case AV_PIX_FMT_RGB32:
275  surface->Data.B = frame->data[0];
276  surface->Data.G = frame->data[0] + 1;
277  surface->Data.R = frame->data[0] + 2;
278  surface->Data.A = frame->data[0] + 3;
279  break;
280  default:
281  return MFX_ERR_UNSUPPORTED;
282  }
283  surface->Data.Pitch = frame->linesize[0];
284 
285  return 0;
286 }
287 
288 /* fill the surface info */
289 static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
290 {
291  enum AVPixelFormat pix_fmt;
292  AVHWFramesContext *frames_ctx;
293  AVQSVFramesContext *frames_hwctx;
294  const AVPixFmtDescriptor *desc;
295 
296  if (link->format == AV_PIX_FMT_QSV) {
297  if (!link->hw_frames_ctx)
298  return AVERROR(EINVAL);
299 
300  frames_ctx = (AVHWFramesContext *)link->hw_frames_ctx->data;
301  frames_hwctx = frames_ctx->hwctx;
302  *frameinfo = frames_hwctx->surfaces[0].Info;
303  } else {
304  pix_fmt = link->format;
 305  desc = av_pix_fmt_desc_get(pix_fmt);
 306  if (!desc)
307  return AVERROR_BUG;
308 
309  frameinfo->CropX = 0;
310  frameinfo->CropY = 0;
311  frameinfo->Width = FFALIGN(link->w, 32);
312  frameinfo->Height = FFALIGN(link->h, 32);
313  frameinfo->PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
314  frameinfo->FourCC = pix_fmt_to_mfx_fourcc(pix_fmt);
315  frameinfo->BitDepthLuma = desc->comp[0].depth;
316  frameinfo->BitDepthChroma = desc->comp[0].depth;
317  frameinfo->Shift = desc->comp[0].depth > 8;
318  if (desc->log2_chroma_w && desc->log2_chroma_h)
319  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
320  else if (desc->log2_chroma_w)
321  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
322  else
323  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
324  }
325 
326  frameinfo->CropW = link->w;
327  frameinfo->CropH = link->h;
328  frameinfo->FrameRateExtN = link->frame_rate.num;
329  frameinfo->FrameRateExtD = link->frame_rate.den;
330  frameinfo->AspectRatioW = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
331  frameinfo->AspectRatioH = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;
332 
333  return 0;
334 }
335 
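/* The in/out frame lists below act as a small recycling pool: entries whose
 * surfaces libmfx no longer holds locked are released by clear_unused_frames(),
 * and get_free_frame() either reuses an empty slot or prepends a new one. */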
 336 static void clear_unused_frames(QSVFrame *list)
 337 {
338  while (list) {
339  if (list->surface && !list->surface->Data.Locked) {
340  list->surface = NULL;
341  av_frame_free(&list->frame);
342  }
343  list = list->next;
344  }
345 }
346 
 347 static void clear_frame_list(QSVFrame **list)
 348 {
349  while (*list) {
350  QSVFrame *frame;
351 
352  frame = *list;
353  *list = (*list)->next;
354  av_frame_free(&frame->frame);
355  av_freep(&frame);
356  }
357 }
358 
 359 static QSVFrame *get_free_frame(QSVFrame **list)
 360 {
361  QSVFrame *out = *list;
362 
363  for (; out; out = out->next) {
364  if (!out->surface)
365  break;
366  }
367 
368  if (!out) {
369  out = av_mallocz(sizeof(*out));
370  if (!out) {
371  av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
372  return NULL;
373  }
374  out->next = *list;
375  *list = out;
376  }
377 
378  return out;
379 }
380 
381 /* get the input surface */
 382 static QSVFrame *submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
 383 {
384  QSVFrame *qsv_frame;
385  AVFilterContext *ctx = inlink->dst;
386 
387  clear_unused_frames(s->in_frame_list);
388 
389  qsv_frame = get_free_frame(&s->in_frame_list);
390  if (!qsv_frame)
391  return NULL;
392 
393  /* Turn AVFrame into mfxFrameSurface1.
394  * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
395  * mfxFrameSurface1 is stored in AVFrame->data[3];
396  * for system memory mode, raw video data is stored in
397  * AVFrame, we should map it into mfxFrameSurface1.
398  */
399  if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
400  if (picref->format != AV_PIX_FMT_QSV) {
401  av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
402  return NULL;
403  }
404  qsv_frame->frame = av_frame_clone(picref);
405  qsv_frame->surface = (mfxFrameSurface1 *)qsv_frame->frame->data[3];
406  } else {
407  /* make a copy if the input is not padded as libmfx requires */
408  if (picref->height & 31 || picref->linesize[0] & 31) {
409  qsv_frame->frame = ff_get_video_buffer(inlink,
410  FFALIGN(inlink->w, 32),
411  FFALIGN(inlink->h, 32));
412  if (!qsv_frame->frame)
413  return NULL;
414 
415  qsv_frame->frame->width = picref->width;
416  qsv_frame->frame->height = picref->height;
417 
418  if (av_frame_copy(qsv_frame->frame, picref) < 0) {
419  av_frame_free(&qsv_frame->frame);
420  return NULL;
421  }
422 
423  av_frame_copy_props(qsv_frame->frame, picref);
424  } else
425  qsv_frame->frame = av_frame_clone(picref);
426 
427  if (map_frame_to_surface(qsv_frame->frame,
428  &qsv_frame->surface_internal) < 0) {
429  av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
430  return NULL;
431  }
432  qsv_frame->surface = &qsv_frame->surface_internal;
433  }
434 
435  qsv_frame->surface->Info = s->frame_infos[FF_INLINK_IDX(inlink)];
436  qsv_frame->surface->Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
437  inlink->time_base, default_tb);
438 
439  qsv_frame->surface->Info.PicStruct =
440  !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
441  (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
442  MFX_PICSTRUCT_FIELD_BFF);
443  if (qsv_frame->frame->repeat_pict == 1)
444  qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
445  else if (qsv_frame->frame->repeat_pict == 2)
446  qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
447  else if (qsv_frame->frame->repeat_pict == 4)
448  qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;
449 
450  return qsv_frame;
451 }
452 
453 /* get the output surface */
 454 static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink)
 455 {
456  AVFilterContext *ctx = outlink->src;
457  QSVFrame *out_frame;
458  int ret;
459 
460  clear_unused_frames(s->out_frame_list);
461 
462  out_frame = get_free_frame(&s->out_frame_list);
463  if (!out_frame)
464  return NULL;
465 
466  /* For video memory, get a hw frame;
467  * For system memory, get a sw frame and map it into a mfx_surface. */
468  if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
469  out_frame->frame = av_frame_alloc();
470  if (!out_frame->frame)
471  return NULL;
472 
473  ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
474  if (ret < 0) {
475  av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
476  return NULL;
477  }
478 
479  out_frame->surface = (mfxFrameSurface1 *)out_frame->frame->data[3];
480  } else {
481  /* Get a frame with aligned dimensions.
482  * Libmfx need system memory being 128x64 aligned */
483  out_frame->frame = ff_get_video_buffer(outlink,
484  FFALIGN(outlink->w, 128),
485  FFALIGN(outlink->h, 64));
486  if (!out_frame->frame)
487  return NULL;
488 
489  out_frame->frame->width = outlink->w;
490  out_frame->frame->height = outlink->h;
491 
492  ret = map_frame_to_surface(out_frame->frame,
493  &out_frame->surface_internal);
494  if (ret < 0)
495  return NULL;
496 
497  out_frame->surface = &out_frame->surface_internal;
498  }
499 
500  out_frame->surface->Info = s->vpp_param.vpp.Out;
501 
502  return out_frame;
503 }
504 
505 /* create the QSV session */
 506 static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
 507 {
508  AVFilterLink *inlink = avctx->inputs[0];
509  AVFilterLink *outlink = avctx->outputs[0];
510  AVQSVFramesContext *in_frames_hwctx = NULL;
511  AVQSVFramesContext *out_frames_hwctx = NULL;
512 
513  AVBufferRef *device_ref;
514  AVHWDeviceContext *device_ctx;
515  AVQSVDeviceContext *device_hwctx;
516  mfxHDL handle;
517  mfxHandleType handle_type;
518  mfxVersion ver;
519  mfxIMPL impl;
520  int ret, i;
521 
522  if (inlink->hw_frames_ctx) {
523  AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;
524 
525  device_ref = frames_ctx->device_ref;
526  in_frames_hwctx = frames_ctx->hwctx;
527 
528  s->in_mem_mode = in_frames_hwctx->frame_type;
529 
530  s->surface_ptrs_in = av_mallocz_array(in_frames_hwctx->nb_surfaces,
531  sizeof(*s->surface_ptrs_in));
532  if (!s->surface_ptrs_in)
533  return AVERROR(ENOMEM);
534 
535  for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
536  s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;
537 
538  s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
539  } else if (avctx->hw_device_ctx) {
540  device_ref = avctx->hw_device_ctx;
541  s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
542  } else {
543  av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
544  return AVERROR(EINVAL);
545  }
546 
547  device_ctx = (AVHWDeviceContext *)device_ref->data;
548  device_hwctx = device_ctx->hwctx;
549 
550  if (outlink->format == AV_PIX_FMT_QSV) {
551  AVHWFramesContext *out_frames_ctx;
552  AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
553  if (!out_frames_ref)
554  return AVERROR(ENOMEM);
555 
556  s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
557  MFX_MEMTYPE_OPAQUE_FRAME :
558  MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
559 
560  out_frames_ctx = (AVHWFramesContext *)out_frames_ref->data;
561  out_frames_hwctx = out_frames_ctx->hwctx;
562 
563  out_frames_ctx->format = AV_PIX_FMT_QSV;
564  out_frames_ctx->width = FFALIGN(outlink->w, 32);
565  out_frames_ctx->height = FFALIGN(outlink->h, 32);
566  out_frames_ctx->sw_format = s->out_sw_format;
567  out_frames_ctx->initial_pool_size = 64;
568  if (avctx->extra_hw_frames > 0)
569  out_frames_ctx->initial_pool_size += avctx->extra_hw_frames;
570  out_frames_hwctx->frame_type = s->out_mem_mode;
571 
572  ret = av_hwframe_ctx_init(out_frames_ref);
573  if (ret < 0) {
574  av_buffer_unref(&out_frames_ref);
575  av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
576  return ret;
577  }
578 
579  s->surface_ptrs_out = av_mallocz_array(out_frames_hwctx->nb_surfaces,
580  sizeof(*s->surface_ptrs_out));
581  if (!s->surface_ptrs_out) {
582  av_buffer_unref(&out_frames_ref);
583  return AVERROR(ENOMEM);
584  }
585 
586  for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
587  s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
588  s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;
589 
590  av_buffer_unref(&outlink->hw_frames_ctx);
591  outlink->hw_frames_ctx = out_frames_ref;
592  } else
593  s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
594 
595  /* extract the properties of the "master" session given to us */
596  ret = MFXQueryIMPL(device_hwctx->session, &impl);
597  if (ret == MFX_ERR_NONE)
598  ret = MFXQueryVersion(device_hwctx->session, &ver);
599  if (ret != MFX_ERR_NONE) {
600  av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
601  return AVERROR_UNKNOWN;
602  }
603 
604  for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
605  ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
606  if (ret == MFX_ERR_NONE) {
 607  handle_type = handle_types[i];
 608  break;
609  }
610  }
611 
612  if (ret < 0)
613  return ff_qsvvpp_print_error(avctx, ret, "Error getting the session handle");
614  else if (ret > 0) {
615  ff_qsvvpp_print_warning(avctx, ret, "Warning in getting the session handle");
616  return AVERROR_UNKNOWN;
617  }
618 
619  /* create a "slave" session with those same properties, to be used for vpp */
620  ret = MFXInit(impl, &ver, &s->session);
621  if (ret < 0)
622  return ff_qsvvpp_print_error(avctx, ret, "Error initializing a session");
623  else if (ret > 0) {
624  ff_qsvvpp_print_warning(avctx, ret, "Warning in session initialization");
625  return AVERROR_UNKNOWN;
626  }
627 
628  if (handle) {
629  ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
630  if (ret != MFX_ERR_NONE)
631  return AVERROR_UNKNOWN;
632  }
633 
634  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
635  ret = MFXJoinSession(device_hwctx->session, s->session);
636  if (ret != MFX_ERR_NONE)
637  return AVERROR_UNKNOWN;
638  }
639 
640  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
641  s->opaque_alloc.In.Surfaces = s->surface_ptrs_in;
642  s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
643  s->opaque_alloc.In.Type = s->in_mem_mode;
644 
645  s->opaque_alloc.Out.Surfaces = s->surface_ptrs_out;
646  s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
647  s->opaque_alloc.Out.Type = s->out_mem_mode;
648 
649  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
650  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
651  } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
652  mfxFrameAllocator frame_allocator = {
653  .pthis = s,
654  .Alloc = frame_alloc,
655  .Lock = frame_lock,
656  .Unlock = frame_unlock,
657  .GetHDL = frame_get_hdl,
658  .Free = frame_free,
659  };
660 
661  ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
662  if (ret != MFX_ERR_NONE)
663  return AVERROR_UNKNOWN;
664  }
665 
666  return 0;
667 }
668 
 669 int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
 670 {
671  int i;
672  int ret;
673  QSVVPPContext *s;
674 
675  s = av_mallocz(sizeof(*s));
676  if (!s)
677  return AVERROR(ENOMEM);
678 
679  s->filter_frame = param->filter_frame;
680  if (!s->filter_frame)
681  s->filter_frame = ff_filter_frame;
682  s->out_sw_format = param->out_sw_format;
683 
684  /* create the vpp session */
685  ret = init_vpp_session(avctx, s);
686  if (ret < 0)
687  goto failed;
688 
689  s->frame_infos = av_mallocz_array(avctx->nb_inputs, sizeof(*s->frame_infos));
690  if (!s->frame_infos) {
691  ret = AVERROR(ENOMEM);
692  goto failed;
693  }
694 
695  /* Init each input's information */
696  for (i = 0; i < avctx->nb_inputs; i++) {
697  ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
698  if (ret < 0)
699  goto failed;
700  }
701 
702  /* Update input's frame info according to crop */
703  for (i = 0; i < param->num_crop; i++) {
704  QSVVPPCrop *crop = param->crop + i;
705  if (crop->in_idx > avctx->nb_inputs) {
706  ret = AVERROR(EINVAL);
707  goto failed;
708  }
709  s->frame_infos[crop->in_idx].CropX = crop->x;
710  s->frame_infos[crop->in_idx].CropY = crop->y;
711  s->frame_infos[crop->in_idx].CropW = crop->w;
712  s->frame_infos[crop->in_idx].CropH = crop->h;
713  }
714 
715  s->vpp_param.vpp.In = s->frame_infos[0];
716 
717  ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
718  if (ret < 0) {
719  av_log(avctx, AV_LOG_ERROR, "Fail to get frame info from link.\n");
720  goto failed;
721  }
722 
723  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
724  s->nb_ext_buffers = param->num_ext_buf + 1;
725  s->ext_buffers = av_mallocz_array(s->nb_ext_buffers, sizeof(*s->ext_buffers));
726  if (!s->ext_buffers) {
727  ret = AVERROR(ENOMEM);
728  goto failed;
729  }
730 
731  s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
732  for (i = 1; i < param->num_ext_buf; i++)
733  s->ext_buffers[i] = param->ext_buf[i - 1];
734  s->vpp_param.ExtParam = s->ext_buffers;
735  s->vpp_param.NumExtParam = s->nb_ext_buffers;
736  } else {
737  s->vpp_param.NumExtParam = param->num_ext_buf;
738  s->vpp_param.ExtParam = param->ext_buf;
739  }
740 
741  s->vpp_param.AsyncDepth = 1;
742 
743  if (IS_SYSTEM_MEMORY(s->in_mem_mode))
744  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
745  else if (IS_VIDEO_MEMORY(s->in_mem_mode))
746  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
747  else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
748  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;
749 
750  if (IS_SYSTEM_MEMORY(s->out_mem_mode))
751  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
752  else if (IS_VIDEO_MEMORY(s->out_mem_mode))
753  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
754  else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
755  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
756 
757  /* Print input memory mode */
758  ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0x0F, "VPP");
759  /* Print output memory mode */
760  ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0xF0, "VPP");
761  ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
762  if (ret < 0) {
763  ret = ff_qsvvpp_print_error(avctx, ret, "Failed to create a qsvvpp");
764  goto failed;
765  } else if (ret > 0)
766  ff_qsvvpp_print_warning(avctx, ret, "Warning When creating qsvvpp");
767 
768  *vpp = s;
769  return 0;
770 
771 failed:
772  ff_qsvvpp_free(&s);
773 
774  return ret;
775 }
776 
 777 int ff_qsvvpp_free(QSVVPPContext **vpp)
 778 {
779  QSVVPPContext *s = *vpp;
780 
781  if (!s)
782  return 0;
783 
784  if (s->session) {
785  MFXVideoVPP_Close(s->session);
786  MFXClose(s->session);
787  }
788 
789  /* release all the resources */
790  clear_frame_list(&s->in_frame_list);
791  clear_frame_list(&s->out_frame_list);
792  av_freep(&s->surface_ptrs_in);
793  av_freep(&s->surface_ptrs_out);
794  av_freep(&s->ext_buffers);
795  av_freep(&s->frame_infos);
796  av_freep(vpp);
797 
798  return 0;
799 }
800 
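/* Process one input frame: submit it, then run MFXVideoVPP_RunFrameVPPAsync()
 * in a loop (retrying with a short sleep while the device reports
 * MFX_WRN_DEVICE_BUSY), synchronize each completed output surface, rescale its
 * timestamp back to the output time base and hand it to the filter_frame
 * callback, until libmfx stops returning MFX_ERR_MORE_SURFACE. */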
 801 int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
 802 {
803  AVFilterContext *ctx = inlink->dst;
804  AVFilterLink *outlink = ctx->outputs[0];
805  mfxSyncPoint sync;
806  QSVFrame *in_frame, *out_frame;
807  int ret, filter_ret;
808 
809  in_frame = submit_frame(s, inlink, picref);
810  if (!in_frame) {
811  av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
 812  FF_INLINK_IDX(inlink));
 813  return AVERROR(ENOMEM);
814  }
815 
816  do {
817  out_frame = query_frame(s, outlink);
818  if (!out_frame) {
819  av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
820  return AVERROR(ENOMEM);
821  }
822 
823  do {
824  ret = MFXVideoVPP_RunFrameVPPAsync(s->session, in_frame->surface,
825  out_frame->surface, NULL, &sync);
826  if (ret == MFX_WRN_DEVICE_BUSY)
827  av_usleep(500);
828  } while (ret == MFX_WRN_DEVICE_BUSY);
829 
830  if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
831  /* Ignore more_data error */
832  if (ret == MFX_ERR_MORE_DATA)
833  ret = AVERROR(EAGAIN);
834  break;
835  }
836 
837  if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0)
838  av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");
839 
840  out_frame->frame->pts = av_rescale_q(out_frame->surface->Data.TimeStamp,
841  default_tb, outlink->time_base);
842 
843  filter_ret = s->filter_frame(outlink, out_frame->frame);
844  if (filter_ret < 0) {
845  av_frame_free(&out_frame->frame);
846  ret = filter_ret;
847  break;
848  }
849  out_frame->frame = NULL;
850  } while(ret == MFX_ERR_MORE_SURFACE);
851 
852  return ret;
853 }
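Usage sketch (not part of the original file): the helpers above are consumed by the QSV filters such as vf_vpp_qsv, vf_scale_qsv and vf_overlay_qsv. The fragment below is a minimal, illustrative outline of how a hypothetical filter might drive this API; the context struct and function names (ExampleQSVFilter, example_config_output, example_filter_frame, example_uninit) are invented for the example, while QSVVPPParam, ff_qsvvpp_create(), ff_qsvvpp_filter_frame() and ff_qsvvpp_free() are the real entry points declared in qsvvpp.h and defined in this file.

/* Illustrative only: ExampleQSVFilter and the example_* functions are not part
 * of FFmpeg; they show the expected call sequence for the qsvvpp helpers. */
typedef struct ExampleQSVFilter {
    const AVClass *class;
    QSVVPPContext *qsv;    /* created by ff_qsvvpp_create()  */
    QSVVPPParam    param;  /* description of the VPP pipeline */
} ExampleQSVFilter;

static int example_config_output(AVFilterLink *outlink)
{
    AVFilterContext  *ctx = outlink->src;
    ExampleQSVFilter *s   = ctx->priv;

    /* NULL makes ff_qsvvpp_filter_frame() pass frames to ff_filter_frame() */
    s->param.filter_frame  = NULL;
    /* real pixel format behind AV_PIX_FMT_QSV output surfaces */
    s->param.out_sw_format = AV_PIX_FMT_NV12;
    s->param.num_ext_buf   = 0;   /* no extra mfxExtBuffer entries  */
    s->param.num_crop      = 0;   /* no per-input crop rectangles   */

    return ff_qsvvpp_create(ctx, &s->qsv, &s->param);
}

static int example_filter_frame(AVFilterLink *inlink, AVFrame *frame)
{
    ExampleQSVFilter *s   = inlink->dst->priv;
    int               ret = ff_qsvvpp_filter_frame(s->qsv, inlink, frame);

    /* ff_qsvvpp_filter_frame() clones the input, so the caller releases it */
    av_frame_free(&frame);
    return ret;
}

static void example_uninit(AVFilterContext *ctx)
{
    ExampleQSVFilter *s = ctx->priv;

    ff_qsvvpp_free(&s->qsv);
}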