FFmpeg
qsvvpp.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 /**
20  * @file
21  * Intel Quick Sync Video VPP base function
22  */
23 
24 #include "libavutil/common.h"
25 #include "libavutil/mathematics.h"
26 #include "libavutil/hwcontext.h"
28 #include "libavutil/time.h"
29 #include "libavutil/pixdesc.h"
30 
31 #include "internal.h"
32 #include "qsvvpp.h"
33 #include "video.h"
34 
/* Does the MFX_MEMTYPE_* mask describe video memory (decoder or processor target)? */
#define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                       MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
/* Opaque-surface memory (handled internally by the MFX runtime)? */
#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
/* Plain system (CPU) memory? */
#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
/* Extract the implementation "via" bits (VAAPI/D3D9/D3D11) from an mfxIMPL value. */
#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
40 
41 static const AVRational default_tb = { 1, 90000 };
42 
43 typedef struct QSVAsyncFrame {
44  mfxSyncPoint sync;
45  QSVFrame *frame;
47 
48 static const struct {
50  const char *desc;
51 } qsv_iopatterns[] = {
52  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
53  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
54  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
55  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
56  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
57  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
58 };
59 
61  const char *extra_string)
62 {
63  const char *desc = NULL;
64 
65  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
67  desc = qsv_iopatterns[i].desc;
68  }
69  }
70  if (!desc)
71  desc = "unknown iopattern";
72 
73  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
74  return 0;
75 }
76 
77 static const struct {
78  mfxStatus mfxerr;
79  int averr;
80  const char *desc;
81 } qsv_errors[] = {
82  { MFX_ERR_NONE, 0, "success" },
83  { MFX_ERR_UNKNOWN, AVERROR_UNKNOWN, "unknown error" },
84  { MFX_ERR_NULL_PTR, AVERROR(EINVAL), "NULL pointer" },
85  { MFX_ERR_UNSUPPORTED, AVERROR(ENOSYS), "unsupported" },
86  { MFX_ERR_MEMORY_ALLOC, AVERROR(ENOMEM), "failed to allocate memory" },
87  { MFX_ERR_NOT_ENOUGH_BUFFER, AVERROR(ENOMEM), "insufficient input/output buffer" },
88  { MFX_ERR_INVALID_HANDLE, AVERROR(EINVAL), "invalid handle" },
89  { MFX_ERR_LOCK_MEMORY, AVERROR(EIO), "failed to lock the memory block" },
90  { MFX_ERR_NOT_INITIALIZED, AVERROR_BUG, "not initialized" },
91  { MFX_ERR_NOT_FOUND, AVERROR(ENOSYS), "specified object was not found" },
92  /* the following 3 errors should always be handled explicitly, so those "mappings"
93  * are for completeness only */
94  { MFX_ERR_MORE_DATA, AVERROR_UNKNOWN, "expect more data at input" },
95  { MFX_ERR_MORE_SURFACE, AVERROR_UNKNOWN, "expect more surface at output" },
96  { MFX_ERR_MORE_BITSTREAM, AVERROR_UNKNOWN, "expect more bitstream at output" },
97  { MFX_ERR_ABORTED, AVERROR_UNKNOWN, "operation aborted" },
98  { MFX_ERR_DEVICE_LOST, AVERROR(EIO), "device lost" },
99  { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters" },
100  { MFX_ERR_INVALID_VIDEO_PARAM, AVERROR(EINVAL), "invalid video parameters" },
101  { MFX_ERR_UNDEFINED_BEHAVIOR, AVERROR_BUG, "undefined behavior" },
102  { MFX_ERR_DEVICE_FAILED, AVERROR(EIO), "device failed" },
103  { MFX_ERR_GPU_HANG, AVERROR(EIO), "GPU Hang" },
104  { MFX_ERR_REALLOC_SURFACE, AVERROR_UNKNOWN, "need bigger surface for output" },
105  { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters" },
106  { MFX_ERR_INVALID_AUDIO_PARAM, AVERROR(EINVAL), "invalid audio parameters" },
107 
108  { MFX_WRN_IN_EXECUTION, 0, "operation in execution" },
109  { MFX_WRN_DEVICE_BUSY, 0, "device busy" },
110  { MFX_WRN_VIDEO_PARAM_CHANGED, 0, "video parameters changed" },
111  { MFX_WRN_PARTIAL_ACCELERATION, 0, "partial acceleration" },
112  { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0, "incompatible video parameters" },
113  { MFX_WRN_VALUE_NOT_CHANGED, 0, "value is saturated" },
114  { MFX_WRN_OUT_OF_RANGE, 0, "value out of range" },
115  { MFX_WRN_FILTER_SKIPPED, 0, "filter skipped" },
116  { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0, "incompatible audio parameters" },
117 
118 #if QSV_VERSION_ATLEAST(1, 31)
119  { MFX_ERR_NONE_PARTIAL_OUTPUT, 0, "partial output" },
120 #endif
121 };
122 
123 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
124 {
125  int i;
126  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
127  if (qsv_errors[i].mfxerr == mfx_err) {
128  if (desc)
129  *desc = qsv_errors[i].desc;
130  return qsv_errors[i].averr;
131  }
132  }
133  if (desc)
134  *desc = "unknown error";
135  return AVERROR_UNKNOWN;
136 }
137 
138 int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err,
139  const char *error_string)
140 {
141  const char *desc;
142  int ret;
143  ret = qsv_map_error(err, &desc);
144  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
145  return ret;
146 }
147 
148 int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err,
149  const char *warning_string)
150 {
151  const char *desc;
152  int ret;
153  ret = qsv_map_error(err, &desc);
154  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
155  return ret;
156 }
157 
158 /* functions for frameAlloc */
159 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
160  mfxFrameAllocResponse *resp)
161 {
162  QSVVPPContext *s = pthis;
163  int i;
164 
165  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
166  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
167  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
168  return MFX_ERR_UNSUPPORTED;
169 
170  if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
171  resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
172  if (!resp->mids)
173  return AVERROR(ENOMEM);
174 
175  for (i = 0; i < s->nb_surface_ptrs_in; i++)
176  resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;
177 
178  resp->NumFrameActual = s->nb_surface_ptrs_in;
179  } else {
180  resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
181  if (!resp->mids)
182  return AVERROR(ENOMEM);
183 
184  for (i = 0; i < s->nb_surface_ptrs_out; i++)
185  resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;
186 
187  resp->NumFrameActual = s->nb_surface_ptrs_out;
188  }
189 
190  return MFX_ERR_NONE;
191 }
192 
193 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
194 {
195  av_freep(&resp->mids);
196  return MFX_ERR_NONE;
197 }
198 
199 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
200 {
201  return MFX_ERR_UNSUPPORTED;
202 }
203 
204 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
205 {
206  return MFX_ERR_UNSUPPORTED;
207 }
208 
209 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
210 {
211  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
212  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
213 
214  pair_dst->first = pair_src->first;
215 
216  if (pair_src->second != (mfxMemId)MFX_INFINITE)
217  pair_dst->second = pair_src->second;
218  return MFX_ERR_NONE;
219 }
220 
222 {
223  switch (format) {
224  case AV_PIX_FMT_YUV420P:
225  return MFX_FOURCC_YV12;
226  case AV_PIX_FMT_NV12:
227  return MFX_FOURCC_NV12;
228  case AV_PIX_FMT_YUYV422:
229  return MFX_FOURCC_YUY2;
230  case AV_PIX_FMT_BGRA:
231  return MFX_FOURCC_RGB4;
232  }
233 
234  return MFX_FOURCC_NV12;
235 }
236 
237 static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
238 {
239  switch (frame->format) {
240  case AV_PIX_FMT_NV12:
241  case AV_PIX_FMT_P010:
242  surface->Data.Y = frame->data[0];
243  surface->Data.UV = frame->data[1];
244  break;
245  case AV_PIX_FMT_YUV420P:
246  surface->Data.Y = frame->data[0];
247  surface->Data.U = frame->data[1];
248  surface->Data.V = frame->data[2];
249  break;
250  case AV_PIX_FMT_YUYV422:
251  surface->Data.Y = frame->data[0];
252  surface->Data.U = frame->data[0] + 1;
253  surface->Data.V = frame->data[0] + 3;
254  break;
255  case AV_PIX_FMT_RGB32:
256  surface->Data.B = frame->data[0];
257  surface->Data.G = frame->data[0] + 1;
258  surface->Data.R = frame->data[0] + 2;
259  surface->Data.A = frame->data[0] + 3;
260  break;
261  default:
262  return MFX_ERR_UNSUPPORTED;
263  }
264  surface->Data.Pitch = frame->linesize[0];
265 
266  return 0;
267 }
268 
269 /* fill the surface info */
270 static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
271 {
272  enum AVPixelFormat pix_fmt;
273  AVHWFramesContext *frames_ctx;
274  AVQSVFramesContext *frames_hwctx;
275  const AVPixFmtDescriptor *desc;
276 
277  if (link->format == AV_PIX_FMT_QSV) {
278  if (!link->hw_frames_ctx)
279  return AVERROR(EINVAL);
280 
281  frames_ctx = (AVHWFramesContext *)link->hw_frames_ctx->data;
282  frames_hwctx = frames_ctx->hwctx;
283  *frameinfo = frames_hwctx->surfaces[0].Info;
284  } else {
285  pix_fmt = link->format;
287  if (!desc)
288  return AVERROR_BUG;
289 
290  frameinfo->CropX = 0;
291  frameinfo->CropY = 0;
292  frameinfo->Width = FFALIGN(link->w, 32);
293  frameinfo->Height = FFALIGN(link->h, 32);
294  frameinfo->PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
295  frameinfo->FourCC = pix_fmt_to_mfx_fourcc(pix_fmt);
296  frameinfo->BitDepthLuma = desc->comp[0].depth;
297  frameinfo->BitDepthChroma = desc->comp[0].depth;
298  frameinfo->Shift = desc->comp[0].depth > 8;
299  if (desc->log2_chroma_w && desc->log2_chroma_h)
300  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
301  else if (desc->log2_chroma_w)
302  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
303  else
304  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
305  }
306 
307  frameinfo->CropW = link->w;
308  frameinfo->CropH = link->h;
309  frameinfo->FrameRateExtN = link->frame_rate.num;
310  frameinfo->FrameRateExtD = link->frame_rate.den;
311  frameinfo->AspectRatioW = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
312  frameinfo->AspectRatioH = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;
313 
314  return 0;
315 }
316 
318 {
319  while (list) {
320  /* list->queued==1 means the frame is not cached in VPP
321  * process any more, it can be released to pool. */
322  if ((list->queued == 1) && !list->surface.Data.Locked) {
323  av_frame_free(&list->frame);
324  list->queued = 0;
325  }
326  list = list->next;
327  }
328 }
329 
331 {
332  while (*list) {
333  QSVFrame *frame;
334 
335  frame = *list;
336  *list = (*list)->next;
337  av_frame_free(&frame->frame);
338  av_freep(&frame);
339  }
340 }
341 
343 {
344  QSVFrame *out = *list;
345 
346  for (; out; out = out->next) {
347  if (!out->queued) {
348  out->queued = 1;
349  break;
350  }
351  }
352 
353  if (!out) {
354  out = av_mallocz(sizeof(*out));
355  if (!out) {
356  av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
357  return NULL;
358  }
359  out->queued = 1;
360  out->next = *list;
361  *list = out;
362  }
363 
364  return out;
365 }
366 
367 /* get the input surface */
369 {
370  QSVFrame *qsv_frame;
371  AVFilterContext *ctx = inlink->dst;
372 
373  clear_unused_frames(s->in_frame_list);
374 
375  qsv_frame = get_free_frame(&s->in_frame_list);
376  if (!qsv_frame)
377  return NULL;
378 
379  /* Turn AVFrame into mfxFrameSurface1.
380  * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
381  * mfxFrameSurface1 is stored in AVFrame->data[3];
382  * for system memory mode, raw video data is stored in
383  * AVFrame, we should map it into mfxFrameSurface1.
384  */
385  if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
386  if (picref->format != AV_PIX_FMT_QSV) {
387  av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
388  return NULL;
389  }
390  qsv_frame->frame = av_frame_clone(picref);
391  qsv_frame->surface = *(mfxFrameSurface1 *)qsv_frame->frame->data[3];
392  } else {
393  /* make a copy if the input is not padded as libmfx requires */
394  if (picref->height & 31 || picref->linesize[0] & 31) {
395  qsv_frame->frame = ff_get_video_buffer(inlink,
396  FFALIGN(inlink->w, 32),
397  FFALIGN(inlink->h, 32));
398  if (!qsv_frame->frame)
399  return NULL;
400 
401  qsv_frame->frame->width = picref->width;
402  qsv_frame->frame->height = picref->height;
403 
404  if (av_frame_copy(qsv_frame->frame, picref) < 0) {
405  av_frame_free(&qsv_frame->frame);
406  return NULL;
407  }
408 
409  av_frame_copy_props(qsv_frame->frame, picref);
410  } else
411  qsv_frame->frame = av_frame_clone(picref);
412 
413  if (map_frame_to_surface(qsv_frame->frame,
414  &qsv_frame->surface) < 0) {
415  av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
416  return NULL;
417  }
418  }
419 
420  qsv_frame->surface.Info = s->frame_infos[FF_INLINK_IDX(inlink)];
421  qsv_frame->surface.Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
422  inlink->time_base, default_tb);
423 
424  qsv_frame->surface.Info.PicStruct =
425  !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
426  (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
427  MFX_PICSTRUCT_FIELD_BFF);
428  if (qsv_frame->frame->repeat_pict == 1)
429  qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
430  else if (qsv_frame->frame->repeat_pict == 2)
431  qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
432  else if (qsv_frame->frame->repeat_pict == 4)
433  qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;
434 
435  return qsv_frame;
436 }
437 
438 /* get the output surface */
440 {
441  AVFilterContext *ctx = outlink->src;
442  QSVFrame *out_frame;
443  int ret;
444 
445  clear_unused_frames(s->out_frame_list);
446 
447  out_frame = get_free_frame(&s->out_frame_list);
448  if (!out_frame)
449  return NULL;
450 
451  /* For video memory, get a hw frame;
452  * For system memory, get a sw frame and map it into a mfx_surface. */
453  if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
454  out_frame->frame = av_frame_alloc();
455  if (!out_frame->frame)
456  return NULL;
457 
458  ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
459  if (ret < 0) {
460  av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
461  return NULL;
462  }
463 
464  out_frame->surface = *(mfxFrameSurface1 *)out_frame->frame->data[3];
465  } else {
466  /* Get a frame with aligned dimensions.
467  * Libmfx need system memory being 128x64 aligned */
468  out_frame->frame = ff_get_video_buffer(outlink,
469  FFALIGN(outlink->w, 128),
470  FFALIGN(outlink->h, 64));
471  if (!out_frame->frame)
472  return NULL;
473 
474  out_frame->frame->width = outlink->w;
475  out_frame->frame->height = outlink->h;
476 
477  ret = map_frame_to_surface(out_frame->frame,
478  &out_frame->surface);
479  if (ret < 0)
480  return NULL;
481  }
482 
483  out_frame->surface.Info = s->vpp_param.vpp.Out;
484 
485  return out_frame;
486 }
487 
488 /* create the QSV session */
490 {
491  AVFilterLink *inlink = avctx->inputs[0];
492  AVFilterLink *outlink = avctx->outputs[0];
493  AVQSVFramesContext *in_frames_hwctx = NULL;
494  AVQSVFramesContext *out_frames_hwctx = NULL;
495 
496  AVBufferRef *device_ref;
497  AVHWDeviceContext *device_ctx;
498  AVQSVDeviceContext *device_hwctx;
499  mfxHDL handle;
500  mfxHandleType handle_type;
501  mfxVersion ver;
502  mfxIMPL impl;
503  int ret, i;
504 
505  if (inlink->hw_frames_ctx) {
506  AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;
507 
508  device_ref = frames_ctx->device_ref;
509  in_frames_hwctx = frames_ctx->hwctx;
510 
511  s->in_mem_mode = in_frames_hwctx->frame_type;
512 
513  s->surface_ptrs_in = av_calloc(in_frames_hwctx->nb_surfaces,
514  sizeof(*s->surface_ptrs_in));
515  if (!s->surface_ptrs_in)
516  return AVERROR(ENOMEM);
517 
518  for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
519  s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;
520 
521  s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
522  } else if (avctx->hw_device_ctx) {
523  device_ref = avctx->hw_device_ctx;
524  s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
525  } else {
526  av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
527  return AVERROR(EINVAL);
528  }
529 
530  device_ctx = (AVHWDeviceContext *)device_ref->data;
531  device_hwctx = device_ctx->hwctx;
532 
533  if (outlink->format == AV_PIX_FMT_QSV) {
534  AVHWFramesContext *out_frames_ctx;
535  AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
536  if (!out_frames_ref)
537  return AVERROR(ENOMEM);
538 
539  s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
540  MFX_MEMTYPE_OPAQUE_FRAME :
541  MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | MFX_MEMTYPE_FROM_VPPOUT;
542 
543  out_frames_ctx = (AVHWFramesContext *)out_frames_ref->data;
544  out_frames_hwctx = out_frames_ctx->hwctx;
545 
546  out_frames_ctx->format = AV_PIX_FMT_QSV;
547  out_frames_ctx->width = FFALIGN(outlink->w, 32);
548  out_frames_ctx->height = FFALIGN(outlink->h, 32);
549  out_frames_ctx->sw_format = s->out_sw_format;
550  out_frames_ctx->initial_pool_size = 64;
551  if (avctx->extra_hw_frames > 0)
552  out_frames_ctx->initial_pool_size += avctx->extra_hw_frames;
553  out_frames_hwctx->frame_type = s->out_mem_mode;
554 
555  ret = av_hwframe_ctx_init(out_frames_ref);
556  if (ret < 0) {
557  av_buffer_unref(&out_frames_ref);
558  av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
559  return ret;
560  }
561 
562  s->surface_ptrs_out = av_calloc(out_frames_hwctx->nb_surfaces,
563  sizeof(*s->surface_ptrs_out));
564  if (!s->surface_ptrs_out) {
565  av_buffer_unref(&out_frames_ref);
566  return AVERROR(ENOMEM);
567  }
568 
569  for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
570  s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
571  s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;
572 
573  av_buffer_unref(&outlink->hw_frames_ctx);
574  outlink->hw_frames_ctx = out_frames_ref;
575  } else
576  s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
577 
578  /* extract the properties of the "master" session given to us */
579  ret = MFXQueryIMPL(device_hwctx->session, &impl);
580  if (ret == MFX_ERR_NONE)
581  ret = MFXQueryVersion(device_hwctx->session, &ver);
582  if (ret != MFX_ERR_NONE) {
583  av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
584  return AVERROR_UNKNOWN;
585  }
586 
587  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
588  handle_type = MFX_HANDLE_VA_DISPLAY;
589  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
590  handle_type = MFX_HANDLE_D3D11_DEVICE;
591  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
592  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
593  } else {
594  av_log(avctx, AV_LOG_ERROR, "Error unsupported handle type\n");
595  return AVERROR_UNKNOWN;
596  }
597 
598  ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_type, &handle);
599  if (ret < 0)
600  return ff_qsvvpp_print_error(avctx, ret, "Error getting the session handle");
601  else if (ret > 0) {
602  ff_qsvvpp_print_warning(avctx, ret, "Warning in getting the session handle");
603  return AVERROR_UNKNOWN;
604  }
605 
606  /* create a "slave" session with those same properties, to be used for vpp */
607  ret = MFXInit(impl, &ver, &s->session);
608  if (ret < 0)
609  return ff_qsvvpp_print_error(avctx, ret, "Error initializing a session");
610  else if (ret > 0) {
611  ff_qsvvpp_print_warning(avctx, ret, "Warning in session initialization");
612  return AVERROR_UNKNOWN;
613  }
614 
615  if (handle) {
616  ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
617  if (ret != MFX_ERR_NONE)
618  return AVERROR_UNKNOWN;
619  }
620 
621  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
622  ret = MFXJoinSession(device_hwctx->session, s->session);
623  if (ret != MFX_ERR_NONE)
624  return AVERROR_UNKNOWN;
625  }
626 
627  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
628  s->opaque_alloc.In.Surfaces = s->surface_ptrs_in;
629  s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
630  s->opaque_alloc.In.Type = s->in_mem_mode;
631 
632  s->opaque_alloc.Out.Surfaces = s->surface_ptrs_out;
633  s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
634  s->opaque_alloc.Out.Type = s->out_mem_mode;
635 
636  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
637  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
638  } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
639  mfxFrameAllocator frame_allocator = {
640  .pthis = s,
641  .Alloc = frame_alloc,
642  .Lock = frame_lock,
643  .Unlock = frame_unlock,
644  .GetHDL = frame_get_hdl,
645  .Free = frame_free,
646  };
647 
648  ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
649  if (ret != MFX_ERR_NONE)
650  return AVERROR_UNKNOWN;
651  }
652 
653  return 0;
654 }
655 
657 {
658  int i;
659  int ret;
660  QSVVPPContext *s;
661 
662  s = av_mallocz(sizeof(*s));
663  if (!s)
664  return AVERROR(ENOMEM);
665 
666  s->filter_frame = param->filter_frame;
667  if (!s->filter_frame)
668  s->filter_frame = ff_filter_frame;
669  s->out_sw_format = param->out_sw_format;
670 
671  /* create the vpp session */
672  ret = init_vpp_session(avctx, s);
673  if (ret < 0)
674  goto failed;
675 
676  s->frame_infos = av_calloc(avctx->nb_inputs, sizeof(*s->frame_infos));
677  if (!s->frame_infos) {
678  ret = AVERROR(ENOMEM);
679  goto failed;
680  }
681 
682  /* Init each input's information */
683  for (i = 0; i < avctx->nb_inputs; i++) {
684  ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
685  if (ret < 0)
686  goto failed;
687  }
688 
689  /* Update input's frame info according to crop */
690  for (i = 0; i < param->num_crop; i++) {
691  QSVVPPCrop *crop = param->crop + i;
692  if (crop->in_idx > avctx->nb_inputs) {
693  ret = AVERROR(EINVAL);
694  goto failed;
695  }
696  s->frame_infos[crop->in_idx].CropX = crop->x;
697  s->frame_infos[crop->in_idx].CropY = crop->y;
698  s->frame_infos[crop->in_idx].CropW = crop->w;
699  s->frame_infos[crop->in_idx].CropH = crop->h;
700  }
701 
702  s->vpp_param.vpp.In = s->frame_infos[0];
703 
704  ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
705  if (ret < 0) {
706  av_log(avctx, AV_LOG_ERROR, "Fail to get frame info from link.\n");
707  goto failed;
708  }
709 
710  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
711  s->nb_ext_buffers = param->num_ext_buf + 1;
712  s->ext_buffers = av_calloc(s->nb_ext_buffers, sizeof(*s->ext_buffers));
713  if (!s->ext_buffers) {
714  ret = AVERROR(ENOMEM);
715  goto failed;
716  }
717 
718  s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
719  for (i = 1; i < param->num_ext_buf; i++)
720  s->ext_buffers[i] = param->ext_buf[i - 1];
721  s->vpp_param.ExtParam = s->ext_buffers;
722  s->vpp_param.NumExtParam = s->nb_ext_buffers;
723  } else {
724  s->vpp_param.NumExtParam = param->num_ext_buf;
725  s->vpp_param.ExtParam = param->ext_buf;
726  }
727 
728  s->got_frame = 0;
729 
730  /** keep fifo size at least 1. Even when async_depth is 0, fifo is used. */
731  s->async_fifo = av_fifo_alloc2(param->async_depth + 1, sizeof(QSVAsyncFrame), 0);
732  s->async_depth = param->async_depth;
733  if (!s->async_fifo) {
734  ret = AVERROR(ENOMEM);
735  goto failed;
736  }
737 
738  s->vpp_param.AsyncDepth = param->async_depth;
739 
740  if (IS_SYSTEM_MEMORY(s->in_mem_mode))
741  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
742  else if (IS_VIDEO_MEMORY(s->in_mem_mode))
743  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
744  else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
745  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;
746 
747  if (IS_SYSTEM_MEMORY(s->out_mem_mode))
748  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
749  else if (IS_VIDEO_MEMORY(s->out_mem_mode))
750  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
751  else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
752  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
753 
754  /* Print input memory mode */
755  ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0x0F, "VPP");
756  /* Print output memory mode */
757  ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0xF0, "VPP");
758  ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
759  if (ret < 0) {
760  ret = ff_qsvvpp_print_error(avctx, ret, "Failed to create a qsvvpp");
761  goto failed;
762  } else if (ret > 0)
763  ff_qsvvpp_print_warning(avctx, ret, "Warning When creating qsvvpp");
764 
765  *vpp = s;
766  return 0;
767 
768 failed:
769  ff_qsvvpp_free(&s);
770 
771  return ret;
772 }
773 
775 {
776  QSVVPPContext *s = *vpp;
777 
778  if (!s)
779  return 0;
780 
781  if (s->session) {
782  MFXVideoVPP_Close(s->session);
783  MFXClose(s->session);
784  }
785 
786  /* release all the resources */
787  clear_frame_list(&s->in_frame_list);
788  clear_frame_list(&s->out_frame_list);
789  av_freep(&s->surface_ptrs_in);
790  av_freep(&s->surface_ptrs_out);
791  av_freep(&s->ext_buffers);
792  av_freep(&s->frame_infos);
793  av_fifo_freep2(&s->async_fifo);
794  av_freep(vpp);
795 
796  return 0;
797 }
798 
800 {
801  AVFilterContext *ctx = inlink->dst;
802  AVFilterLink *outlink = ctx->outputs[0];
803  QSVAsyncFrame aframe;
804  mfxSyncPoint sync;
805  QSVFrame *in_frame, *out_frame;
806  int ret, filter_ret;
807 
808  while (s->eof && av_fifo_read(s->async_fifo, &aframe, 1) >= 0) {
809  if (MFXVideoCORE_SyncOperation(s->session, aframe.sync, 1000) < 0)
810  av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");
811 
812  filter_ret = s->filter_frame(outlink, aframe.frame->frame);
813  if (filter_ret < 0) {
814  av_frame_free(&aframe.frame->frame);
815  return filter_ret;
816  }
817  aframe.frame->queued--;
818  s->got_frame = 1;
819  aframe.frame->frame = NULL;
820  };
821 
822  if (!picref)
823  return 0;
824 
825  in_frame = submit_frame(s, inlink, picref);
826  if (!in_frame) {
827  av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
829  return AVERROR(ENOMEM);
830  }
831 
832  do {
833  out_frame = query_frame(s, outlink);
834  if (!out_frame) {
835  av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
836  return AVERROR(ENOMEM);
837  }
838 
839  do {
840  ret = MFXVideoVPP_RunFrameVPPAsync(s->session, &in_frame->surface,
841  &out_frame->surface, NULL, &sync);
842  if (ret == MFX_WRN_DEVICE_BUSY)
843  av_usleep(500);
844  } while (ret == MFX_WRN_DEVICE_BUSY);
845 
846  if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
847  /* Ignore more_data error */
848  if (ret == MFX_ERR_MORE_DATA)
849  return AVERROR(EAGAIN);
850  break;
851  }
852  out_frame->frame->pts = av_rescale_q(out_frame->surface.Data.TimeStamp,
853  default_tb, outlink->time_base);
854 
855  out_frame->queued++;
856  aframe = (QSVAsyncFrame){ sync, out_frame };
857  av_fifo_write(s->async_fifo, &aframe, 1);
858 
859  if (av_fifo_can_read(s->async_fifo) > s->async_depth) {
860  av_fifo_read(s->async_fifo, &aframe, 1);
861 
862  do {
863  ret = MFXVideoCORE_SyncOperation(s->session, aframe.sync, 1000);
864  } while (ret == MFX_WRN_IN_EXECUTION);
865 
866  filter_ret = s->filter_frame(outlink, aframe.frame->frame);
867  if (filter_ret < 0) {
868  av_frame_free(&aframe.frame->frame);
869  return filter_ret;
870  }
871 
872  aframe.frame->queued--;
873  s->got_frame = 1;
874  aframe.frame->frame = NULL;
875  }
876  } while(ret == MFX_ERR_MORE_SURFACE);
877 
878  if (ret < 0)
879  return ff_qsvvpp_print_error(ctx, ret, "Error running VPP");
880  else if (ret > 0)
881  ff_qsvvpp_print_warning(ctx, ret, "Warning in running VPP");
882 
883  return 0;
884 }
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:101
qsv_errors
static const struct @223 qsv_errors[]
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: qsvvpp.c:209
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
QSVVPPCrop::in_idx
int in_idx
Input index.
Definition: qsvvpp.h:80
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:49
mfx_iopattern
int mfx_iopattern
Definition: qsvvpp.c:49
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
IS_OPAQUE_MEMORY
#define IS_OPAQUE_MEMORY(mode)
Definition: qsvvpp.c:37
out
FILE * out
Definition: movenc.c:54
init_vpp_session
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
Definition: qsvvpp.c:489
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: qsvvpp.c:159
QSVVPPParam::crop
QSVVPPCrop * crop
Definition: qsvvpp.h:97
QSVVPPParam::out_sw_format
enum AVPixelFormat out_sw_format
Definition: qsvvpp.h:93
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:999
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2675
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:209
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:116
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:334
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:325
pixdesc.h
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:432
AVFrame::width
int width
Definition: frame.h:397
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:248
av_fifo_can_read
size_t av_fifo_can_read(const AVFifo *f)
Definition: fifo.c:87
AVFrame::top_field_first
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: frame.h:482
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVFilterContext::hw_device_ctx
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
Definition: avfilter.h:464
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
av_fifo_read
int av_fifo_read(AVFifo *f, void *buf, size_t nb_elems)
Read data from a FIFO.
Definition: fifo.c:240
mathematics.h
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
video.h
QSVFrame::frame
AVFrame * frame
Definition: qsv_internal.h:78
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:346
qsvvpp.h
AVFilterContext::extra_hw_frames
int extra_hw_frames
Sets the number of extra hardware frames which the filter will allocate on its output links for use i...
Definition: avfilter.h:494
clear_unused_frames
static void clear_unused_frames(QSVFrame *list)
Definition: qsvvpp.c:317
AVRational::num
int num
Numerator.
Definition: rational.h:59
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:104
ff_qsvvpp_print_iopattern
int ff_qsvvpp_print_iopattern(void *log_ctx, int mfx_iopattern, const char *extra_string)
Definition: qsvvpp.c:60
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: qsvvpp.c:39
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:229
QSVVPPCrop::w
int w
Definition: qsvvpp.h:81
s
#define s(width, name)
Definition: cbs_vp9.c:256
format
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample format(the sample packing is implied by the sample format) and sample rate. The lists are not just lists
ff_qsvvpp_create
int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
Definition: qsvvpp.c:656
qsv_iopatterns
static const struct @222 qsv_iopatterns[]
QSV_RUNTIME_VERSION_ATLEAST
#define QSV_RUNTIME_VERSION_ATLEAST(MFX_VERSION, MAJOR, MINOR)
Definition: qsv_internal.h:64
ctx
AVFormatContext * ctx
Definition: movenc.c:48
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:474
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demuxing_decoding.c:41
default_tb
static const AVRational default_tb
Definition: qsvvpp.c:41
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
QSVVPPParam::async_depth
int async_depth
Definition: qsvvpp.h:99
if
if(ret)
Definition: filter_design.txt:179
fill_frameinfo_by_link
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
Definition: qsvvpp.c:270
QSVFrame
Definition: qsv_internal.h:77
QSVVPPContext
Definition: qsvvpp.h:50
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:43
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:603
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
QSVVPPParam::num_crop
int num_crop
Definition: qsvvpp.h:96
QSVVPPParam
Definition: qsvvpp.h:84
QSVVPPCrop::x
int x
Definition: qsvvpp.h:81
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:141
pix_fmt_to_mfx_fourcc
static int pix_fmt_to_mfx_fourcc(int format)
Definition: qsvvpp.c:221
ff_qsvvpp_print_error
int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err, const char *error_string)
Definition: qsvvpp.c:138
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:416
list
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining list
Definition: filter_design.txt:25
QSVFrame::surface
mfxFrameSurface1 surface
Definition: qsv_internal.h:79
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:212
ff_qsvvpp_free
int ff_qsvvpp_free(QSVVPPContext **vpp)
Definition: qsvvpp.c:774
AVFilterContext::nb_inputs
unsigned nb_inputs
number of input pads
Definition: avfilter.h:417
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:771
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
IS_VIDEO_MEMORY
#define IS_VIDEO_MEMORY(mode)
Definition: qsvvpp.c:35
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: qsvvpp.c:204
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:412
mfxerr
mfxStatus mfxerr
Definition: qsvvpp.c:78
AV_PIX_FMT_RGB32
#define AV_PIX_FMT_RGB32
Definition: pixfmt.h:381
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_qsv.h:44
internal.h
QSVAsyncFrame::frame
QSVFrame * frame
Definition: qsvdec.c:65
AVFrame::interlaced_frame
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:477
QSVAsyncFrame::sync
mfxSyncPoint sync
Definition: qsvvpp.c:44
hwcontext_qsv.h
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
av_fifo_alloc2
AVFifo * av_fifo_alloc2(size_t nb_elems, size_t elem_size, unsigned int flags)
Allocate and initialize an AVFifo with a given element size.
Definition: fifo.c:47
common.h
desc
const char * desc
Definition: qsvvpp.c:50
QSVVPPParam::num_ext_buf
int num_ext_buf
Definition: qsvvpp.h:89
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: qsvvpp.c:199
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:264
av_fifo_freep2
void av_fifo_freep2(AVFifo **f)
Free an AVFifo and reset pointer to NULL.
Definition: fifo.c:286
QSVVPPParam::filter_frame
int(* filter_frame)(AVFilterLink *outlink, AVFrame *frame)
Definition: qsvvpp.h:86
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:272
map_frame_to_surface
static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
Definition: qsvvpp.c:237
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
ret
ret
Definition: filter_design.txt:187
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
QSVFrame::queued
int queued
Definition: qsv_internal.h:91
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:162
av_fifo_write
int av_fifo_write(AVFifo *f, const void *buf, size_t nb_elems)
Write data into a FIFO.
Definition: fifo.c:188
AVFrame::sample_aspect_ratio
AVRational sample_aspect_ratio
Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
Definition: frame.h:427
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:664
QSVVPPCrop::h
int h
Crop rectangle.
Definition: qsvvpp.h:81
QSVVPPCrop::y
int y
Definition: qsvvpp.h:81
AVFrame::height
int height
Definition: frame.h:397
ff_qsvvpp_filter_frame
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
Definition: qsvvpp.c:799
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AVRational::den
int den
Denominator.
Definition: rational.h:60
averr
int averr
Definition: qsvvpp.c:79
FF_INLINK_IDX
#define FF_INLINK_IDX(link)
Find the index of a link.
Definition: internal.h:333
qsv_map_error
static int qsv_map_error(mfxStatus mfx_err, const char **desc)
Definition: qsvvpp.c:123
clear_frame_list
static void clear_frame_list(QSVFrame **list)
Definition: qsvvpp.c:330
AVFilterContext
An instance of a filter.
Definition: avfilter.h:408
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:457
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:42
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:199
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
QSVAsyncFrame::sync
mfxSyncPoint * sync
Definition: qsvdec.c:64
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
hwcontext.h
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:52
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:370
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
IS_SYSTEM_MEMORY
#define IS_SYSTEM_MEMORY(mode)
Definition: qsvvpp.c:38
QSVVPPCrop
Definition: qsvvpp.h:79
get_free_frame
static QSVFrame * get_free_frame(QSVFrame **list)
Definition: qsvvpp.c:342
QSVAsyncFrame
Definition: qsvdec.c:63
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:503
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: qsvvpp.c:193
ff_qsvvpp_print_warning
int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err, const char *warning_string)
Definition: qsvvpp.c:148
AVFrame::repeat_pict
int repeat_pict
When decoding, this signals how much the picture must be delayed.
Definition: frame.h:472
QSVVPPParam::ext_buf
mfxExtBuffer ** ext_buf
Definition: qsvvpp.h:90
submit_frame
static QSVFrame * submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
Definition: qsvvpp.c:368
AVFilterContext::outputs
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:420
query_frame
static QSVFrame * query_frame(QSVVPPContext *s, AVFilterLink *outlink)
Definition: qsvvpp.c:439