FFmpeg
qsvvpp.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 /**
20  * @file
21  * Intel Quick Sync Video VPP base function
22  */
23 
24 #include "libavutil/common.h"
25 #include "libavutil/mathematics.h"
26 #include "libavutil/hwcontext.h"
28 #include "libavutil/time.h"
29 #include "libavutil/pixdesc.h"
30 
31 #include "internal.h"
32 #include "qsvvpp.h"
33 #include "video.h"
34 
35 #define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
36  MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
37 #define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
38 #define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
39 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
40 
/* libmfx expresses timestamps on a 90 kHz clock; used to rescale AVFrame pts. */
static const AVRational default_tb = { 1, 90000 };

/* Map MFX_IOPATTERN_* flags to human-readable descriptions, consumed by
 * ff_qsvvpp_print_iopattern(). */
static const struct {
    int mfx_iopattern;
    const char *desc;
} qsv_iopatterns[] = {
    {MFX_IOPATTERN_IN_VIDEO_MEMORY,   "input is video memory surface"  },
    {MFX_IOPATTERN_IN_SYSTEM_MEMORY,  "input is system memory surface" },
    {MFX_IOPATTERN_IN_OPAQUE_MEMORY,  "input is opaque memory surface" },
    {MFX_IOPATTERN_OUT_VIDEO_MEMORY,  "output is video memory surface" },
    {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface"},
    {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface"},
};
54 
/* Log a human-readable description for the given MFX IOPattern flag at
 * verbose level, prefixed with extra_string.  Always returns 0. */
int ff_qsvvpp_print_iopattern(void *log_ctx, int mfx_iopattern,
                              const char *extra_string)
{
    const char *desc = NULL;

    for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
        if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
            desc = qsv_iopatterns[i].desc;
        }
    }
    if (!desc)
        desc = "unknown iopattern";

    av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
    return 0;
}
71 
/* Mapping from libmfx status codes to (AVERROR, description) pairs,
 * consumed by qsv_map_error().  MFX warnings (MFX_WRN_*) map to 0. */
static const struct {
    mfxStatus mfxerr;
    int averr;
    const char *desc;
} qsv_errors[] = {
    { MFX_ERR_NONE,                     0,               "success"                              },
    { MFX_ERR_UNKNOWN,                  AVERROR_UNKNOWN, "unknown error"                        },
    { MFX_ERR_NULL_PTR,                 AVERROR(EINVAL), "NULL pointer"                         },
    { MFX_ERR_UNSUPPORTED,              AVERROR(ENOSYS), "unsupported"                          },
    { MFX_ERR_MEMORY_ALLOC,             AVERROR(ENOMEM), "failed to allocate memory"            },
    { MFX_ERR_NOT_ENOUGH_BUFFER,        AVERROR(ENOMEM), "insufficient input/output buffer"     },
    { MFX_ERR_INVALID_HANDLE,           AVERROR(EINVAL), "invalid handle"                       },
    { MFX_ERR_LOCK_MEMORY,              AVERROR(EIO),    "failed to lock the memory block"      },
    { MFX_ERR_NOT_INITIALIZED,          AVERROR_BUG,     "not initialized"                      },
    { MFX_ERR_NOT_FOUND,                AVERROR(ENOSYS), "specified object was not found"       },
    /* the following 3 errors should always be handled explicitly, so those "mappings"
     * are for completeness only */
    { MFX_ERR_MORE_DATA,                AVERROR_UNKNOWN, "expect more data at input"            },
    { MFX_ERR_MORE_SURFACE,             AVERROR_UNKNOWN, "expect more surface at output"        },
    { MFX_ERR_MORE_BITSTREAM,           AVERROR_UNKNOWN, "expect more bitstream at output"      },
    { MFX_ERR_ABORTED,                  AVERROR_UNKNOWN, "operation aborted"                    },
    { MFX_ERR_DEVICE_LOST,              AVERROR(EIO),    "device lost"                          },
    { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters"        },
    { MFX_ERR_INVALID_VIDEO_PARAM,      AVERROR(EINVAL), "invalid video parameters"             },
    { MFX_ERR_UNDEFINED_BEHAVIOR,       AVERROR_BUG,     "undefined behavior"                   },
    { MFX_ERR_DEVICE_FAILED,            AVERROR(EIO),    "device failed"                        },
    { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters"        },
    { MFX_ERR_INVALID_AUDIO_PARAM,      AVERROR(EINVAL), "invalid audio parameters"             },

    { MFX_WRN_IN_EXECUTION,             0,               "operation in execution"               },
    { MFX_WRN_DEVICE_BUSY,              0,               "device busy"                          },
    { MFX_WRN_VIDEO_PARAM_CHANGED,      0,               "video parameters changed"             },
    { MFX_WRN_PARTIAL_ACCELERATION,     0,               "partial acceleration"                 },
    { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0,               "incompatible video parameters"        },
    { MFX_WRN_VALUE_NOT_CHANGED,        0,               "value is saturated"                   },
    { MFX_WRN_OUT_OF_RANGE,             0,               "value out of range"                   },
    { MFX_WRN_FILTER_SKIPPED,           0,               "filter skipped"                       },
    { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0,               "incompatible audio parameters"        },
};
111 
112 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
113 {
114  int i;
115  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
116  if (qsv_errors[i].mfxerr == mfx_err) {
117  if (desc)
118  *desc = qsv_errors[i].desc;
119  return qsv_errors[i].averr;
120  }
121  }
122  if (desc)
123  *desc = "unknown error";
124  return AVERROR_UNKNOWN;
125 }
126 
127 int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err,
128  const char *error_string)
129 {
130  const char *desc;
131  int ret;
132  ret = qsv_map_error(err, &desc);
133  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
134  return ret;
135 }
136 
137 int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err,
138  const char *warning_string)
139 {
140  const char *desc;
141  int ret;
142  ret = qsv_map_error(err, &desc);
143  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
144  return ret;
145 }
146 
147 /* functions for frameAlloc */
148 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
149  mfxFrameAllocResponse *resp)
150 {
151  QSVVPPContext *s = pthis;
152  int i;
153 
154  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
155  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
156  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
157  return MFX_ERR_UNSUPPORTED;
158 
159  if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
160  resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
161  if (!resp->mids)
162  return AVERROR(ENOMEM);
163 
164  for (i = 0; i < s->nb_surface_ptrs_in; i++)
165  resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;
166 
167  resp->NumFrameActual = s->nb_surface_ptrs_in;
168  } else {
169  resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
170  if (!resp->mids)
171  return AVERROR(ENOMEM);
172 
173  for (i = 0; i < s->nb_surface_ptrs_out; i++)
174  resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;
175 
176  resp->NumFrameActual = s->nb_surface_ptrs_out;
177  }
178 
179  return MFX_ERR_NONE;
180 }
181 
182 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
183 {
184  av_freep(&resp->mids);
185  return MFX_ERR_NONE;
186 }
187 
188 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
189 {
190  return MFX_ERR_UNSUPPORTED;
191 }
192 
193 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
194 {
195  return MFX_ERR_UNSUPPORTED;
196 }
197 
198 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
199 {
200  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
201  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
202 
203  pair_dst->first = pair_src->first;
204 
205  if (pair_src->second != (mfxMemId)MFX_INFINITE)
206  pair_dst->second = pair_src->second;
207  return MFX_ERR_NONE;
208 }
209 
/* Translate an AVPixelFormat into the matching MFX FourCC.
 * Unknown formats fall back to NV12. */
static int pix_fmt_to_mfx_fourcc(int format)
{
    switch (format) {
    case AV_PIX_FMT_YUV420P:
        return MFX_FOURCC_YV12;
    case AV_PIX_FMT_NV12:
        return MFX_FOURCC_NV12;
    case AV_PIX_FMT_YUYV422:
        return MFX_FOURCC_YUY2;
    case AV_PIX_FMT_BGRA:
        return MFX_FOURCC_RGB4;
    }

    return MFX_FOURCC_NV12;
}
225 
226 static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
227 {
228  switch (frame->format) {
229  case AV_PIX_FMT_NV12:
230  case AV_PIX_FMT_P010:
231  surface->Data.Y = frame->data[0];
232  surface->Data.UV = frame->data[1];
233  break;
234  case AV_PIX_FMT_YUV420P:
235  surface->Data.Y = frame->data[0];
236  surface->Data.U = frame->data[1];
237  surface->Data.V = frame->data[2];
238  break;
239  case AV_PIX_FMT_YUYV422:
240  surface->Data.Y = frame->data[0];
241  surface->Data.U = frame->data[0] + 1;
242  surface->Data.V = frame->data[0] + 3;
243  break;
244  case AV_PIX_FMT_RGB32:
245  surface->Data.B = frame->data[0];
246  surface->Data.G = frame->data[0] + 1;
247  surface->Data.R = frame->data[0] + 2;
248  surface->Data.A = frame->data[0] + 3;
249  break;
250  default:
251  return MFX_ERR_UNSUPPORTED;
252  }
253  surface->Data.Pitch = frame->linesize[0];
254 
255  return 0;
256 }
257 
/* fill the surface info */
/* Derive an mfxFrameInfo from an AVFilterLink.  For AV_PIX_FMT_QSV links
 * the info is copied from the first hardware surface; otherwise it is
 * built from the link's software format with 32-aligned dimensions.
 * Returns 0 on success or a negative AVERROR code. */
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
{
    enum AVPixelFormat pix_fmt;
    AVHWFramesContext *frames_ctx;
    AVQSVFramesContext *frames_hwctx;
    const AVPixFmtDescriptor *desc;

    if (link->format == AV_PIX_FMT_QSV) {
        if (!link->hw_frames_ctx)
            return AVERROR(EINVAL);

        frames_ctx   = (AVHWFramesContext *)link->hw_frames_ctx->data;
        frames_hwctx = frames_ctx->hwctx;
        *frameinfo   = frames_hwctx->surfaces[0].Info;
    } else {
        pix_fmt = link->format;
        desc = av_pix_fmt_desc_get(pix_fmt);
        if (!desc)
            return AVERROR_BUG;

        frameinfo->CropX          = 0;
        frameinfo->CropY          = 0;
        /* libmfx wants 32-aligned allocation dimensions; the visible area
         * is carried in CropW/CropH below. */
        frameinfo->Width          = FFALIGN(link->w, 32);
        frameinfo->Height         = FFALIGN(link->h, 32);
        frameinfo->PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
        frameinfo->FourCC         = pix_fmt_to_mfx_fourcc(pix_fmt);
        frameinfo->BitDepthLuma   = desc->comp[0].depth;
        frameinfo->BitDepthChroma = desc->comp[0].depth;
        /* >8-bit formats (e.g. P010) store samples shifted to the MSBs. */
        frameinfo->Shift          = desc->comp[0].depth > 8;
        if (desc->log2_chroma_w && desc->log2_chroma_h)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
        else if (desc->log2_chroma_w)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
        else
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
    }

    frameinfo->CropW          = link->w;
    frameinfo->CropH          = link->h;
    frameinfo->FrameRateExtN  = link->frame_rate.num;
    frameinfo->FrameRateExtD  = link->frame_rate.den;
    /* libmfx rejects a 0/0 aspect ratio; default to square pixels. */
    frameinfo->AspectRatioW   = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
    frameinfo->AspectRatioH   = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;

    return 0;
}
305 
/* Walk a QSVFrame list and free the AVFrames whose surfaces libmfx no
 * longer holds, returning those slots to the free pool (queued = 0). */
static void clear_unused_frames(QSVFrame *list)
{
    while (list) {
        /* list->queued==1 means the frame is not cached in VPP
         * process any more, it can be released to pool. */
        if ((list->queued == 1) && !list->surface.Data.Locked) {
            av_frame_free(&list->frame);
            list->queued = 0;
        }
        list = list->next;
    }
}
318 
/* Free an entire QSVFrame list, including every node and its AVFrame;
 * *list is left NULL.  Used at teardown. */
static void clear_frame_list(QSVFrame **list)
{
    while (*list) {
        QSVFrame *frame;

        frame = *list;
        *list = (*list)->next;
        av_frame_free(&frame->frame);
        av_freep(&frame);
    }
}
330 
/* Return a free slot from the QSVFrame list, marking it queued.  If no
 * slot is free, a new node is allocated and pushed at the list head.
 * Returns NULL on allocation failure. */
static QSVFrame *get_free_frame(QSVFrame **list)
{
    QSVFrame *out = *list;

    for (; out; out = out->next) {
        if (!out->queued) {
            out->queued = 1;
            break;
        }
    }

    if (!out) {
        out = av_mallocz(sizeof(*out));
        if (!out) {
            av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
            return NULL;
        }
        out->queued = 1;
        out->next   = *list;
        *list       = out;
    }

    return out;
}
355 
/* get the input surface */
/* Wrap the input AVFrame into a QSVFrame/mfxFrameSurface1 suitable for
 * MFXVideoVPP_RunFrameVPPAsync(): hw frames are referenced via data[3],
 * system-memory frames are mapped (and copied first when not 32-aligned).
 * Also rescales the pts to the 90 kHz MFX timebase and derives PicStruct
 * from the frame's interlacing/repeat flags.  Returns NULL on failure. */
static QSVFrame *submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    QSVFrame *qsv_frame;
    AVFilterContext *ctx = inlink->dst;

    clear_unused_frames(s->in_frame_list);

    qsv_frame = get_free_frame(&s->in_frame_list);
    if (!qsv_frame)
        return NULL;

    /* Turn AVFrame into mfxFrameSurface1.
     * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
     * mfxFrameSurface1 is stored in AVFrame->data[3];
     * for system memory mode, raw video data is stored in
     * AVFrame, we should map it into mfxFrameSurface1.
     */
    if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
        if (picref->format != AV_PIX_FMT_QSV) {
            av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
            return NULL;
        }
        /* NOTE(review): av_frame_clone() may return NULL on OOM; the
         * data[3] dereference below would then crash — verify/guard. */
        qsv_frame->frame   = av_frame_clone(picref);
        qsv_frame->surface = *(mfxFrameSurface1 *)qsv_frame->frame->data[3];
    } else {
        /* make a copy if the input is not padded as libmfx requires */
        if (picref->height & 31 || picref->linesize[0] & 31) {
            qsv_frame->frame = ff_get_video_buffer(inlink,
                                                   FFALIGN(inlink->w, 32),
                                                   FFALIGN(inlink->h, 32));
            if (!qsv_frame->frame)
                return NULL;

            qsv_frame->frame->width  = picref->width;
            qsv_frame->frame->height = picref->height;

            if (av_frame_copy(qsv_frame->frame, picref) < 0) {
                av_frame_free(&qsv_frame->frame);
                return NULL;
            }

            av_frame_copy_props(qsv_frame->frame, picref);
        } else
            qsv_frame->frame = av_frame_clone(picref);

        if (map_frame_to_surface(qsv_frame->frame,
                                 &qsv_frame->surface) < 0) {
            av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
            return NULL;
        }
    }

    qsv_frame->surface.Info           = s->frame_infos[FF_INLINK_IDX(inlink)];
    /* MFX timestamps run on the fixed 90 kHz default_tb clock. */
    qsv_frame->surface.Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
                                                     inlink->time_base, default_tb);

    qsv_frame->surface.Info.PicStruct =
            !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                                 MFX_PICSTRUCT_FIELD_BFF);
    if (qsv_frame->frame->repeat_pict == 1)
        qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
    else if (qsv_frame->frame->repeat_pict == 2)
        qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
    else if (qsv_frame->frame->repeat_pict == 4)
        qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

    return qsv_frame;
}
426 
/* get the output surface */
/* Obtain an output QSVFrame for the VPP result: a hardware surface from
 * the outlink's frames context for video memory, or an aligned software
 * buffer mapped into an mfx surface for system memory.
 * Returns NULL on failure. */
static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    QSVFrame *out_frame;
    int ret;

    clear_unused_frames(s->out_frame_list);

    out_frame = get_free_frame(&s->out_frame_list);
    if (!out_frame)
        return NULL;

    /* For video memory, get a hw frame;
     * For system memory, get a sw frame and map it into a mfx_surface. */
    if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
        out_frame->frame = av_frame_alloc();
        if (!out_frame->frame)
            return NULL;

        ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
            return NULL;
        }

        out_frame->surface = *(mfxFrameSurface1 *)out_frame->frame->data[3];
    } else {
        /* Get a frame with aligned dimensions.
         * Libmfx need system memory being 128x64 aligned */
        out_frame->frame = ff_get_video_buffer(outlink,
                                               FFALIGN(outlink->w, 128),
                                               FFALIGN(outlink->h, 64));
        if (!out_frame->frame)
            return NULL;

        out_frame->frame->width  = outlink->w;
        out_frame->frame->height = outlink->h;

        ret = map_frame_to_surface(out_frame->frame,
                                   &out_frame->surface);
        if (ret < 0)
            return NULL;
    }

    out_frame->surface.Info = s->vpp_param.vpp.Out;

    return out_frame;
}
476 
/* create the QSV session */
/* Set up the VPP session: determine input/output memory modes from the
 * filter links, build an output frames context for AV_PIX_FMT_QSV output,
 * then create a child MFX session mirroring the device session's
 * implementation/version/handle, join it, and install either the opaque
 * surface lists or our frame allocator as required.
 * Returns 0 on success or a negative AVERROR code.
 * NOTE: statement order matters here (query -> init -> set handle ->
 * join -> allocator); do not reorder. */
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
{
    AVFilterLink *inlink = avctx->inputs[0];
    AVFilterLink *outlink = avctx->outputs[0];
    AVQSVFramesContext *in_frames_hwctx = NULL;
    AVQSVFramesContext *out_frames_hwctx = NULL;

    AVBufferRef *device_ref;
    AVHWDeviceContext *device_ctx;
    AVQSVDeviceContext *device_hwctx;
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;
    int ret, i;

    /* Input memory mode follows the input frames context when present,
     * otherwise system memory with the filter's device context. */
    if (inlink->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;

        device_ref      = frames_ctx->device_ref;
        in_frames_hwctx = frames_ctx->hwctx;

        s->in_mem_mode = in_frames_hwctx->frame_type;

        s->surface_ptrs_in = av_calloc(in_frames_hwctx->nb_surfaces,
                                       sizeof(*s->surface_ptrs_in));
        if (!s->surface_ptrs_in)
            return AVERROR(ENOMEM);

        for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;

        s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
    } else if (avctx->hw_device_ctx) {
        device_ref     = avctx->hw_device_ctx;
        s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
    } else {
        av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
        return AVERROR(EINVAL);
    }

    device_ctx   = (AVHWDeviceContext *)device_ref->data;
    device_hwctx = device_ctx->hwctx;

    /* QSV output: build a fresh output frames context on the same device. */
    if (outlink->format == AV_PIX_FMT_QSV) {
        AVHWFramesContext *out_frames_ctx;
        AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
        if (!out_frames_ref)
            return AVERROR(ENOMEM);

        /* Opaque input forces opaque output; otherwise use video memory. */
        s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
                          MFX_MEMTYPE_OPAQUE_FRAME :
                          MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | MFX_MEMTYPE_FROM_VPPOUT;

        out_frames_ctx   = (AVHWFramesContext *)out_frames_ref->data;
        out_frames_hwctx = out_frames_ctx->hwctx;

        out_frames_ctx->format            = AV_PIX_FMT_QSV;
        out_frames_ctx->width             = FFALIGN(outlink->w, 32);
        out_frames_ctx->height            = FFALIGN(outlink->h, 32);
        out_frames_ctx->sw_format         = s->out_sw_format;
        out_frames_ctx->initial_pool_size = 64;
        if (avctx->extra_hw_frames > 0)
            out_frames_ctx->initial_pool_size += avctx->extra_hw_frames;
        out_frames_hwctx->frame_type      = s->out_mem_mode;

        ret = av_hwframe_ctx_init(out_frames_ref);
        if (ret < 0) {
            av_buffer_unref(&out_frames_ref);
            av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
            return ret;
        }

        s->surface_ptrs_out = av_calloc(out_frames_hwctx->nb_surfaces,
                                        sizeof(*s->surface_ptrs_out));
        if (!s->surface_ptrs_out) {
            av_buffer_unref(&out_frames_ref);
            return AVERROR(ENOMEM);
        }

        for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;

        av_buffer_unref(&outlink->hw_frames_ctx);
        outlink->hw_frames_ctx = out_frames_ref;
    } else
        s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;

    /* extract the properties of the "master" session given to us */
    ret = MFXQueryIMPL(device_hwctx->session, &impl);
    if (ret == MFX_ERR_NONE)
        ret = MFXQueryVersion(device_hwctx->session, &ver);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_VA_DISPLAY;
    } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_D3D11_DEVICE;
    } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
    } else {
        av_log(avctx, AV_LOG_ERROR, "Error unsupported handle type\n");
        return AVERROR_UNKNOWN;
    }

    ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_type, &handle);
    if (ret < 0)
        return ff_qsvvpp_print_error(avctx, ret, "Error getting the session handle");
    else if (ret > 0) {
        ff_qsvvpp_print_warning(avctx, ret, "Warning in getting the session handle");
        return AVERROR_UNKNOWN;
    }

    /* create a "slave" session with those same properties, to be used for vpp */
    ret = MFXInit(impl, &ver, &s->session);
    if (ret < 0)
        return ff_qsvvpp_print_error(avctx, ret, "Error initializing a session");
    else if (ret > 0) {
        ff_qsvvpp_print_warning(avctx, ret, "Warning in session initialization");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    /* Joining sessions lets them share the device; gated on runtime 1.25. */
    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        ret = MFXJoinSession(device_hwctx->session, s->session);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        /* Opaque mode: hand libmfx the surface lists via an ext buffer. */
        s->opaque_alloc.In.Surfaces   = s->surface_ptrs_in;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
        s->opaque_alloc.In.Type       = s->in_mem_mode;

        s->opaque_alloc.Out.Surfaces   = s->surface_ptrs_out;
        s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
        s->opaque_alloc.Out.Type       = s->out_mem_mode;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
    } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
        /* Video memory: install our allocator callbacks (see frame_alloc). */
        mfxFrameAllocator frame_allocator = {
            .pthis  = s,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    return 0;
}
644 
645 static unsigned int qsv_fifo_item_size(void)
646 {
647  return sizeof(mfxSyncPoint) + sizeof(QSVFrame*);
648 }
649 
650 static unsigned int qsv_fifo_size(const AVFifoBuffer* fifo)
651 {
652  return av_fifo_size(fifo)/qsv_fifo_item_size();
653 }
654 
/* Allocate and initialize a QSVVPPContext from the given filter context
 * and parameters: creates the MFX session, fills per-input frame infos
 * (with optional crops), configures ext buffers and IOPattern, sizes the
 * async fifo, and finally initializes the VPP component.
 * On success *vpp owns the context; on failure everything is freed.
 * Returns 0 or a negative AVERROR code. */
int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
{
    int i;
    int ret;
    QSVVPPContext *s;

    s = av_mallocz(sizeof(*s));
    if (!s)
        return AVERROR(ENOMEM);

    /* Caller may supply a custom frame delivery callback; default to
     * passing frames straight to the next filter. */
    s->filter_frame  = param->filter_frame;
    if (!s->filter_frame)
        s->filter_frame = ff_filter_frame;
    s->out_sw_format = param->out_sw_format;

    /* create the vpp session */
    ret = init_vpp_session(avctx, s);
    if (ret < 0)
        goto failed;

    s->frame_infos = av_calloc(avctx->nb_inputs, sizeof(*s->frame_infos));
    if (!s->frame_infos) {
        ret = AVERROR(ENOMEM);
        goto failed;
    }

    /* Init each input's information */
    for (i = 0; i < avctx->nb_inputs; i++) {
        ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
        if (ret < 0)
            goto failed;
    }

    /* Update input's frame info according to crop */
    for (i = 0; i < param->num_crop; i++) {
        QSVVPPCrop *crop = param->crop + i;
        /* NOTE(review): '>' permits in_idx == nb_inputs, which would index
         * one past the end of frame_infos — likely should be '>='. */
        if (crop->in_idx > avctx->nb_inputs) {
            ret = AVERROR(EINVAL);
            goto failed;
        }
        s->frame_infos[crop->in_idx].CropX = crop->x;
        s->frame_infos[crop->in_idx].CropY = crop->y;
        s->frame_infos[crop->in_idx].CropW = crop->w;
        s->frame_infos[crop->in_idx].CropH = crop->h;
    }

    s->vpp_param.vpp.In = s->frame_infos[0];

    ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Fail to get frame info from link.\n");
        goto failed;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        /* Prepend the opaque-surface ext buffer to the caller's list. */
        s->nb_ext_buffers = param->num_ext_buf + 1;
        s->ext_buffers = av_calloc(s->nb_ext_buffers, sizeof(*s->ext_buffers));
        if (!s->ext_buffers) {
            ret = AVERROR(ENOMEM);
            goto failed;
        }

        s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
        /* NOTE(review): this loop copies only num_ext_buf - 1 caller
         * buffers — the last ext_buf entry is never copied; the bound
         * looks like it should be i < nb_ext_buffers. Verify. */
        for (i = 1; i < param->num_ext_buf; i++)
            s->ext_buffers[i]    = param->ext_buf[i - 1];
        s->vpp_param.ExtParam    = s->ext_buffers;
        s->vpp_param.NumExtParam = s->nb_ext_buffers;
    } else {
        s->vpp_param.NumExtParam = param->num_ext_buf;
        s->vpp_param.ExtParam    = param->ext_buf;
    }

    s->got_frame = 0;

    /** keep fifo size at least 1. Even when async_depth is 0, fifo is used. */
    s->async_fifo  = av_fifo_alloc((param->async_depth + 1) * qsv_fifo_item_size());
    s->async_depth = param->async_depth;
    if (!s->async_fifo) {
        ret = AVERROR(ENOMEM);
        goto failed;
    }

    s->vpp_param.AsyncDepth = param->async_depth;

    if (IS_SYSTEM_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;

    if (IS_SYSTEM_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;

    /* Print input memory mode */
    ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0x0F, "VPP");
    /* Print output memory mode */
    ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0xF0, "VPP");
    ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
    if (ret < 0) {
        ret = ff_qsvvpp_print_error(avctx, ret, "Failed to create a qsvvpp");
        goto failed;
    } else if (ret > 0)
        ff_qsvvpp_print_warning(avctx, ret, "Warning When creating qsvvpp");

    *vpp = s;
    return 0;

failed:
    ff_qsvvpp_free(&s);

    return ret;
}
772 
/* Tear down a QSVVPPContext: close the MFX session, free frame lists,
 * surface pointer arrays, ext buffers, frame infos and the async fifo,
 * then free the context and NULL the caller's pointer.  NULL-safe.
 * Always returns 0. */
int ff_qsvvpp_free(QSVVPPContext **vpp)
{
    QSVVPPContext *s = *vpp;

    if (!s)
        return 0;

    if (s->session) {
        MFXVideoVPP_Close(s->session);
        MFXClose(s->session);
    }

    /* release all the resources */
    clear_frame_list(&s->in_frame_list);
    clear_frame_list(&s->out_frame_list);
    av_freep(&s->surface_ptrs_in);
    av_freep(&s->surface_ptrs_out);
    av_freep(&s->ext_buffers);
    av_freep(&s->frame_infos);
    av_fifo_free(s->async_fifo);
    av_freep(vpp);

    return 0;
}
797 
/* Run one input frame through VPP and deliver finished output frames via
 * s->filter_frame().  With picref == NULL (and s->eof set) only the
 * pending async operations queued in the fifo are drained.
 * Returns 0 or a negative AVERROR code. */
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    AVFilterContext *ctx     = inlink->dst;
    AVFilterLink *outlink    = ctx->outputs[0];
    mfxSyncPoint sync;
    QSVFrame *in_frame, *out_frame, *tmp;
    int ret, filter_ret;

    /* At EOF, flush every (frame, syncpoint) pair still in the fifo. */
    while (s->eof && qsv_fifo_size(s->async_fifo)) {
        av_fifo_generic_read(s->async_fifo, &tmp, sizeof(tmp), NULL);
        av_fifo_generic_read(s->async_fifo, &sync, sizeof(sync), NULL);
        if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0)
            av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");

        filter_ret = s->filter_frame(outlink, tmp->frame);
        if (filter_ret < 0) {
            av_frame_free(&tmp->frame);
            return filter_ret;
        }
        tmp->queued--;
        s->got_frame = 1;
        /* Ownership of the AVFrame moved to filter_frame above. */
        tmp->frame = NULL;
    };

    if (!picref)
        return 0;

    in_frame = submit_frame(s, inlink, picref);
    if (!in_frame) {
        av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
               FF_INLINK_IDX(inlink));
        return AVERROR(ENOMEM);
    }

    /* One input may yield several outputs (e.g. deinterlace/frc), so loop
     * while libmfx reports MFX_ERR_MORE_SURFACE. */
    do {
        out_frame = query_frame(s, outlink);
        if (!out_frame) {
            av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
            return AVERROR(ENOMEM);
        }

        do {
            ret = MFXVideoVPP_RunFrameVPPAsync(s->session, &in_frame->surface,
                                               &out_frame->surface, NULL, &sync);
            if (ret == MFX_WRN_DEVICE_BUSY)
                av_usleep(500);
        } while (ret == MFX_WRN_DEVICE_BUSY);

        if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
            /* Ignore more_data error */
            if (ret == MFX_ERR_MORE_DATA)
                return AVERROR(EAGAIN);
            break;
        }
        /* Convert the 90 kHz MFX timestamp back to the outlink timebase. */
        out_frame->frame->pts = av_rescale_q(out_frame->surface.Data.TimeStamp,
                                             default_tb, outlink->time_base);

        out_frame->queued++;
        av_fifo_generic_write(s->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_write(s->async_fifo, &sync, sizeof(sync), NULL);


        /* Once more than async_depth operations are in flight, sync and
         * deliver the oldest one. */
        if (qsv_fifo_size(s->async_fifo) > s->async_depth) {
            av_fifo_generic_read(s->async_fifo, &tmp, sizeof(tmp), NULL);
            av_fifo_generic_read(s->async_fifo, &sync, sizeof(sync), NULL);

            do {
                ret = MFXVideoCORE_SyncOperation(s->session, sync, 1000);
            } while (ret == MFX_WRN_IN_EXECUTION);

            filter_ret = s->filter_frame(outlink, tmp->frame);
            if (filter_ret < 0) {
                av_frame_free(&tmp->frame);
                return filter_ret;
            }

            tmp->queued--;
            s->got_frame = 1;
            tmp->frame = NULL;
        }
    } while(ret == MFX_ERR_MORE_SURFACE);

    if (ret < 0)
        return ff_qsvvpp_print_error(ctx, ret, "Error running VPP");
    else if (ret > 0)
        ff_qsvvpp_print_warning(ctx, ret, "Warning in running VPP");

    return 0;
}
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:98
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: qsvvpp.c:198
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
QSVVPPCrop::in_idx
int in_idx
Input index.
Definition: qsvvpp.h:80
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:49
mfx_iopattern
int mfx_iopattern
Definition: qsvvpp.c:44
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
IS_OPAQUE_MEMORY
#define IS_OPAQUE_MEMORY(mode)
Definition: qsvvpp.c:37
av_fifo_generic_write
int av_fifo_generic_write(AVFifoBuffer *f, void *src, int size, int(*func)(void *, void *, int))
Feed data from a user-supplied callback to an AVFifoBuffer.
Definition: fifo.c:122
out
FILE * out
Definition: movenc.c:54
init_vpp_session
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
Definition: qsvvpp.c:478
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: qsvvpp.c:148
QSVVPPParam::crop
QSVVPPCrop * crop
Definition: qsvvpp.h:97
QSVVPPParam::out_sw_format
enum AVPixelFormat out_sw_format
Definition: qsvvpp.h:93
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1018
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2660
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
av_fifo_free
void av_fifo_free(AVFifoBuffer *f)
Free an AVFifoBuffer.
Definition: fifo.c:55
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:209
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:109
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:333
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:317
tmp
static uint8_t tmp[11]
Definition: aes_ctr.c:26
pixdesc.h
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:424
AVFrame::width
int width
Definition: frame.h:389
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:247
AVFrame::top_field_first
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: frame.h:474
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVFilterContext::hw_device_ctx
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
Definition: avfilter.h:458
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
mathematics.h
av_fifo_generic_read
int av_fifo_generic_read(AVFifoBuffer *f, void *dest, int buf_size, void(*func)(void *, void *, int))
Feed data from an AVFifoBuffer to a user-supplied callback.
Definition: fifo.c:213
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
video.h
QSVFrame::frame
AVFrame * frame
Definition: qsv_internal.h:73
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:338
AVFifoBuffer
Definition: fifo.h:31
qsvvpp.h
AVFilterContext::extra_hw_frames
int extra_hw_frames
Sets the number of extra hardware frames which the filter will allocate on its output links for use i...
Definition: avfilter.h:488
clear_unused_frames
static void clear_unused_frames(QSVFrame *list)
Definition: qsvvpp.c:306
AVRational::num
int num
Numerator.
Definition: rational.h:59
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:97
ff_qsvvpp_print_iopattern
int ff_qsvvpp_print_iopattern(void *log_ctx, int mfx_iopattern, const char *extra_string)
Definition: qsvvpp.c:55
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: qsvvpp.c:39
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:229
QSVVPPCrop::w
int w
Definition: qsvvpp.h:81
s
#define s(width, name)
Definition: cbs_vp9.c:257
ff_qsvvpp_create
int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
Definition: qsvvpp.c:655
QSV_RUNTIME_VERSION_ATLEAST
#define QSV_RUNTIME_VERSION_ATLEAST(MFX_VERSION, MAJOR, MINOR)
Definition: qsv_internal.h:59
ctx
AVFormatContext * ctx
Definition: movenc.c:48
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:422
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:141
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demuxing_decoding.c:41
default_tb
static const AVRational default_tb
Definition: qsvvpp.c:41
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
QSVVPPParam::async_depth
int async_depth
Definition: qsvvpp.h:99
if
if(ret)
Definition: filter_design.txt:179
fill_frameinfo_by_link
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
Definition: qsvvpp.c:259
QSVFrame
Definition: qsv_internal.h:72
QSVVPPContext
Definition: qsvvpp.h:50
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:43
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:537
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
QSVVPPParam::num_crop
int num_crop
Definition: qsvvpp.h:96
QSVVPPParam
Definition: qsvvpp.h:84
QSVVPPCrop::x
int x
Definition: qsvvpp.h:81
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
qsv_iopatterns
static const struct @211 qsv_iopatterns[]
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:141
pix_fmt_to_mfx_fourcc
static int pix_fmt_to_mfx_fourcc(int format)
Definition: qsvvpp.c:210
ff_qsvvpp_print_error
int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err, const char *error_string)
Definition: qsvvpp.c:127
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:410
list
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining list
Definition: filter_design.txt:25
QSVFrame::surface
mfxFrameSurface1 surface
Definition: qsv_internal.h:74
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:212
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
ff_qsvvpp_free
int ff_qsvvpp_free(QSVVPPContext **vpp)
Definition: qsvvpp.c:773
AVFilterContext::nb_inputs
unsigned nb_inputs
number of input pads
Definition: avfilter.h:411
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:678
IS_VIDEO_MEMORY
#define IS_VIDEO_MEMORY(mode)
Definition: qsvvpp.c:35
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: qsvvpp.c:193
format
ofilter format
Definition: ffmpeg_filter.c:172
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:404
mfxerr
mfxStatus mfxerr
Definition: qsvvpp.c:73
AV_PIX_FMT_RGB32
#define AV_PIX_FMT_RGB32
Definition: pixfmt.h:377
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_qsv.h:44
internal.h
AVFrame::interlaced_frame
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:469
hwcontext_qsv.h
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:271
common.h
desc
const char * desc
Definition: qsvvpp.c:45
QSVVPPParam::num_ext_buf
int num_ext_buf
Definition: qsvvpp.h:89
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: qsvvpp.c:188
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:263
QSVVPPParam::filter_frame
int(* filter_frame)(AVFilterLink *outlink, AVFrame *frame)
Definition: qsvvpp.h:86
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:271
qsv_fifo_item_size
static unsigned int qsv_fifo_item_size(void)
Definition: qsvvpp.c:645
map_frame_to_surface
static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
Definition: qsvvpp.c:226
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
ret
ret
Definition: filter_design.txt:187
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
QSVFrame::queued
int queued
Definition: qsv_internal.h:81
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:162
AVFrame::sample_aspect_ratio
AVRational sample_aspect_ratio
Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
Definition: frame.h:419
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:643
QSVVPPCrop::h
int h
Crop rectangle.
Definition: qsvvpp.h:81
QSVVPPCrop::y
int y
Definition: qsvvpp.h:81
AVFrame::height
int height
Definition: frame.h:389
ff_qsvvpp_filter_frame
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
Definition: qsvvpp.c:798
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AVRational::den
int den
Denominator.
Definition: rational.h:60
averr
int averr
Definition: qsvvpp.c:74
FF_INLINK_IDX
#define FF_INLINK_IDX(link)
Find the index of a link.
Definition: internal.h:334
qsv_map_error
static int qsv_map_error(mfxStatus mfx_err, const char **desc)
Definition: qsvvpp.c:112
clear_frame_list
static void clear_frame_list(QSVFrame **list)
Definition: qsvvpp.c:319
AVFilterContext
An instance of a filter.
Definition: avfilter.h:402
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:453
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:42
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:199
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
av_fifo_size
int av_fifo_size(const AVFifoBuffer *f)
Return the amount of data in bytes in the AVFifoBuffer, that is the amount of data you can read from ...
Definition: fifo.c:77
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
av_fifo_alloc
AVFifoBuffer * av_fifo_alloc(unsigned int size)
Initialize an AVFifoBuffer.
Definition: fifo.c:43
hwcontext.h
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:52
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:362
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
IS_SYSTEM_MEMORY
#define IS_SYSTEM_MEMORY(mode)
Definition: qsvvpp.c:38
QSVVPPCrop
Definition: qsvvpp.h:79
get_free_frame
static QSVFrame * get_free_frame(QSVFrame **list)
Definition: qsvvpp.c:331
qsv_errors
static const struct @212 qsv_errors[]
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:502
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: qsvvpp.c:182
qsv_fifo_size
static unsigned int qsv_fifo_size(const AVFifoBuffer *fifo)
Definition: qsvvpp.c:650
ff_qsvvpp_print_warning
int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err, const char *warning_string)
Definition: qsvvpp.c:137
AVFrame::repeat_pict
int repeat_pict
When decoding, this signals how much the picture must be delayed.
Definition: frame.h:464
QSVVPPParam::ext_buf
mfxExtBuffer ** ext_buf
Definition: qsvvpp.h:90
submit_frame
static QSVFrame * submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
Definition: qsvvpp.c:357
AVFilterContext::outputs
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:414
query_frame
static QSVFrame * query_frame(QSVVPPContext *s, AVFilterLink *outlink)
Definition: qsvvpp.c:428