FFmpeg
vf_amf_common.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "vf_amf_common.h"
20 
21 #include "libavutil/avassert.h"
22 #include "avfilter.h"
23 #include "avfilter_internal.h"
24 #include "formats.h"
25 #include "libavutil/mem.h"
26 #include "libavutil/imgutils.h"
27 
28 #include "libavutil/hwcontext_amf.h"
29 #include "libavutil/hwcontext_amf_internal.h"
30 #include "AMF/components/ColorSpace.h"
31 #include "scale_eval.h"
32 
33 #if CONFIG_DXVA2
34 #include <d3d9.h>
35 #endif
36 
37 #if CONFIG_D3D11VA
38 #include <d3d11.h>
39 #endif
40 
41 int amf_filter_init(AVFilterContext *avctx)
42 {
43  AMFFilterContext *ctx = avctx->priv;
44 
45  if (!strcmp(ctx->format_str, "same")) {
46  ctx->format = AV_PIX_FMT_NONE;
47  } else {
48  ctx->format = av_get_pix_fmt(ctx->format_str);
49  if (ctx->format == AV_PIX_FMT_NONE) {
50  av_log(avctx, AV_LOG_ERROR, "Unrecognized pixel format: %s\n", ctx->format_str);
51  return AVERROR(EINVAL);
52  }
53  }
54 
55  return 0;
56 }
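
amf_filter_init() only parses the option strings that the owning filter exposes through its AVOption table. A minimal sketch of such a table, assuming the usual offsetof/AVOption machinery from libavutil/opt.h; the table itself is hypothetical and the real ones live in the individual AMF filters (w_expr, h_expr and format_str are the AMFFilterContext fields used by this file):

#define OFFSET(x) offsetof(AMFFilterContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)

static const AVOption hypothetical_amf_options[] = {
    { "w",      "Output video width",  OFFSET(w_expr),     AV_OPT_TYPE_STRING, { .str = "iw"   }, 0, 0, FLAGS },
    { "h",      "Output video height", OFFSET(h_expr),     AV_OPT_TYPE_STRING, { .str = "ih"   }, 0, 0, FLAGS },
    { "format", "Output pixel format", OFFSET(format_str), AV_OPT_TYPE_STRING, { .str = "same" }, 0, 0, FLAGS },
    { NULL }
};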
57 
58 void amf_filter_uninit(AVFilterContext *avctx)
59 {
60  AMFFilterContext *ctx = avctx->priv;
61 
62  if (ctx->component) {
63  ctx->component->pVtbl->Terminate(ctx->component);
64  ctx->component->pVtbl->Release(ctx->component);
65  ctx->component = NULL;
66  }
67 
68  av_buffer_unref(&ctx->amf_device_ref);
69  av_buffer_unref(&ctx->hwdevice_ref);
70  av_buffer_unref(&ctx->hwframes_in_ref);
71  av_buffer_unref(&ctx->hwframes_out_ref);
72 }
73 
74 int amf_filter_filter_frame(AVFilterLink *inlink, AVFrame *in)
75 {
76  AVFilterContext *avctx = inlink->dst;
77  AMFFilterContext *ctx = avctx->priv;
78  AVFilterLink *outlink = avctx->outputs[0];
79  AMF_RESULT res;
80  AMFSurface *surface_in;
81  AMFSurface *surface_out;
82  AMFData *data_out = NULL;
83  enum AVColorSpace out_colorspace;
84  enum AVColorRange out_color_range;
85 
86  AVFrame *out = NULL;
87  int ret = 0;
88 
89  if (!ctx->component)
90  return AVERROR(EINVAL);
91 
92  ret = amf_avframe_to_amfsurface(avctx, in, &surface_in);
93  if (ret < 0)
94  goto fail;
95 
96  res = ctx->component->pVtbl->SubmitInput(ctx->component, (AMFData*)surface_in);
97  surface_in->pVtbl->Release(surface_in); // release surface after use
98  AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
99  res = ctx->component->pVtbl->QueryOutput(ctx->component, &data_out);
100  AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "QueryOutput() failed with error %d\n", res);
101 
102  if (data_out) {
103  AMFGuid guid = IID_AMFSurface();
104  res = data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface_out); // query for the AMFSurface interface
105  data_out->pVtbl->Release(data_out);
106  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "QueryInterface(IID_AMFSurface) failed with error %d\n", res);
107  } else {
108  return AVERROR(EAGAIN);
109  }
110 
111  out = amf_amfsurface_to_avframe(avctx, surface_out);
112 
113  ret = av_frame_copy_props(out, in);
114  av_frame_unref(in);
115 
116  out_colorspace = AVCOL_SPC_UNSPECIFIED;
117 
118  if (ctx->color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN) {
119  switch(ctx->color_profile) {
120  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_601:
121  out_colorspace = AVCOL_SPC_SMPTE170M;
122  break;
123  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_709:
124  out_colorspace = AVCOL_SPC_BT709;
125  break;
126  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020:
127  out_colorspace = AVCOL_SPC_BT2020_NCL;
128  break;
129  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_JPEG:
130  out_colorspace = AVCOL_SPC_RGB;
131  break;
132  default:
133  out_colorspace = AVCOL_SPC_UNSPECIFIED;
134  break;
135  }
136  out->colorspace = out_colorspace;
137  }
138 
139  out_color_range = AVCOL_RANGE_UNSPECIFIED;
140  if (ctx->color_range == AMF_COLOR_RANGE_FULL)
141  out_color_range = AVCOL_RANGE_JPEG;
142  else if (ctx->color_range == AMF_COLOR_RANGE_STUDIO)
143  out_color_range = AVCOL_RANGE_MPEG;
144 
145  if (ctx->color_range != AMF_COLOR_RANGE_UNDEFINED)
146  out->color_range = out_color_range;
147 
148  if (ctx->primaries != AMF_COLOR_PRIMARIES_UNDEFINED)
149  out->color_primaries = ctx->primaries;
150 
151  if (ctx->trc != AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED)
152  out->color_trc = ctx->trc;
153 
154 
155  if (ret < 0)
156  goto fail;
157 
158  out->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
159  if (!out->hw_frames_ctx) {
160  ret = AVERROR(ENOMEM);
161  goto fail;
162  }
163 
164  av_frame_free(&in);
165  return ff_filter_frame(outlink, out);
166 fail:
167  av_frame_free(&in);
168  av_frame_free(&out);
169  return ret;
170 }
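
amf_filter_filter_frame() is intended to be wired up as the .filter_frame callback of a filter's input pad. A hypothetical pad declaration (sketch only, assuming the internal AVFilterPad definition from libavfilter/filters.h; the concrete AMF filters define their own pads):

static const AVFilterPad hypothetical_amf_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = amf_filter_filter_frame,
    },
};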
171 
172 
173 
174 int amf_setup_input_output_formats(AVFilterContext *avctx,
175  const enum AVPixelFormat *input_pix_fmts,
176  const enum AVPixelFormat *output_pix_fmts)
177 {
178  int err;
179  AVFilterFormats *input_formats;
180  AVFilterFormats *output_formats;
181 
182  // If hw_device_ctx is set to DXVA2, reorder the pixel formats so that DXVA2 is chosen by default
183  // The order is ignored if hw_frames_ctx is not NULL at the config_output stage
184  if (avctx->hw_device_ctx) {
185  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
186 
187  switch (device_ctx->type) {
188  #if CONFIG_D3D11VA
189  case AV_HWDEVICE_TYPE_D3D11VA:
190  {
191  static const enum AVPixelFormat output_pix_fmts_d3d11[] = {
192  AV_PIX_FMT_D3D11,
193  AV_PIX_FMT_NONE,
194  };
195  output_pix_fmts = output_pix_fmts_d3d11;
196  }
197  break;
198  #endif
199  #if CONFIG_DXVA2
200  case AV_HWDEVICE_TYPE_DXVA2:
201  {
202  static const enum AVPixelFormat output_pix_fmts_dxva2[] = {
203  AV_PIX_FMT_DXVA2_VLD,
204  AV_PIX_FMT_NONE,
205  };
206  output_pix_fmts = output_pix_fmts_dxva2;
207  }
208  break;
209  #endif
210  case AV_HWDEVICE_TYPE_AMF:
211  break;
212  default:
213  {
214  av_log(avctx, AV_LOG_ERROR, "Unsupported device : %s\n", av_hwdevice_get_type_name(device_ctx->type));
215  return AVERROR(EINVAL);
216  }
217  break;
218  }
219  }
220 
221  input_formats = ff_make_format_list(input_pix_fmts);
222  if (!input_formats) {
223  return AVERROR(ENOMEM);
224  }
225  output_formats = ff_make_format_list(output_pix_fmts);
226  if (!output_formats) {
227  return AVERROR(ENOMEM);
228  }
229 
230  if ((err = ff_formats_ref(input_formats, &avctx->inputs[0]->outcfg.formats)) < 0)
231  return err;
232 
233  if ((err = ff_formats_ref(output_formats, &avctx->outputs[0]->incfg.formats)) < 0)
234  return err;
235  return 0;
236 }
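
A format-negotiation callback would typically pass its supported input and output format lists straight through to amf_setup_input_output_formats(). A minimal sketch, with illustrative format lists that are not taken from this file:

static const enum AVPixelFormat hypothetical_in_fmts[] = {
    AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P, AV_PIX_FMT_AMF_SURFACE, AV_PIX_FMT_NONE
};
static const enum AVPixelFormat hypothetical_out_fmts[] = {
    AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P, AV_PIX_FMT_AMF_SURFACE, AV_PIX_FMT_NONE
};

static int hypothetical_query_formats(AVFilterContext *avctx)
{
    return amf_setup_input_output_formats(avctx, hypothetical_in_fmts, hypothetical_out_fmts);
}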
237 
238 int amf_copy_surface(AVFilterContext *avctx, const AVFrame *frame,
239  AMFSurface* surface)
240 {
241  AMFPlane *plane;
242  uint8_t *dst_data[4];
243  int dst_linesize[4];
244  int planes;
245  int i;
246 
247  planes = (int)surface->pVtbl->GetPlanesCount(surface);
248  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
249 
250  for (i = 0; i < planes; i++) {
251  plane = surface->pVtbl->GetPlaneAt(surface, i);
252  dst_data[i] = plane->pVtbl->GetNative(plane);
253  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
254  }
255  av_image_copy(dst_data, dst_linesize,
256  (const uint8_t**)frame->data, frame->linesize, frame->format,
257  frame->width, frame->height);
258 
259  return 0;
260 }
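
amf_copy_surface() copies a system-memory AVFrame into a host AMF surface plane by plane. The reverse direction would use the same plane accessors; a sketch of such a helper, assuming a surface allocated with AMF_MEMORY_HOST and an already-allocated destination frame of matching format and size (hypothetical, not part of this file):

static int hypothetical_surface_to_frame(AVFrame *frame, AMFSurface *surface)
{
    const uint8_t *src_data[4] = { NULL };
    int src_linesize[4] = { 0 };
    int planes = (int)surface->pVtbl->GetPlanesCount(surface);
    av_assert0(planes < FF_ARRAY_ELEMS(src_data));

    for (int i = 0; i < planes; i++) {
        AMFPlane *plane = surface->pVtbl->GetPlaneAt(surface, i);
        src_data[i]     = plane->pVtbl->GetNative(plane);
        src_linesize[i] = plane->pVtbl->GetHPitch(plane);
    }
    av_image_copy(frame->data, frame->linesize,
                  src_data, src_linesize, frame->format,
                  frame->width, frame->height);
    return 0;
}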
261 
262 int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
263 {
264  int err;
265  AMF_RESULT res;
266  AVFilterContext *avctx = outlink->src;
267  AVFilterLink *inlink = avctx->inputs[0];
268  AMFFilterContext *ctx = avctx->priv;
269  AVHWFramesContext *hwframes_out;
270  AVHWDeviceContext *hwdev_ctx;
271  enum AVPixelFormat in_sw_format = inlink->format;
272  enum AVPixelFormat out_sw_format = ctx->format;
273  FilterLink *inl = ff_filter_link(inlink);
274  FilterLink *outl = ff_filter_link(outlink);
275  double w_adj = 1.0;
276 
277  if ((err = ff_scale_eval_dimensions(avctx,
278  ctx->w_expr, ctx->h_expr,
279  inlink, outlink,
280  &ctx->width, &ctx->height)) < 0)
281  return err;
282 
283  if (ctx->reset_sar && inlink->sample_aspect_ratio.num)
284  w_adj = (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den;
285 
286  ff_scale_adjust_dimensions(inlink, &ctx->width, &ctx->height,
287  ctx->force_original_aspect_ratio, ctx->force_divisible_by, w_adj);
288 
289  av_buffer_unref(&ctx->amf_device_ref);
290  av_buffer_unref(&ctx->hwframes_in_ref);
291  av_buffer_unref(&ctx->hwframes_out_ref);
292  ctx->local_context = 0;
293  if (inl->hw_frames_ctx) {
294  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)inl->hw_frames_ctx->data;
295  if (av_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
296  av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
297  av_get_pix_fmt_name(frames_ctx->sw_format));
298  return AVERROR(EINVAL);
299  }
300 
301  err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, frames_ctx->device_ref, 0);
302  if (err < 0)
303  return err;
304 
305  ctx->hwframes_in_ref = av_buffer_ref(inl->hw_frames_ctx);
306  if (!ctx->hwframes_in_ref)
307  return AVERROR(ENOMEM);
308 
309  in_sw_format = frames_ctx->sw_format;
310  } else if (avctx->hw_device_ctx) {
311  err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, avctx->hw_device_ctx, 0);
312  if (err < 0)
313  return err;
314  ctx->hwdevice_ref = av_buffer_ref(avctx->hw_device_ctx);
315  if (!ctx->hwdevice_ref)
316  return AVERROR(ENOMEM);
317  } else {
318  res = av_hwdevice_ctx_create(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
319  AMF_RETURN_IF_FALSE(avctx, res == 0, res, "Failed to create hardware device context (AMF) : %s\n", av_err2str(res));
320 
321  }
322  if(out_sw_format == AV_PIX_FMT_NONE){
323  if(outlink->format == AV_PIX_FMT_AMF_SURFACE)
324  out_sw_format = in_sw_format;
325  else
326  out_sw_format = outlink->format;
327  }
328  ctx->hwframes_out_ref = av_hwframe_ctx_alloc(ctx->amf_device_ref);
329  if (!ctx->hwframes_out_ref)
330  return AVERROR(ENOMEM);
331  hwframes_out = (AVHWFramesContext*)ctx->hwframes_out_ref->data;
332  hwdev_ctx = (AVHWDeviceContext*)ctx->amf_device_ref->data;
333  if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF)
334  {
335  ctx->amf_device_ctx = hwdev_ctx->hwctx;
336  }
337  hwframes_out->format = AV_PIX_FMT_AMF_SURFACE;
338  hwframes_out->sw_format = out_sw_format;
339 
340  if (inlink->format == AV_PIX_FMT_AMF_SURFACE) {
341  *in_format = in_sw_format;
342  } else {
343  *in_format = inlink->format;
344  }
345  outlink->w = ctx->width;
346  outlink->h = ctx->height;
347 
348  if (ctx->reset_sar)
349  outlink->sample_aspect_ratio = (AVRational){1, 1};
350  else if (inlink->sample_aspect_ratio.num) {
351  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink->w, outlink->w * inlink->h}, inlink->sample_aspect_ratio);
352  } else
353  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
354 
355  hwframes_out->width = outlink->w;
356  hwframes_out->height = outlink->h;
357 
358  err = av_hwframe_ctx_init(ctx->hwframes_out_ref);
359  if (err < 0)
360  return err;
361 
362  outl->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
363  if (!outl->hw_frames_ctx) {
364  return AVERROR(ENOMEM);
365  }
366  return 0;
367 }
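
amf_init_filter_config() is meant to be called from the filter's config_props callback on the output link; once it returns, the device context, output hw frames context and target dimensions are ready and the caller creates the actual AMF component. A hedged sketch of such a callback (hypothetical; the real ones live in the individual AMF filters):

static int hypothetical_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    enum AVPixelFormat in_format;
    int err = amf_init_filter_config(outlink, &in_format);
    if (err < 0)
        return err;

    /* ctx->amf_device_ctx, ctx->width/height and the output hw frames context
     * are now set up; the filter would create and initialize its AMF component
     * (scaler, converter, ...) here using in_format and the configured size. */
    return 0;
}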
368 
369 void amf_free_amfsurface(void *opaque, uint8_t *data)
370 {
371  AMFSurface *surface = (AMFSurface*)data;
372  surface->pVtbl->Release(surface);
373 }
374 
375 AVFrame *amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface* pSurface)
376 {
377  AVFrame *frame = av_frame_alloc();
378  AMFFilterContext *ctx = avctx->priv;
379 
380  if (!frame)
381  return NULL;
382 
383  if (ctx->hwframes_out_ref) {
384  AVHWFramesContext *hwframes_out = (AVHWFramesContext *)ctx->hwframes_out_ref->data;
385  if (hwframes_out->format == AV_PIX_FMT_AMF_SURFACE) {
386  int ret = av_hwframe_get_buffer(ctx->hwframes_out_ref, frame, 0);
387  if (ret < 0) {
388  av_log(avctx, AV_LOG_ERROR, "Get hw frame failed.\n");
389  av_frame_free(&frame);
390  return NULL;
391  }
392  frame->data[0] = (uint8_t *)pSurface;
393  frame->buf[1] = av_buffer_create((uint8_t *)pSurface, sizeof(AMFSurface),
394  amf_free_amfsurface,
395  (void*)avctx,
396  AV_BUFFER_FLAG_READONLY);
397  } else { // FIXME: add processing of other hw formats
398  av_log(ctx, AV_LOG_ERROR, "Unknown pixel format\n");
399  return NULL;
400  }
401  } else {
402 
403  switch (pSurface->pVtbl->GetMemoryType(pSurface))
404  {
405  #if CONFIG_D3D11VA
406  case AMF_MEMORY_DX11:
407  {
408  AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
409  frame->data[0] = plane0->pVtbl->GetNative(plane0);
410  frame->data[1] = (uint8_t*)(intptr_t)0;
411 
412  frame->buf[0] = av_buffer_create((uint8_t *)pSurface, // surface passed as buffer data so amf_free_amfsurface can release it
413  0,
414  amf_free_amfsurface,
415  (void*)avctx,
416  AV_BUFFER_FLAG_READONLY);
417  }
418  break;
419  #endif
420  #if CONFIG_DXVA2
421  case AMF_MEMORY_DX9:
422  {
423  AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
424  frame->data[3] = plane0->pVtbl->GetNative(plane0);
425 
426  frame->buf[0] = av_buffer_create((uint8_t *)pSurface, // surface passed as buffer data so amf_free_amfsurface can release it
427  0,
428  amf_free_amfsurface,
429  (void*)avctx,
430  AV_BUFFER_FLAG_READONLY);
431  }
432  break;
433  #endif
434  default:
435  {
436  av_log(avctx, AV_LOG_ERROR, "Unsupported memory type : %d\n", pSurface->pVtbl->GetMemoryType(pSurface));
437  return NULL;
438  }
439  }
440  }
441 
442  return frame;
443 }
444 
445 int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface** ppSurface)
446 {
447  AMFFilterContext *ctx = avctx->priv;
448  AMFSurface *surface;
449  AMF_RESULT res;
450  int hw_surface = 0;
451 
452  switch (frame->format) {
453 #if CONFIG_D3D11VA
454  case AV_PIX_FMT_D3D11:
455  {
456  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
457  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
458  int index = (intptr_t)frame->data[1]; // index of the slice in the texture array - tells AMF which slice to use
459  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
460 
461  res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
462  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
463  hw_surface = 1;
464  }
465  break;
466 #endif
467  case AV_PIX_FMT_AMF_SURFACE:
468  {
469  surface = (AMFSurface*)frame->data[0]; // actual surface
470  surface->pVtbl->Acquire(surface); // the returned surface has to be ref-counted (ref++)
471  hw_surface = 1;
472  }
473  break;
474 
475 #if CONFIG_DXVA2
476  case AV_PIX_FMT_DXVA2_VLD:
477  {
478  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
479 
480  res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
481  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
482  hw_surface = 1;
483  }
484  break;
485 #endif
486  default:
487  {
488  AMF_SURFACE_FORMAT amf_fmt = av_av_to_amf_format(frame->format);
489  res = ctx->amf_device_ctx->context->pVtbl->AllocSurface(ctx->amf_device_ctx->context, AMF_MEMORY_HOST, amf_fmt, frame->width, frame->height, &surface);
490  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
491  amf_copy_surface(avctx, frame, surface);
492  }
493  break;
494  }
495 
496  if (frame->crop_left || frame->crop_right || frame->crop_top || frame->crop_bottom) {
497  size_t crop_x = frame->crop_left;
498  size_t crop_y = frame->crop_top;
499  size_t crop_w = frame->width - (frame->crop_left + frame->crop_right);
500  size_t crop_h = frame->height - (frame->crop_top + frame->crop_bottom);
501  AVFilterLink *outlink = avctx->outputs[0];
502  if (crop_x || crop_y) {
503  if (crop_w == outlink->w && crop_h == outlink->h) {
504  AMFData *cropped_buffer = NULL;
505  res = surface->pVtbl->Duplicate(surface, surface->pVtbl->GetMemoryType(surface), &cropped_buffer);
506  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "Duplicate() failed with error %d\n", res);
507  surface->pVtbl->Release(surface);
508  surface = (AMFSurface*)cropped_buffer;
509  }
510  else
511  surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
512  }
513  else
514  surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
515  }
516  else if (hw_surface) {
517  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
518  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
519  }
520 
521  surface->pVtbl->SetPts(surface, frame->pts);
522  *ppSurface = surface;
523  return 0;
524 }