FFmpeg
vf_tonemap_vaapi.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
#include <string.h>

#include "libavutil/avassert.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/mastering_display_metadata.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "vaapi_vpp.h"
30 
31 typedef struct HDRVAAPIContext {
32  VAAPIVPPContext vpp_ctx; // must be the first field
33 
35 
39 
43 
44  VAHdrMetaDataHDR10 in_metadata;
45 
49 
50 static int tonemap_vaapi_save_metadata(AVFilterContext *avctx, AVFrame *input_frame)
51 {
52  HDRVAAPIContext *ctx = avctx->priv;
54  AVContentLightMetadata *light_meta;
55 
56  if (input_frame->color_trc != AVCOL_TRC_SMPTE2084) {
57  av_log(avctx, AV_LOG_WARNING, "Only support HDR10 as input for vaapi tone-mapping\n");
58  }
59 
60  ctx->src_display = av_frame_get_side_data(input_frame,
62  if (ctx->src_display) {
63  hdr_meta = (AVMasteringDisplayMetadata *)ctx->src_display->data;
64  if (!hdr_meta) {
65  av_log(avctx, AV_LOG_ERROR, "No mastering display data\n");
66  return AVERROR(EINVAL);
67  }
68 
69  if (hdr_meta->has_luminance) {
70  const int luma_den = 10000;
71  ctx->in_metadata.max_display_mastering_luminance =
72  lrint(luma_den * av_q2d(hdr_meta->max_luminance));
73  ctx->in_metadata.min_display_mastering_luminance =
74  FFMIN(lrint(luma_den * av_q2d(hdr_meta->min_luminance)),
75  ctx->in_metadata.max_display_mastering_luminance);
76 
77  av_log(avctx, AV_LOG_DEBUG,
78  "Mastering Display Metadata(in luminance):\n");
79  av_log(avctx, AV_LOG_DEBUG,
80  "min_luminance=%u, max_luminance=%u\n",
81  ctx->in_metadata.min_display_mastering_luminance,
82  ctx->in_metadata.max_display_mastering_luminance);
83  }
84 
85  if (hdr_meta->has_primaries) {
86  int i;
87  const int mapping[3] = {1, 2, 0}; //green, blue, red
88  const int chroma_den = 50000;
89 
90  for (i = 0; i < 3; i++) {
91  const int j = mapping[i];
92  ctx->in_metadata.display_primaries_x[i] =
93  FFMIN(lrint(chroma_den *
94  av_q2d(hdr_meta->display_primaries[j][0])),
95  chroma_den);
96  ctx->in_metadata.display_primaries_y[i] =
97  FFMIN(lrint(chroma_den *
98  av_q2d(hdr_meta->display_primaries[j][1])),
99  chroma_den);
100  }
101 
102  ctx->in_metadata.white_point_x =
103  FFMIN(lrint(chroma_den * av_q2d(hdr_meta->white_point[0])),
104  chroma_den);
105  ctx->in_metadata.white_point_y =
106  FFMIN(lrint(chroma_den * av_q2d(hdr_meta->white_point[1])),
107  chroma_den);
108 
109  av_log(avctx, AV_LOG_DEBUG,
110  "Mastering Display Metadata(in primaries):\n");
111  av_log(avctx, AV_LOG_DEBUG,
112  "G(%u,%u) B(%u,%u) R(%u,%u) WP(%u,%u)\n",
113  ctx->in_metadata.display_primaries_x[0],
114  ctx->in_metadata.display_primaries_y[0],
115  ctx->in_metadata.display_primaries_x[1],
116  ctx->in_metadata.display_primaries_y[1],
117  ctx->in_metadata.display_primaries_x[2],
118  ctx->in_metadata.display_primaries_y[2],
119  ctx->in_metadata.white_point_x,
120  ctx->in_metadata.white_point_y);
121  }
122  } else {
123  av_log(avctx, AV_LOG_ERROR, "No mastering display data from input\n");
124  return AVERROR(EINVAL);
125  }
126 
127  ctx->src_light = av_frame_get_side_data(input_frame,
129  if (ctx->src_light) {
130  light_meta = (AVContentLightMetadata *)ctx->src_light->data;
131  if (!light_meta) {
132  av_log(avctx, AV_LOG_ERROR, "No light metadata\n");
133  return AVERROR(EINVAL);
134  }
135 
136  ctx->in_metadata.max_content_light_level = light_meta->MaxCLL;
137  ctx->in_metadata.max_pic_average_light_level = light_meta->MaxFALL;
138 
139  av_log(avctx, AV_LOG_DEBUG,
140  "Mastering Content Light Level (in):\n");
141  av_log(avctx, AV_LOG_DEBUG,
142  "MaxCLL(%u) MaxFALL(%u)\n",
143  ctx->in_metadata.max_content_light_level,
144  ctx->in_metadata.max_pic_average_light_level);
145  } else {
146  av_log(avctx, AV_LOG_DEBUG, "No content light level from input\n");
147  }
148  return 0;
149 }
150 
152 {
153  VAAPIVPPContext *vpp_ctx = avctx->priv;
154  HDRVAAPIContext *ctx = avctx->priv;
155  VAStatus vas;
156  VAProcFilterParameterBufferHDRToneMapping *hdrtm_param;
157 
158  vas = vaMapBuffer(vpp_ctx->hwctx->display, vpp_ctx->filter_buffers[0],
159  (void**)&hdrtm_param);
160  if (vas != VA_STATUS_SUCCESS) {
161  av_log(avctx, AV_LOG_ERROR, "Failed to map "
162  "buffer (%d): %d (%s).\n",
163  vpp_ctx->filter_buffers[0], vas, vaErrorStr(vas));
164  return AVERROR(EIO);
165  }
166 
167  memcpy(hdrtm_param->data.metadata, &ctx->in_metadata, sizeof(VAHdrMetaDataHDR10));
168 
169  vas = vaUnmapBuffer(vpp_ctx->hwctx->display, vpp_ctx->filter_buffers[0]);
170  if (vas != VA_STATUS_SUCCESS) {
171  av_log(avctx, AV_LOG_ERROR, "Failed to unmap output buffers: "
172  "%d (%s).\n", vas, vaErrorStr(vas));
173  return AVERROR(EIO);
174  }
175 
176  return 0;
177 }
178 
180 {
181  VAAPIVPPContext *vpp_ctx = avctx->priv;
182  HDRVAAPIContext *ctx = avctx->priv;
183  VAStatus vas;
184  VAProcFilterParameterBufferHDRToneMapping hdrtm_param;
185  VAProcFilterCapHighDynamicRange hdr_cap[VAProcHighDynamicRangeMetadataTypeCount];
186  int num_query_caps;
187  int i;
188 
189  memset(&hdrtm_param, 0, sizeof(hdrtm_param));
190  memset(&ctx->in_metadata, 0, sizeof(ctx->in_metadata));
191 
192  num_query_caps = VAProcHighDynamicRangeMetadataTypeCount;
193  vas = vaQueryVideoProcFilterCaps(vpp_ctx->hwctx->display,
194  vpp_ctx->va_context,
195  VAProcFilterHighDynamicRangeToneMapping,
196  &hdr_cap, &num_query_caps);
197  if (vas != VA_STATUS_SUCCESS) {
198  av_log(avctx, AV_LOG_ERROR, "Failed to query HDR caps "
199  "context: %d (%s).\n", vas, vaErrorStr(vas));
200  return AVERROR(EIO);
201  }
202 
203  for (i = 0; i < num_query_caps; i++) {
204  if (hdr_cap[i].metadata_type != VAProcHighDynamicRangeMetadataNone)
205  break;
206  }
207 
208  if (i >= num_query_caps) {
209  av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support HDR\n");
210  return AVERROR(EINVAL);
211  }
212 
213  for (i = 0; i < num_query_caps; i++) {
214  if (VA_TONE_MAPPING_HDR_TO_SDR & hdr_cap[i].caps_flag)
215  break;
216  }
217 
218  if (i >= num_query_caps) {
219  av_log(avctx, AV_LOG_ERROR,
220  "VAAPI driver doesn't support HDR to SDR\n");
221  return AVERROR(EINVAL);
222  }
223 
224  hdrtm_param.type = VAProcFilterHighDynamicRangeToneMapping;
225  hdrtm_param.data.metadata_type = VAProcHighDynamicRangeMetadataHDR10;
226  hdrtm_param.data.metadata = &ctx->in_metadata;
227  hdrtm_param.data.metadata_size = sizeof(VAHdrMetaDataHDR10);
228 
229  return ff_vaapi_vpp_make_param_buffers(avctx,
230  VAProcFilterParameterBufferType,
231  &hdrtm_param, sizeof(hdrtm_param), 1);
232 }
233 
235 {
236  AVFilterContext *avctx = inlink->dst;
237  AVFilterLink *outlink = avctx->outputs[0];
238  VAAPIVPPContext *vpp_ctx = avctx->priv;
239  HDRVAAPIContext *ctx = avctx->priv;
241  VASurfaceID input_surface, output_surface;
242 
243  VAProcPipelineParameterBuffer params;
244  int err;
245 
246  av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
247  av_get_pix_fmt_name(input_frame->format),
248  input_frame->width, input_frame->height, input_frame->pts);
249 
250  if (vpp_ctx->va_context == VA_INVALID_ID){
251  av_frame_free(&input_frame);
252  return AVERROR(EINVAL);
253  }
254 
255  err = tonemap_vaapi_save_metadata(avctx, input_frame);
256  if (err < 0)
257  goto fail;
258 
259  err = tonemap_vaapi_set_filter_params(avctx, input_frame);
260  if (err < 0)
261  goto fail;
262 
263  input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
264  av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for tonemap vpp input.\n",
265  input_surface);
266 
267  output_frame = ff_get_video_buffer(outlink, vpp_ctx->output_width,
268  vpp_ctx->output_height);
269  if (!output_frame) {
270  err = AVERROR(ENOMEM);
271  goto fail;
272  }
273 
274  output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
275  av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for tonemap vpp output.\n",
276  output_surface);
277  memset(&params, 0, sizeof(params));
278 
279  err = av_frame_copy_props(output_frame, input_frame);
280  if (err < 0)
281  goto fail;
282 
284  output_frame->color_primaries = ctx->color_primaries;
285 
287  output_frame->color_trc = ctx->color_transfer;
288  else
289  output_frame->color_trc = AVCOL_TRC_BT709;
290 
292  output_frame->colorspace = ctx->color_matrix;
293 
294  err = ff_vaapi_vpp_init_params(avctx, &params,
295  input_frame, output_frame);
296  if (err < 0)
297  goto fail;
298 
299  err = ff_vaapi_vpp_render_picture(avctx, &params, output_frame);
300  if (err < 0)
301  goto fail;
302 
303  av_frame_free(&input_frame);
304 
305  av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
306  av_get_pix_fmt_name(output_frame->format),
307  output_frame->width, output_frame->height, output_frame->pts);
308 
309  return ff_filter_frame(outlink, output_frame);
310 
311 fail:
312  av_frame_free(&input_frame);
313  av_frame_free(&output_frame);
314  return err;
315 }
316 
318 {
319  VAAPIVPPContext *vpp_ctx = avctx->priv;
320  HDRVAAPIContext *ctx = avctx->priv;
321 
322  ff_vaapi_vpp_ctx_init(avctx);
325 
326  if (ctx->output_format_string) {
328  switch (vpp_ctx->output_format) {
329  case AV_PIX_FMT_NV12:
330  case AV_PIX_FMT_P010:
331  break;
332  default:
333  av_log(avctx, AV_LOG_ERROR, "Invalid output format.\n");
334  return AVERROR(EINVAL);
335  }
336  } else {
337  vpp_ctx->output_format = AV_PIX_FMT_NV12;
338  av_log(avctx, AV_LOG_WARNING, "Output format not set, use default format NV12\n");
339  }
340 
341 #define STRING_OPTION(var_name, func_name, default_value) do { \
342  if (ctx->var_name ## _string) { \
343  int var = av_ ## func_name ## _from_name(ctx->var_name ## _string); \
344  if (var < 0) { \
345  av_log(avctx, AV_LOG_ERROR, "Invalid %s.\n", #var_name); \
346  return AVERROR(EINVAL); \
347  } \
348  ctx->var_name = var; \
349  } else { \
350  ctx->var_name = default_value; \
351  } \
352  } while (0)
353 
357 
358  return 0;
359 }
360 
361 #define OFFSET(x) offsetof(HDRVAAPIContext, x)
362 #define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM)
363 static const AVOption tonemap_vaapi_options[] = {
364  { "format", "Output pixel format set", OFFSET(output_format_string), AV_OPT_TYPE_STRING, .flags = FLAGS, "format" },
365  { "matrix", "Output color matrix coefficient set",
367  { .str = NULL }, .flags = FLAGS, "matrix" },
368  { "m", "Output color matrix coefficient set",
370  { .str = NULL }, .flags = FLAGS, "matrix" },
371  { "primaries", "Output color primaries set",
373  { .str = NULL }, .flags = FLAGS, "primaries" },
374  { "p", "Output color primaries set",
376  { .str = NULL }, .flags = FLAGS, "primaries" },
377  { "transfer", "Output color transfer characteristics set",
379  { .str = NULL }, .flags = FLAGS, "transfer" },
380  { "t", "Output color transfer characteristics set",
382  { .str = NULL }, .flags = FLAGS, "transfer" },
383  { NULL }
384 };
385 
386 
387 AVFILTER_DEFINE_CLASS(tonemap_vaapi);
388 
390  {
391  .name = "default",
392  .type = AVMEDIA_TYPE_VIDEO,
393  .filter_frame = &tonemap_vaapi_filter_frame,
394  .config_props = &ff_vaapi_vpp_config_input,
395  },
396  { NULL }
397 };
398 
400  {
401  .name = "default",
402  .type = AVMEDIA_TYPE_VIDEO,
403  .config_props = &ff_vaapi_vpp_config_output,
404  },
405  { NULL }
406 };
407 
409  .name = "tonemap_vaapi",
410  .description = NULL_IF_CONFIG_SMALL("VAAPI VPP for tone-mapping"),
411  .priv_size = sizeof(HDRVAAPIContext),
415  .inputs = tonemap_vaapi_inputs,
416  .outputs = tonemap_vaapi_outputs,
417  .priv_class = &tonemap_vaapi_class,
418  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
419 };
#define NULL
Definition: coverity.c:32
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Definition: internal.h:339
This structure describes decoded (raw) audio or video data.
Definition: frame.h:308
unsigned MaxCLL
Max content light level (cd/m^2).
int ff_vaapi_vpp_config_input(AVFilterLink *inlink)
Definition: vaapi_vpp.c:70
AVOption.
Definition: opt.h:248
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:200
Main libavfilter public API header.
Memory handling functions.
int ff_vaapi_vpp_config_output(AVFilterLink *outlink)
Definition: vaapi_vpp.c:95
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:35
static int tonemap_vaapi_build_filter_params(AVFilterContext *avctx)
static const AVOption tonemap_vaapi_options[]
AVRational white_point[2]
CIE 1931 xy chromaticity coords of white point.
Content light level (based on CTA-861.3).
Definition: frame.h:136
AVFILTER_DEFINE_CLASS(tonemap_vaapi)
Mastering display metadata associated with a video frame.
Definition: frame.h:119
int has_primaries
Flag indicating whether the display primaries (and white point) are set.
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
static const AVFilterPad tonemap_vaapi_outputs[]
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:483
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Definition: frame.c:751
const char * name
Pad name.
Definition: internal.h:60
#define AV_PIX_FMT_P010
Definition: pixfmt.h:448
VAHdrMetaDataHDR10 in_metadata
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1091
#define av_cold
Definition: attributes.h:88
#define STRING_OPTION(var_name, func_name, default_value)
static av_cold int uninit(AVCodecContext *avctx)
Definition: crystalhd.c:279
AVFrameSideData * src_light
AVOptions.
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:512
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:401
Structure to hold side data for an AVFrame.
Definition: frame.h:214
static double av_q2d(AVRational a)
Convert an AVRational to a double.
Definition: rational.h:104
static int tonemap_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
int has_luminance
Flag indicating whether the luminance (min_ and max_) have been set.
AVColorPrimaries
Chromaticity coordinates of the source primaries.
Definition: pixfmt.h:458
char * color_matrix_string
AVFilter ff_vf_tonemap_vaapi
#define av_log(a,...)
A filter pad used for either input or output.
Definition: internal.h:54
Content light level needed by to transmit HDR over HDMI (CTA-861.3).
char * color_transfer_string
int width
Definition: frame.h:366
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
static av_cold int tonemap_vaapi_init(AVFilterContext *avctx)
enum AVColorSpace color_matrix
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:203
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Definition: internal.h:153
void * priv
private data for use by the filter
Definition: avfilter.h:354
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:215
GLenum GLint * params
Definition: opengl_enc.c:113
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:563
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
simple assert() macros that are a bit more flexible than ISO C assert().
AVRational max_luminance
Max luminance of mastering display (cd/m^2).
int ff_vaapi_vpp_make_param_buffers(AVFilterContext *avctx, int type, const void *data, size_t size, int count)
Definition: vaapi_vpp.c:563
#define fail()
Definition: checkasm.h:123
static int tonemap_vaapi_set_filter_params(AVFilterContext *avctx, AVFrame *input_frame)
char * color_primaries_string
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
Definition: pixfmt.h:500
#define FFMIN(a, b)
Definition: common.h:96
AVRational min_luminance
Min luminance of mastering display (cd/m^2).
void ff_vaapi_vpp_pipeline_uninit(AVFilterContext *avctx)
Definition: vaapi_vpp.c:44
static const AVFilterPad tonemap_vaapi_inputs[]
AVFormatContext * ctx
Definition: movenc.c:48
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
if(ret)
VADisplay display
The VADisplay handle, to be filled by the user.
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:381
also ITU-R BT1361
Definition: pixfmt.h:485
VABufferID filter_buffers[VAProcFilterCount]
Definition: vaapi_vpp.h:51
uint8_t * data
Definition: frame.h:216
static int tonemap_vaapi_save_metadata(AVFilterContext *avctx, AVFrame *input_frame)
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several inputs
static int output_frame(H264Context *h, AVFrame *dst, H264Picture *srcp)
Definition: h264dec.c:824
Filter definition.
Definition: avfilter.h:145
Mastering display metadata capable of representing the color volume of the display used to master the...
#define OFFSET(x)
AVFrameSideData * src_display
const char * name
Filter name.
Definition: avfilter.h:149
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:351
AVRational display_primaries[3][2]
CIE 1931 xy chromaticity coords of color primaries (r, g, b order).
VAContextID va_context
Definition: vaapi_vpp.h:41
enum AVPixelFormat output_format
Definition: vaapi_vpp.h:47
AVVAAPIDeviceContext * hwctx
Definition: vaapi_vpp.h:36
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:322
int ff_vaapi_vpp_render_picture(AVFilterContext *avctx, VAProcPipelineParameterBuffer *params, AVFrame *output_frame)
Definition: vaapi_vpp.c:592
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
int ff_vaapi_vpp_query_formats(AVFilterContext *avctx)
Definition: vaapi_vpp.c:27
static int query_formats(AVFilterContext *ctx)
Definition: aeval.c:244
void ff_vaapi_vpp_ctx_init(AVFilterContext *avctx)
Definition: vaapi_vpp.c:666
char * output_format_string
enum AVColorPrimaries color_primaries
#define lrint
Definition: tablegen.h:53
enum AVColorPrimaries color_primaries
Definition: frame.h:554
An instance of a filter.
Definition: avfilter.h:339
enum AVColorTransferCharacteristic color_transfer
int height
Definition: frame.h:366
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:556
int ff_vaapi_vpp_init_params(AVFilterContext *avctx, VAProcPipelineParameterBuffer *params, const AVFrame *input_frame, AVFrame *output_frame)
Definition: vaapi_vpp.c:515
enum AVPixelFormat av_get_pix_fmt(const char *name)
Return the pixel format corresponding to name.
Definition: pixdesc.c:2501
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2489
int(* build_filter_params)(AVFilterContext *avctx)
Definition: vaapi_vpp.h:54
internal API functions
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Frame references ownership and permissions
unsigned MaxFALL
Max average light level per frame (cd/m^2).
#define FLAGS
void(* pipeline_uninit)(AVFilterContext *avctx)
Definition: vaapi_vpp.h:56
void ff_vaapi_vpp_ctx_uninit(AVFilterContext *avctx)
Definition: vaapi_vpp.c:680
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:671
int i
Definition: input.c:407
VAAPIVPPContext vpp_ctx