FFmpeg
vf_crop.c
/*
 * Copyright (c) 2007 Bobby Bingham
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * video crop filter
 */
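
/*
 * Illustrative usage (an example, not part of this file): the filter is
 * normally selected with an option string of the form "crop=out_w:out_h:x:y",
 * e.g.
 *   ffmpeg -i input.mp4 -vf "crop=640:480:100:50" output.mp4
 * where input.mp4/output.mp4 are placeholder names and each value may also be
 * an expression using the variables declared in var_names below.
 */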

#include <stdio.h>

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
#include "libavutil/eval.h"
#include "libavutil/avstring.h"
#include "libavutil/internal.h"
#include "libavutil/libm.h"
#include "libavutil/imgutils.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
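
/* Names of the variables that may appear in the out_w/out_h/x/y option
 * expressions; the order must match enum var_name below. */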
static const char *const var_names[] = {
    "in_w", "iw",   ///< width  of the input video
    "in_h", "ih",   ///< height of the input video
    "out_w", "ow",  ///< width  of the cropped video
    "out_h", "oh",  ///< height of the cropped video
    "a",
    "sar",
    "dar",
    "hsub",
    "vsub",
    "x",
    "y",
    "n",            ///< frame number
    "pos",          ///< position in the file
    "t",            ///< timestamp expressed in seconds
    NULL
};

enum var_name {
    VAR_IN_W,  VAR_IW,
    VAR_IN_H,  VAR_IH,
    VAR_OUT_W, VAR_OW,
    VAR_OUT_H, VAR_OH,
    VAR_A,
    VAR_SAR,
    VAR_DAR,
    VAR_HSUB,
    VAR_VSUB,
    VAR_X,
    VAR_Y,
    VAR_N,
    VAR_POS,
    VAR_T,
    VAR_VARS_NB
};

typedef struct CropContext {
    const AVClass *class;
    int  x;             ///< x offset of the non-cropped area with respect to the input area
    int  y;             ///< y offset of the non-cropped area with respect to the input area
    int  w;             ///< width of the cropped area
    int  h;             ///< height of the cropped area

    AVRational out_sar; ///< output sample aspect ratio
    int keep_aspect;    ///< keep display aspect ratio when cropping
    int exact;          ///< exact cropping, for subsampled formats

    int max_step[4];    ///< max pixel step for each plane, expressed as a number of bytes
    int hsub, vsub;     ///< chroma subsampling
    char *x_expr, *y_expr, *w_expr, *h_expr;
    AVExpr *x_pexpr, *y_pexpr;  /* parsed expressions for x and y */
    double var_values[VAR_VARS_NB];
} CropContext;
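
/* Build the list of pixel formats the crop filter accepts: every non-bitstream
 * software format that is planar whenever it is chroma-subsampled (packed
 * subsampled formats such as YUYV422 cannot be cropped by pointer shifting);
 * hardware-accelerated formats are always accepted. */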
static int query_formats(AVFilterContext *ctx)
{
    AVFilterFormats *formats = NULL;
    int fmt, ret;

    for (fmt = 0; av_pix_fmt_desc_get(fmt); fmt++) {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(fmt);
        if (desc->flags & AV_PIX_FMT_FLAG_BITSTREAM)
            continue;
        if (!(desc->flags & AV_PIX_FMT_FLAG_HWACCEL)) {
            // Not usable if there is any subsampling but the format is
            // not planar (e.g. YUYV422).
            if ((desc->log2_chroma_w || desc->log2_chroma_h) &&
                !(desc->flags & AV_PIX_FMT_FLAG_PLANAR))
                continue;
        }
        ret = ff_add_format(&formats, fmt);
        if (ret < 0)
            return ret;
    }

    return ff_set_common_formats(ctx, formats);
}
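
/* Free the parsed x/y expressions when the filter instance is torn down. */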
static av_cold void uninit(AVFilterContext *ctx)
{
    CropContext *s = ctx->priv;

    av_expr_free(s->x_pexpr);
    s->x_pexpr = NULL;
    av_expr_free(s->y_pexpr);
    s->y_pexpr = NULL;
}
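
/* Round an expression result to the nearest int: NaN is rejected, values
 * outside the int range are clamped to INT_MIN/INT_MAX; both cases return
 * AVERROR(EINVAL) so callers can reject nonsensical crop geometry. */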
static inline int normalize_double(int *n, double d)
{
    int ret = 0;

    if (isnan(d)) {
        ret = AVERROR(EINVAL);
    } else if (d > INT_MAX || d < INT_MIN) {
        *n = d > INT_MAX ? INT_MAX : INT_MIN;
        ret = AVERROR(EINVAL);
    } else
        *n = lrint(d);

    return ret;
}
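
/* (Re)configure the crop for the negotiated input: evaluate the w/h option
 * expressions (w is evaluated twice so that it may reference h), parse the
 * x/y expressions for per-frame evaluation, recompute the output SAR when
 * keep_aspect is set, and validate the resulting rectangle against the
 * input size. */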
static int config_input(AVFilterLink *link)
{
    AVFilterContext *ctx = link->dst;
    CropContext *s = ctx->priv;
    const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(link->format);
    int ret;
    const char *expr;
    double res;

    s->var_values[VAR_IN_W]  = s->var_values[VAR_IW] = ctx->inputs[0]->w;
    s->var_values[VAR_IN_H]  = s->var_values[VAR_IH] = ctx->inputs[0]->h;
    s->var_values[VAR_A]     = (float) link->w / link->h;
    s->var_values[VAR_SAR]   = link->sample_aspect_ratio.num ? av_q2d(link->sample_aspect_ratio) : 1;
    s->var_values[VAR_DAR]   = s->var_values[VAR_A] * s->var_values[VAR_SAR];
    s->var_values[VAR_HSUB]  = 1 << pix_desc->log2_chroma_w;
    s->var_values[VAR_VSUB]  = 1 << pix_desc->log2_chroma_h;
    s->var_values[VAR_X]     = NAN;
    s->var_values[VAR_Y]     = NAN;
    s->var_values[VAR_OUT_W] = s->var_values[VAR_OW] = NAN;
    s->var_values[VAR_OUT_H] = s->var_values[VAR_OH] = NAN;
    s->var_values[VAR_N]     = 0;
    s->var_values[VAR_T]     = NAN;
    s->var_values[VAR_POS]   = NAN;

    av_image_fill_max_pixsteps(s->max_step, NULL, pix_desc);

    if (pix_desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
        s->hsub = 1;
        s->vsub = 1;
    } else {
        s->hsub = pix_desc->log2_chroma_w;
        s->vsub = pix_desc->log2_chroma_h;
    }

    av_expr_parse_and_eval(&res, (expr = s->w_expr),
                           var_names, s->var_values,
                           NULL, NULL, NULL, NULL, NULL, 0, ctx);
    s->var_values[VAR_OUT_W] = s->var_values[VAR_OW] = res;
    if ((ret = av_expr_parse_and_eval(&res, (expr = s->h_expr),
                                      var_names, s->var_values,
                                      NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
        goto fail_expr;
    s->var_values[VAR_OUT_H] = s->var_values[VAR_OH] = res;
    /* evaluate ow again, as it may depend on oh */
    if ((ret = av_expr_parse_and_eval(&res, (expr = s->w_expr),
                                      var_names, s->var_values,
                                      NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
        goto fail_expr;

    s->var_values[VAR_OUT_W] = s->var_values[VAR_OW] = res;
    if (normalize_double(&s->w, s->var_values[VAR_OUT_W]) < 0 ||
        normalize_double(&s->h, s->var_values[VAR_OUT_H]) < 0) {
        av_log(ctx, AV_LOG_ERROR,
               "Too large a value or an invalid expression for out_w/ow or out_h/oh. "
               "Maybe the expression for out_w:'%s' or for out_h:'%s' is self-referencing.\n",
               s->w_expr, s->h_expr);
        return AVERROR(EINVAL);
    }

    if (!s->exact) {
        s->w &= ~((1 << s->hsub) - 1);
        s->h &= ~((1 << s->vsub) - 1);
    }

    av_expr_free(s->x_pexpr);
    av_expr_free(s->y_pexpr);
    s->x_pexpr = s->y_pexpr = NULL;
    if ((ret = av_expr_parse(&s->x_pexpr, s->x_expr, var_names,
                             NULL, NULL, NULL, NULL, 0, ctx)) < 0 ||
        (ret = av_expr_parse(&s->y_pexpr, s->y_expr, var_names,
                             NULL, NULL, NULL, NULL, 0, ctx)) < 0)
        return AVERROR(EINVAL);

    if (s->keep_aspect) {
        AVRational dar = av_mul_q(link->sample_aspect_ratio,
                                  (AVRational){ link->w, link->h });
        av_reduce(&s->out_sar.num, &s->out_sar.den,
                  dar.num * s->h, dar.den * s->w, INT_MAX);
    } else
        s->out_sar = link->sample_aspect_ratio;

    av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d sar:%d/%d -> w:%d h:%d sar:%d/%d\n",
           link->w, link->h, link->sample_aspect_ratio.num, link->sample_aspect_ratio.den,
           s->w, s->h, s->out_sar.num, s->out_sar.den);

    if (s->w <= 0 || s->h <= 0 ||
        s->w > link->w || s->h > link->h) {
        av_log(ctx, AV_LOG_ERROR,
               "Invalid (too big or non-positive) size for width '%d' or height '%d'\n",
               s->w, s->h);
        return AVERROR(EINVAL);
    }

    /* set defaults, required in case the first computed value for x/y is NAN */
    s->x = (link->w - s->w) / 2;
    s->y = (link->h - s->h) / 2;
    if (!s->exact) {
        s->x &= ~((1 << s->hsub) - 1);
        s->y &= ~((1 << s->vsub) - 1);
    }
    return 0;

fail_expr:
    av_log(ctx, AV_LOG_ERROR, "Error when evaluating the expression '%s'\n", expr);
    return ret;
}
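
/* Propagate the crop to the output link: software frames shrink to w x h,
 * hardware frames keep their size (only the AVFrame crop_* fields are adjusted
 * in filter_frame), and the sample aspect ratio computed in config_input is
 * applied in both cases. */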
static int config_output(AVFilterLink *link)
{
    CropContext *s = link->src->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);

    if (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
        // Hardware frames adjust the cropping regions rather than
        // changing the frame size.
    } else {
        link->w = s->w;
        link->h = s->h;
    }
    link->sample_aspect_ratio = s->out_sar;

    return 0;
}
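
/* Per-frame work: update the n/t/pos variables, evaluate the x/y expressions
 * (x twice so that it may reference y), clamp and align the offsets, then
 * either shift the plane data pointers (software formats) or extend the
 * AVFrame crop_* fields (hardware formats). */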
static int filter_frame(AVFilterLink *link, AVFrame *frame)
{
    AVFilterContext *ctx = link->dst;
    CropContext *s = ctx->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
    int i;

    s->var_values[VAR_N] = link->frame_count_out;
    s->var_values[VAR_T] = frame->pts == AV_NOPTS_VALUE ?
        NAN : frame->pts * av_q2d(link->time_base);
    s->var_values[VAR_POS] = frame->pkt_pos == -1 ?
        NAN : frame->pkt_pos;
    s->var_values[VAR_X] = av_expr_eval(s->x_pexpr, s->var_values, NULL);
    s->var_values[VAR_Y] = av_expr_eval(s->y_pexpr, s->var_values, NULL);
    /* evaluate x again, as it may be expressed in terms of y */
    s->var_values[VAR_X] = av_expr_eval(s->x_pexpr, s->var_values, NULL);

    normalize_double(&s->x, s->var_values[VAR_X]);
    normalize_double(&s->y, s->var_values[VAR_Y]);

    if (s->x < 0)
        s->x = 0;
    if (s->y < 0)
        s->y = 0;
    if ((unsigned)s->x + (unsigned)s->w > link->w)
        s->x = link->w - s->w;
    if ((unsigned)s->y + (unsigned)s->h > link->h)
        s->y = link->h - s->h;
    if (!s->exact) {
        s->x &= ~((1 << s->hsub) - 1);
        s->y &= ~((1 << s->vsub) - 1);
    }

    av_log(ctx, AV_LOG_TRACE, "n:%d t:%f pos:%f x:%d y:%d x+w:%d y+h:%d\n",
           (int)s->var_values[VAR_N], s->var_values[VAR_T], s->var_values[VAR_POS],
           s->x, s->y, s->x + s->w, s->y + s->h);

    if (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
        frame->crop_top   += s->y;
        frame->crop_left  += s->x;
        frame->crop_bottom = frame->height - frame->crop_top  - frame->crop_bottom - s->h;
        frame->crop_right  = frame->width  - frame->crop_left - frame->crop_right  - s->w;
    } else {
        frame->width  = s->w;
        frame->height = s->h;

        frame->data[0] += s->y * frame->linesize[0];
        frame->data[0] += s->x * s->max_step[0];

        if (!(desc->flags & AV_PIX_FMT_FLAG_PAL || desc->flags & FF_PSEUDOPAL)) {
            for (i = 1; i < 3; i++) {
                if (frame->data[i]) {
                    frame->data[i] += (s->y >> s->vsub) * frame->linesize[i];
                    frame->data[i] += (s->x * s->max_step[i]) >> s->hsub;
                }
            }
        }

        /* alpha plane */
        if (frame->data[3]) {
            frame->data[3] += s->y * frame->linesize[3];
            frame->data[3] += s->x * s->max_step[3];
        }
    }

    return ff_filter_frame(link->dst->outputs[0], frame);
}
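
/* Runtime command handling for "out_w"/"w", "out_h"/"h", "x" and "y": store
 * the new option value, re-run config_input/config_output, and roll back to
 * the previous geometry if reconfiguration fails. */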
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
                           char *res, int res_len, int flags)
{
    CropContext *s = ctx->priv;
    int ret;

    if (   !strcmp(cmd, "out_w") || !strcmp(cmd, "w")
        || !strcmp(cmd, "out_h") || !strcmp(cmd, "h")
        || !strcmp(cmd, "x")     || !strcmp(cmd, "y")) {

        int old_x = s->x;
        int old_y = s->y;
        int old_w = s->w;
        int old_h = s->h;

        AVFilterLink *outlink = ctx->outputs[0];
        AVFilterLink *inlink  = ctx->inputs[0];

        av_opt_set(s, cmd, args, 0);

        if ((ret = config_input(inlink)) < 0) {
            s->x = old_x;
            s->y = old_y;
            s->w = old_w;
            s->h = old_h;
            return ret;
        }

        ret = config_output(outlink);

    } else
        ret = AVERROR(ENOSYS);

    return ret;
}

#define OFFSET(x) offsetof(CropContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
#define TFLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
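
/* User-visible options.  The w/h/x/y values are expression strings evaluated
 * with the variables from var_names; options tagged TFLAGS can additionally be
 * changed at runtime through process_command. */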
static const AVOption crop_options[] = {
    { "out_w",       "set the width crop area expression",  OFFSET(w_expr), AV_OPT_TYPE_STRING, {.str = "iw"},             CHAR_MIN, CHAR_MAX, TFLAGS },
    { "w",           "set the width crop area expression",  OFFSET(w_expr), AV_OPT_TYPE_STRING, {.str = "iw"},             CHAR_MIN, CHAR_MAX, TFLAGS },
    { "out_h",       "set the height crop area expression", OFFSET(h_expr), AV_OPT_TYPE_STRING, {.str = "ih"},             CHAR_MIN, CHAR_MAX, TFLAGS },
    { "h",           "set the height crop area expression", OFFSET(h_expr), AV_OPT_TYPE_STRING, {.str = "ih"},             CHAR_MIN, CHAR_MAX, TFLAGS },
    { "x",           "set the x crop area expression",      OFFSET(x_expr), AV_OPT_TYPE_STRING, {.str = "(in_w-out_w)/2"}, CHAR_MIN, CHAR_MAX, TFLAGS },
    { "y",           "set the y crop area expression",      OFFSET(y_expr), AV_OPT_TYPE_STRING, {.str = "(in_h-out_h)/2"}, CHAR_MIN, CHAR_MAX, TFLAGS },
    { "keep_aspect", "keep aspect ratio",                   OFFSET(keep_aspect), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
    { "exact",       "do exact cropping",                   OFFSET(exact),       AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(crop);

static const AVFilterPad avfilter_vf_crop_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
        .config_props = config_input,
    },
    { NULL }
};

static const AVFilterPad avfilter_vf_crop_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
    { NULL }
};

AVFilter ff_vf_crop = {
    .name            = "crop",
    .description     = NULL_IF_CONFIG_SMALL("Crop the input video."),
    .priv_size       = sizeof(CropContext),
    .priv_class      = &crop_class,
    .query_formats   = query_formats,
    .uninit          = uninit,
    .inputs          = avfilter_vf_crop_inputs,
    .outputs         = avfilter_vf_crop_outputs,
    .process_command = process_command,
};