vf_scale.c
1 /*
2  * Copyright (c) 2007 Bobby Bingham
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * scale video filter
24  */
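/*
 * For illustration only, typical filtergraph invocations handled by this
 * filter (option names as defined in scale_options below):
 *
 *   scale=1280:720                 fixed output size
 *   scale=w=iw/2:h=-2              half the width, height derived and rounded to be even
 *   scale=640:480:flags=lanczos    pass a scaling algorithm to libswscale
 *   scale=rw:rh                    match the size of a second "ref" input
 */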
25 
26 #include <float.h>
27 #include <stdio.h>
28 #include <string.h>
29 
30 #include "avfilter.h"
31 #include "filters.h"
32 #include "formats.h"
33 #include "framesync.h"
34 #include "libavutil/pixfmt.h"
35 #include "scale_eval.h"
36 #include "video.h"
37 #include "libavutil/eval.h"
39 #include "libavutil/internal.h"
40 #include "libavutil/mem.h"
41 #include "libavutil/opt.h"
42 #include "libavutil/parseutils.h"
43 #include "libavutil/pixdesc.h"
44 #include "libswscale/swscale.h"
45 
46 static const char *const var_names[] = {
47  "in_w", "iw",
48  "in_h", "ih",
49  "out_w", "ow",
50  "out_h", "oh",
51  "a",
52  "sar",
53  "dar",
54  "hsub",
55  "vsub",
56  "ohsub",
57  "ovsub",
58  "n",
59  "t",
60 #if FF_API_FRAME_PKT
61  "pos",
62 #endif
63  "ref_w", "rw",
64  "ref_h", "rh",
65  "ref_a",
66  "ref_sar",
67  "ref_dar", "rdar",
68  "ref_hsub",
69  "ref_vsub",
70  "ref_n",
71  "ref_t",
72  "ref_pos",
73  /* Legacy variables for scale2ref */
74  "main_w",
75  "main_h",
76  "main_a",
77  "main_sar",
78  "main_dar", "mdar",
79  "main_hsub",
80  "main_vsub",
81  "main_n",
82  "main_t",
83  "main_pos",
84  NULL
85 };
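/*
 * For illustration: these names can be referenced inside the w/h expressions,
 * for example
 *
 *   scale=w='min(iw,1280)':h=-2    cap the width at 1280, keep the aspect ratio
 *   scale=w=rw:h=rh                follow the size of the "ref" input
 *
 * The order of this table must stay in sync with enum var_name below, since
 * both are indexed the same way when the expressions are evaluated.
 */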
86 
87 enum var_name {
88  VAR_IN_W, VAR_IW,
89  VAR_IN_H, VAR_IH,
90  VAR_OUT_W, VAR_OW,
91  VAR_OUT_H, VAR_OH,
92  VAR_A,
93  VAR_SAR,
94  VAR_DAR,
95  VAR_HSUB,
96  VAR_VSUB,
97  VAR_OHSUB,
98  VAR_OVSUB,
99  VAR_N,
100  VAR_T,
101 #if FF_API_FRAME_PKT
102  VAR_POS,
103 #endif
104  VAR_REF_W, VAR_RW,
105  VAR_REF_H, VAR_RH,
106  VAR_REF_A,
107  VAR_REF_SAR,
108  VAR_REF_DAR, VAR_RDAR,
109  VAR_REF_HSUB,
110  VAR_REF_VSUB,
111  VAR_REF_N,
112  VAR_REF_T,
113  VAR_REF_POS,
114  VAR_S2R_MAIN_W,
115  VAR_S2R_MAIN_H,
116  VAR_S2R_MAIN_A,
117  VAR_S2R_MAIN_SAR,
118  VAR_S2R_MAIN_DAR, VAR_S2R_MDAR,
119  VAR_S2R_MAIN_HSUB,
120  VAR_S2R_MAIN_VSUB,
121  VAR_S2R_MAIN_N,
122  VAR_S2R_MAIN_T,
123  VAR_S2R_MAIN_POS,
124  VARS_NB
125 };
126 
127 enum EvalMode {
128  EVAL_MODE_INIT,
129  EVAL_MODE_FRAME,
130  EVAL_MODE_NB
131 };
132 
133 typedef struct ScaleContext {
134  const AVClass *class;
135  SwsContext *sws;
136  FFFrameSync fs;
137 
138  /**
139  * New dimensions. Special values are:
140  * 0 = original width/height
141  * -1 = keep original aspect
142  * -N = try to keep aspect but make sure it is divisible by N
143  */
144  int w, h;
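    /*
     * Worked example of the special values above, for a 1920x1080 input:
     *   w=1280, h=0   -> 1280x1080  (0 keeps the original height)
     *   w=1280, h=-1  -> 1280x720   (keep the 16:9 aspect ratio)
     *   w=-2,   h=540 ->  960x540   (keep aspect, width rounded to be divisible by 2)
     */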
145  char *size_str;
146  double param[2]; // sws params
147 
148  int hsub, vsub; ///< chroma subsampling
149  int slice_y; ///< top of current output slice
150  int interlaced;
151  int uses_ref;
152 
153  char *w_expr; ///< width expression string
154  char *h_expr; ///< height expression string
155  AVExpr *w_pexpr;
156  AVExpr *h_pexpr;
157  double var_values[VARS_NB];
158 
159  char *flags_str;
160 
161  int in_color_matrix;
162  int out_color_matrix;
163  int in_primaries;
164  int out_primaries;
165  int in_transfer;
166  int out_transfer;
167  int in_range;
168  int out_range;
169 
170  int in_chroma_loc;
171  int out_chroma_loc;
172  int out_h_chr_pos;
173  int out_v_chr_pos;
174  int in_h_chr_pos;
175  int in_v_chr_pos;
176 
177  int force_original_aspect_ratio;
178  int force_divisible_by;
179  int reset_sar;
180 
181  int eval_mode; ///< expression evaluation mode
182 
183 } ScaleContext;
184 
186 #define IS_SCALE2REF(ctx) ((ctx)->filter == &ff_vf_scale2ref.p)
187 
188 static int config_props(AVFilterLink *outlink);
189 
190 static int check_exprs(AVFilterContext *ctx)
191 {
192  ScaleContext *scale = ctx->priv;
193  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
194 
195  if (!scale->w_pexpr && !scale->h_pexpr)
196  return AVERROR(EINVAL);
197 
198  if (scale->w_pexpr)
199  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
200  if (scale->h_pexpr)
201  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
202 
203  if (vars_w[VAR_OUT_W] || vars_w[VAR_OW]) {
204  av_log(ctx, AV_LOG_ERROR, "Width expression cannot be self-referencing: '%s'.\n", scale->w_expr);
205  return AVERROR(EINVAL);
206  }
207 
208  if (vars_h[VAR_OUT_H] || vars_h[VAR_OH]) {
209  av_log(ctx, AV_LOG_ERROR, "Height expression cannot be self-referencing: '%s'.\n", scale->h_expr);
210  return AVERROR(EINVAL);
211  }
212 
213  if ((vars_w[VAR_OUT_H] || vars_w[VAR_OH]) &&
214  (vars_h[VAR_OUT_W] || vars_h[VAR_OW])) {
215  av_log(ctx, AV_LOG_WARNING, "Circular references detected for width '%s' and height '%s' - possibly invalid.\n", scale->w_expr, scale->h_expr);
216  }
217 
218  if (vars_w[VAR_REF_W] || vars_h[VAR_REF_W] ||
219  vars_w[VAR_RW] || vars_h[VAR_RW] ||
220  vars_w[VAR_REF_H] || vars_h[VAR_REF_H] ||
221  vars_w[VAR_RH] || vars_h[VAR_RH] ||
222  vars_w[VAR_REF_A] || vars_h[VAR_REF_A] ||
223  vars_w[VAR_REF_SAR] || vars_h[VAR_REF_SAR] ||
224  vars_w[VAR_REF_DAR] || vars_h[VAR_REF_DAR] ||
225  vars_w[VAR_RDAR] || vars_h[VAR_RDAR] ||
226  vars_w[VAR_REF_HSUB] || vars_h[VAR_REF_HSUB] ||
227  vars_w[VAR_REF_VSUB] || vars_h[VAR_REF_VSUB] ||
228  vars_w[VAR_REF_N] || vars_h[VAR_REF_N] ||
229  vars_w[VAR_REF_T] || vars_h[VAR_REF_T] ||
230  vars_w[VAR_REF_POS] || vars_h[VAR_REF_POS]) {
231  scale->uses_ref = 1;
232  }
233 
234  if (!IS_SCALE2REF(ctx) &&
235  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
236  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
237  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
238  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
239  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
240  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
241  vars_w[VAR_S2R_MAIN_HSUB] || vars_h[VAR_S2R_MAIN_HSUB] ||
242  vars_w[VAR_S2R_MAIN_VSUB] || vars_h[VAR_S2R_MAIN_VSUB] ||
243  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
244  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
245  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
246  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref variables are not valid in scale filter.\n");
247  return AVERROR(EINVAL);
248  }
249 
250  if (scale->eval_mode == EVAL_MODE_INIT &&
251  (vars_w[VAR_N] || vars_h[VAR_N] ||
252  vars_w[VAR_T] || vars_h[VAR_T] ||
253 #if FF_API_FRAME_PKT
254  vars_w[VAR_POS] || vars_h[VAR_POS] ||
255 #endif
256  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
257  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
258  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
259  av_log(ctx, AV_LOG_ERROR, "Expressions with frame variables 'n', 't', 'pos' are not valid in init eval_mode.\n");
260  return AVERROR(EINVAL);
261  }
262 
263  return 0;
264 }
265 
266 static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
267 {
268  ScaleContext *scale = ctx->priv;
269  int ret, is_inited = 0;
270  char *old_str_expr = NULL;
271  AVExpr *old_pexpr = NULL;
272 
273  if (str_expr) {
274  old_str_expr = av_strdup(str_expr);
275  if (!old_str_expr)
276  return AVERROR(ENOMEM);
277  av_opt_set(scale, var, args, 0);
278  }
279 
280  if (*pexpr_ptr) {
281  old_pexpr = *pexpr_ptr;
282  *pexpr_ptr = NULL;
283  is_inited = 1;
284  }
285 
286  ret = av_expr_parse(pexpr_ptr, args, var_names,
287  NULL, NULL, NULL, NULL, 0, ctx);
288  if (ret < 0) {
289  av_log(ctx, AV_LOG_ERROR, "Cannot parse expression for %s: '%s'\n", var, args);
290  goto revert;
291  }
292 
293  ret = check_exprs(ctx);
294  if (ret < 0)
295  goto revert;
296 
297  if (is_inited && (ret = config_props(ctx->outputs[0])) < 0)
298  goto revert;
299 
300  av_expr_free(old_pexpr);
301  old_pexpr = NULL;
302  av_freep(&old_str_expr);
303 
304  return 0;
305 
306 revert:
307  av_expr_free(*pexpr_ptr);
308  *pexpr_ptr = NULL;
309  if (old_str_expr) {
310  av_opt_set(scale, var, old_str_expr, 0);
311  av_free(old_str_expr);
312  }
313  if (old_pexpr)
314  *pexpr_ptr = old_pexpr;
315 
316  return ret;
317 }
318 
319 static av_cold int preinit(AVFilterContext *ctx)
320 {
321  ScaleContext *scale = ctx->priv;
322 
323  scale->sws = sws_alloc_context();
324  if (!scale->sws)
325  return AVERROR(ENOMEM);
326 
327  // set threads=0, so we can later check whether the user modified it
328  scale->sws->threads = 0;
329 
329 
330  ff_framesync_preinit(&scale->fs);
331 
332  return 0;
333 }
334 
335 static int do_scale(FFFrameSync *fs);
336 
337 static av_cold int init(AVFilterContext *ctx)
338 {
339  ScaleContext *scale = ctx->priv;
340  int ret;
341 
342  if (IS_SCALE2REF(ctx))
343  av_log(ctx, AV_LOG_WARNING, "scale2ref is deprecated, use scale=rw:rh instead\n");
344 
345  if (scale->size_str && (scale->w_expr || scale->h_expr)) {
346  av_log(ctx, AV_LOG_ERROR,
347  "Size and width/height expressions cannot be set at the same time.\n");
348  return AVERROR(EINVAL);
349  }
350 
351  if (scale->w_expr && !scale->h_expr)
352  FFSWAP(char *, scale->w_expr, scale->size_str);
353 
354  if (scale->size_str) {
355  char buf[32];
356  if ((ret = av_parse_video_size(&scale->w, &scale->h, scale->size_str)) < 0) {
357  av_log(ctx, AV_LOG_ERROR,
358  "Invalid size '%s'\n", scale->size_str);
359  return ret;
360  }
361  snprintf(buf, sizeof(buf)-1, "%d", scale->w);
362  av_opt_set(scale, "w", buf, 0);
363  snprintf(buf, sizeof(buf)-1, "%d", scale->h);
364  av_opt_set(scale, "h", buf, 0);
365  }
366  if (!scale->w_expr)
367  av_opt_set(scale, "w", "iw", 0);
368  if (!scale->h_expr)
369  av_opt_set(scale, "h", "ih", 0);
370 
371  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
372  if (ret < 0)
373  return ret;
374 
375  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
376  if (ret < 0)
377  return ret;
378 
379  if (scale->in_primaries != -1 && !sws_test_primaries(scale->in_primaries, 0)) {
380  av_log(ctx, AV_LOG_ERROR, "Unsupported input primaries '%s'\n",
381  av_color_primaries_name(scale->in_primaries));
382  return AVERROR(EINVAL);
383  }
384 
385  if (scale->out_primaries != -1 && !sws_test_primaries(scale->out_primaries, 1)) {
386  av_log(ctx, AV_LOG_ERROR, "Unsupported output primaries '%s'\n",
387  av_color_primaries_name(scale->out_primaries));
388  return AVERROR(EINVAL);
389  }
390 
391  if (scale->in_transfer != -1 && !sws_test_transfer(scale->in_transfer, 0)) {
392  av_log(ctx, AV_LOG_ERROR, "Unsupported input transfer '%s'\n",
393  av_color_transfer_name(scale->in_transfer));
394  return AVERROR(EINVAL);
395  }
396 
397  if (scale->out_transfer != -1 && !sws_test_transfer(scale->out_transfer, 1)) {
398  av_log(ctx, AV_LOG_ERROR, "Unsupported output transfer '%s'\n",
399  av_color_transfer_name(scale->out_transfer));
400  return AVERROR(EINVAL);
401  }
402 
403  if (scale->in_color_matrix != -1 && !sws_test_colorspace(scale->in_color_matrix, 0)) {
404  av_log(ctx, AV_LOG_ERROR, "Unsupported input color matrix '%s'\n",
405  av_color_space_name(scale->in_color_matrix));
406  return AVERROR(EINVAL);
407  }
408 
409  if (scale->out_color_matrix != -1 && !sws_test_colorspace(scale->out_color_matrix, 1)) {
410  av_log(ctx, AV_LOG_ERROR, "Unsupported output color matrix '%s'\n",
411  av_color_space_name(scale->out_color_matrix));
412  return AVERROR(EINVAL);
413  }
414 
415  av_log(ctx, AV_LOG_VERBOSE, "w:%s h:%s flags:'%s' interl:%d\n",
416  scale->w_expr, scale->h_expr, (char *)av_x_if_null(scale->flags_str, ""), scale->interlaced);
417 
418  if (scale->flags_str && *scale->flags_str) {
419  ret = av_opt_set(scale->sws, "sws_flags", scale->flags_str, 0);
420  if (ret < 0)
421  return ret;
422  }
423 
424  for (int i = 0; i < FF_ARRAY_ELEMS(scale->param); i++)
425  if (scale->param[i] != DBL_MAX)
426  scale->sws->scaler_params[i] = scale->param[i];
427 
428  scale->sws->src_h_chr_pos = scale->in_h_chr_pos;
429  scale->sws->src_v_chr_pos = scale->in_v_chr_pos;
430  scale->sws->dst_h_chr_pos = scale->out_h_chr_pos;
431  scale->sws->dst_v_chr_pos = scale->out_v_chr_pos;
432 
433  // use generic thread-count if the user did not set it explicitly
434  if (!scale->sws->threads)
435  scale->sws->threads = ff_filter_get_nb_threads(ctx);
436 
437  if (!IS_SCALE2REF(ctx) && scale->uses_ref) {
438  AVFilterPad pad = {
439  .name = "ref",
440  .type = AVMEDIA_TYPE_VIDEO,
441  };
442  ret = ff_append_inpad(ctx, &pad);
443  if (ret < 0)
444  return ret;
445  }
446 
447  return 0;
448 }
449 
450 static av_cold void uninit(AVFilterContext *ctx)
451 {
452  ScaleContext *scale = ctx->priv;
453  av_expr_free(scale->w_pexpr);
454  av_expr_free(scale->h_pexpr);
455  scale->w_pexpr = scale->h_pexpr = NULL;
456  ff_framesync_uninit(&scale->fs);
457  sws_free_context(&scale->sws);
458 }
459 
460 static int query_formats(const AVFilterContext *ctx,
461  AVFilterFormatsConfig **cfg_in,
462  AVFilterFormatsConfig **cfg_out)
463 {
464  const ScaleContext *scale = ctx->priv;
465  AVFilterFormats *formats;
466  const AVPixFmtDescriptor *desc;
467  enum AVPixelFormat pix_fmt;
468  int ret;
469 
470  desc = NULL;
471  formats = NULL;
472  while ((desc = av_pix_fmt_desc_next(desc))) {
473  pix_fmt = av_pix_fmt_desc_get_id(desc);
474  if (sws_test_format(pix_fmt, 0)) {
475  if ((ret = ff_add_format(&formats, pix_fmt)) < 0)
476  return ret;
477  }
478  }
479  if ((ret = ff_formats_ref(formats, &cfg_in[0]->formats)) < 0)
480  return ret;
481 
482  desc = NULL;
483  formats = NULL;
484  while ((desc = av_pix_fmt_desc_next(desc))) {
485  pix_fmt = av_pix_fmt_desc_get_id(desc);
486  if (sws_test_format(pix_fmt, 1)) {
487  if ((ret = ff_add_format(&formats, pix_fmt)) < 0)
488  return ret;
489  }
490  }
491  if ((ret = ff_formats_ref(formats, &cfg_out[0]->formats)) < 0)
492  return ret;
493 
494  /* accept all supported inputs, even if user overrides their properties */
495  formats = ff_all_color_spaces();
496  for (int i = 0; i < formats->nb_formats; i++) {
497  if (!sws_test_colorspace(formats->formats[i], 0)) {
498  for (int j = i--; j + 1 < formats->nb_formats; j++)
499  formats->formats[j] = formats->formats[j + 1];
500  formats->nb_formats--;
501  }
502  }
503  if ((ret = ff_formats_ref(formats, &cfg_in[0]->color_spaces)) < 0)
504  return ret;
505 
506  if ((ret = ff_formats_ref(ff_all_color_ranges(),
507  &cfg_in[0]->color_ranges)) < 0)
508  return ret;
509 
510  /* propagate output properties if overridden */
511  if (scale->out_color_matrix != AVCOL_SPC_UNSPECIFIED) {
512  formats = ff_make_formats_list_singleton(scale->out_color_matrix);
513  } else {
514  formats = ff_all_color_spaces();
515  for (int i = 0; i < formats->nb_formats; i++) {
516  if (!sws_test_colorspace(formats->formats[i], 1)) {
517  for (int j = i--; j + 1 < formats->nb_formats; j++)
518  formats->formats[j] = formats->formats[j + 1];
519  formats->nb_formats--;
520  }
521  }
522  }
523  if ((ret = ff_formats_ref(formats, &cfg_out[0]->color_spaces)) < 0)
524  return ret;
525 
526  formats = scale->out_range != AVCOL_RANGE_UNSPECIFIED
527  ? ff_make_formats_list_singleton(scale->out_range)
528  : ff_all_color_ranges();
529  if ((ret = ff_formats_ref(formats, &cfg_out[0]->color_ranges)) < 0)
530  return ret;
531 
532  return 0;
533 }
534 
535 static int scale_eval_dimensions(AVFilterContext *ctx)
536 {
537  ScaleContext *scale = ctx->priv;
538  const char scale2ref = IS_SCALE2REF(ctx);
539  const AVFilterLink *inlink = scale2ref ? ctx->inputs[1] : ctx->inputs[0];
540  const AVFilterLink *outlink = ctx->outputs[0];
541  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
542  const AVPixFmtDescriptor *out_desc = av_pix_fmt_desc_get(outlink->format);
543  char *expr;
544  int eval_w, eval_h;
545  int ret;
546  double res;
547  const AVPixFmtDescriptor *main_desc;
548  const AVFilterLink *main_link;
549 
550  if (scale2ref) {
551  main_link = ctx->inputs[0];
552  main_desc = av_pix_fmt_desc_get(main_link->format);
553  }
554 
555  scale->var_values[VAR_IN_W] = scale->var_values[VAR_IW] = inlink->w;
556  scale->var_values[VAR_IN_H] = scale->var_values[VAR_IH] = inlink->h;
557  scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = NAN;
558  scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = NAN;
559  scale->var_values[VAR_A] = (double) inlink->w / inlink->h;
560  scale->var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
561  (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
562  scale->var_values[VAR_DAR] = scale->var_values[VAR_A] * scale->var_values[VAR_SAR];
563  scale->var_values[VAR_HSUB] = 1 << desc->log2_chroma_w;
564  scale->var_values[VAR_VSUB] = 1 << desc->log2_chroma_h;
565  scale->var_values[VAR_OHSUB] = 1 << out_desc->log2_chroma_w;
566  scale->var_values[VAR_OVSUB] = 1 << out_desc->log2_chroma_h;
567 
568  if (scale2ref) {
569  scale->var_values[VAR_S2R_MAIN_W] = main_link->w;
570  scale->var_values[VAR_S2R_MAIN_H] = main_link->h;
571  scale->var_values[VAR_S2R_MAIN_A] = (double) main_link->w / main_link->h;
572  scale->var_values[VAR_S2R_MAIN_SAR] = main_link->sample_aspect_ratio.num ?
573  (double) main_link->sample_aspect_ratio.num / main_link->sample_aspect_ratio.den : 1;
574  scale->var_values[VAR_S2R_MAIN_DAR] = scale->var_values[VAR_S2R_MDAR] =
575  scale->var_values[VAR_S2R_MAIN_A] * scale->var_values[VAR_S2R_MAIN_SAR];
576  scale->var_values[VAR_S2R_MAIN_HSUB] = 1 << main_desc->log2_chroma_w;
577  scale->var_values[VAR_S2R_MAIN_VSUB] = 1 << main_desc->log2_chroma_h;
578  }
579 
580  if (scale->uses_ref) {
581  const AVFilterLink *reflink = ctx->inputs[1];
582  const AVPixFmtDescriptor *ref_desc = av_pix_fmt_desc_get(reflink->format);
583  scale->var_values[VAR_REF_W] = scale->var_values[VAR_RW] = reflink->w;
584  scale->var_values[VAR_REF_H] = scale->var_values[VAR_RH] = reflink->h;
585  scale->var_values[VAR_REF_A] = (double) reflink->w / reflink->h;
586  scale->var_values[VAR_REF_SAR] = reflink->sample_aspect_ratio.num ?
587  (double) reflink->sample_aspect_ratio.num / reflink->sample_aspect_ratio.den : 1;
588  scale->var_values[VAR_REF_DAR] = scale->var_values[VAR_RDAR] =
589  scale->var_values[VAR_REF_A] * scale->var_values[VAR_REF_SAR];
590  scale->var_values[VAR_REF_HSUB] = 1 << ref_desc->log2_chroma_w;
591  scale->var_values[VAR_REF_VSUB] = 1 << ref_desc->log2_chroma_h;
592  }
593 
594  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
595  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
596 
597  res = av_expr_eval(scale->h_pexpr, scale->var_values, NULL);
598  if (isnan(res)) {
599  expr = scale->h_expr;
600  ret = AVERROR(EINVAL);
601  goto fail;
602  }
603  eval_h = scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = (int) res == 0 ? inlink->h : (int) res;
604 
605  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
606  if (isnan(res)) {
607  expr = scale->w_expr;
608  ret = AVERROR(EINVAL);
609  goto fail;
610  }
611  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
612 
613  scale->w = eval_w;
614  scale->h = eval_h;
615 
616  return 0;
617 
618 fail:
619  av_log(ctx, AV_LOG_ERROR,
620  "Error when evaluating the expression '%s'.\n", expr);
621  return ret;
622 }
623 
624 static int config_props(AVFilterLink *outlink)
625 {
626  AVFilterContext *ctx = outlink->src;
627  AVFilterLink *inlink0 = outlink->src->inputs[0];
628  AVFilterLink *inlink = IS_SCALE2REF(ctx) ?
629  outlink->src->inputs[1] :
630  outlink->src->inputs[0];
631  ScaleContext *scale = ctx->priv;
632  uint8_t *flags_val = NULL;
633  double w_adj = 1.0;
634  int ret;
635 
636  if ((ret = scale_eval_dimensions(ctx)) < 0)
637  goto fail;
638 
639  outlink->w = scale->w;
640  outlink->h = scale->h;
641 
642  if (scale->reset_sar)
643  w_adj = IS_SCALE2REF(ctx) ? scale->var_values[VAR_S2R_MAIN_SAR] :
644  scale->var_values[VAR_SAR];
645 
646  ret = ff_scale_adjust_dimensions(inlink, &outlink->w, &outlink->h,
647  scale->force_original_aspect_ratio,
648  scale->force_divisible_by, w_adj);
649 
650  if (ret < 0)
651  goto fail;
652 
653  if (outlink->w > INT_MAX ||
654  outlink->h > INT_MAX ||
655  (outlink->h * inlink->w) > INT_MAX ||
656  (outlink->w * inlink->h) > INT_MAX)
657  av_log(ctx, AV_LOG_ERROR, "Rescaled value for width or height is too big.\n");
658 
659  /* TODO: make algorithm configurable */
660 
661  if (scale->reset_sar)
662  outlink->sample_aspect_ratio = (AVRational){1, 1};
663  else if (inlink0->sample_aspect_ratio.num){
664  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink0->w, outlink->w * inlink0->h}, inlink0->sample_aspect_ratio);
665  } else
666  outlink->sample_aspect_ratio = inlink0->sample_aspect_ratio;
667 
668  av_opt_get(scale->sws, "sws_flags", 0, &flags_val);
669  av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s csp:%s range:%s sar:%d/%d -> w:%d h:%d fmt:%s csp:%s range:%s sar:%d/%d flags:%s\n",
670  inlink ->w, inlink ->h, av_get_pix_fmt_name( inlink->format),
671  av_color_space_name(inlink->colorspace), av_color_range_name(inlink->color_range),
672  inlink->sample_aspect_ratio.num, inlink->sample_aspect_ratio.den,
673  outlink->w, outlink->h, av_get_pix_fmt_name(outlink->format),
674  av_color_space_name(outlink->colorspace), av_color_range_name(outlink->color_range),
675  outlink->sample_aspect_ratio.num, outlink->sample_aspect_ratio.den,
676  flags_val);
677  av_freep(&flags_val);
678 
679  if (inlink->w != outlink->w || inlink->h != outlink->h) {
682  }
683 
684  if (scale->in_primaries != scale->out_primaries || scale->in_transfer != scale->out_transfer) {
687  }
688 
689  if (!IS_SCALE2REF(ctx)) {
690  ff_framesync_uninit(&scale->fs);
691  ret = ff_framesync_init(&scale->fs, ctx, ctx->nb_inputs);
692  if (ret < 0)
693  return ret;
694  scale->fs.on_event = do_scale;
695  scale->fs.in[0].time_base = ctx->inputs[0]->time_base;
696  scale->fs.in[0].sync = 1;
697  scale->fs.in[0].before = EXT_STOP;
698  scale->fs.in[0].after = EXT_STOP;
699  if (scale->uses_ref) {
700  av_assert0(ctx->nb_inputs == 2);
701  scale->fs.in[1].time_base = ctx->inputs[1]->time_base;
702  scale->fs.in[1].sync = 0;
703  scale->fs.in[1].before = EXT_NULL;
704  scale->fs.in[1].after = EXT_INFINITY;
705  }
706 
707  ret = ff_framesync_configure(&scale->fs);
708  if (ret < 0)
709  return ret;
710  }
711 
712  return 0;
713 
714 fail:
715  return ret;
716 }
717 
718 static int config_props_ref(AVFilterLink *outlink)
719 {
720  AVFilterLink *inlink = outlink->src->inputs[1];
721  FilterLink *il = ff_filter_link(inlink);
722  FilterLink *ol = ff_filter_link(outlink);
723 
724  outlink->w = inlink->w;
725  outlink->h = inlink->h;
726  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
727  outlink->time_base = inlink->time_base;
728  ol->frame_rate = il->frame_rate;
729  outlink->colorspace = inlink->colorspace;
730  outlink->color_range = inlink->color_range;
731 
732  return 0;
733 }
734 
735 static int request_frame(AVFilterLink *outlink)
736 {
737  return ff_request_frame(outlink->src->inputs[0]);
738 }
739 
740 static int request_frame_ref(AVFilterLink *outlink)
741 {
742  return ff_request_frame(outlink->src->inputs[1]);
743 }
744 
745 /* Takes over ownership of *frame_in, passes ownership of *frame_out to caller */
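/*
 * Sketch of the calling pattern this contract implies (cf. do_scale() and
 * filter_frame() below): the input frame is consumed even on failure, and the
 * output frame is only meaningful when the return value is >= 0.
 *
 *   AVFrame *out = NULL;
 *   ret = scale_frame(inlink, &in, &out);   // 'in' is now owned by scale_frame()
 *   if (ret >= 0 && out)
 *       ret = ff_filter_frame(outlink, out);
 */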
746 static int scale_frame(AVFilterLink *link, AVFrame **frame_in,
747  AVFrame **frame_out)
748 {
749  FilterLink *inl = ff_filter_link(link);
750  AVFilterContext *ctx = link->dst;
751  ScaleContext *scale = ctx->priv;
752  AVFilterLink *outlink = ctx->outputs[0];
753  AVFrame *out, *in = *frame_in;
754  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
755  char buf[32];
756  int ret, flags_orig, frame_changed;
757 
758  *frame_in = NULL;
759 
760  frame_changed = in->width != link->w ||
761  in->height != link->h ||
762  in->format != link->format ||
763  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
764  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num ||
765  in->colorspace != link->colorspace ||
766  in->color_range != link->color_range;
767 
768  if (scale->eval_mode == EVAL_MODE_FRAME || frame_changed) {
769  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
770 
771  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
772  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
773 
774  if (scale->eval_mode == EVAL_MODE_FRAME &&
775  !frame_changed &&
776  !IS_SCALE2REF(ctx) &&
777  !(vars_w[VAR_N] || vars_w[VAR_T]
778 #if FF_API_FRAME_PKT
779  || vars_w[VAR_POS]
780 #endif
781  ) &&
782  !(vars_h[VAR_N] || vars_h[VAR_T]
783 #if FF_API_FRAME_PKT
784  || vars_h[VAR_POS]
785 #endif
786  ) &&
787  scale->w && scale->h)
788  goto scale;
789 
790  if (scale->eval_mode == EVAL_MODE_INIT) {
791  snprintf(buf, sizeof(buf) - 1, "%d", scale->w);
792  av_opt_set(scale, "w", buf, 0);
793  snprintf(buf, sizeof(buf) - 1, "%d", scale->h);
794  av_opt_set(scale, "h", buf, 0);
795 
796  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
797  if (ret < 0)
798  goto err;
799 
800  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
801  if (ret < 0)
802  goto err;
803  }
804 
805  if (IS_SCALE2REF(ctx)) {
806  scale->var_values[VAR_S2R_MAIN_N] = inl->frame_count_out;
807  scale->var_values[VAR_S2R_MAIN_T] = TS2T(in->pts, link->time_base);
808 #if FF_API_FRAME_PKT
809 FF_DISABLE_DEPRECATION_WARNINGS
810  scale->var_values[VAR_S2R_MAIN_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
811 FF_ENABLE_DEPRECATION_WARNINGS
812 #endif
813  } else {
814  scale->var_values[VAR_N] = inl->frame_count_out;
815  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
816 #if FF_API_FRAME_PKT
817 FF_DISABLE_DEPRECATION_WARNINGS
818  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
819 FF_ENABLE_DEPRECATION_WARNINGS
820 #endif
821  }
822 
823  link->dst->inputs[0]->format = in->format;
824  link->dst->inputs[0]->w = in->width;
825  link->dst->inputs[0]->h = in->height;
826  link->dst->inputs[0]->colorspace = in->colorspace;
827  link->dst->inputs[0]->color_range = in->color_range;
828 
829  link->dst->inputs[0]->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
830  link->dst->inputs[0]->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
831 
832  if ((ret = config_props(outlink)) < 0)
833  goto err;
834  }
835 
836 scale:
837  scale->hsub = desc->log2_chroma_w;
838  scale->vsub = desc->log2_chroma_h;
839 
840  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
841  if (!out) {
842  ret = AVERROR(ENOMEM);
843  goto err;
844  }
845 
846  if (scale->in_color_matrix != -1)
847  in->colorspace = scale->in_color_matrix;
848  if (scale->in_primaries != -1)
849  in->color_primaries = scale->in_primaries;
850  if (scale->in_transfer != -1)
851  in->color_trc = scale->in_transfer;
852  if (scale->in_range != AVCOL_RANGE_UNSPECIFIED)
853  in->color_range = scale->in_range;
854  in->chroma_location = scale->in_chroma_loc;
855 
856  flags_orig = in->flags;
857  if (scale->interlaced > 0)
858  in->flags |= AV_FRAME_FLAG_INTERLACED;
859  else if (!scale->interlaced)
860  in->flags &= ~AV_FRAME_FLAG_INTERLACED;
861 
862  av_frame_copy_props(out, in);
863  out->width = outlink->w;
864  out->height = outlink->h;
865  out->color_range = outlink->color_range;
866  out->colorspace = outlink->colorspace;
867  if (scale->out_chroma_loc != AVCHROMA_LOC_UNSPECIFIED)
868  out->chroma_location = scale->out_chroma_loc;
869  if (scale->out_primaries != -1)
870  out->color_primaries = scale->out_primaries;
871  if (scale->out_transfer != -1)
872  out->color_trc = scale->out_transfer;
873 
874  if (out->width != in->width || out->height != in->height) {
875  av_frame_side_data_remove_by_props(&out->side_data, &out->nb_side_data,
876  AV_SIDE_DATA_PROP_SIZE_DEPENDENT);
877  }
878 
879  if (in->color_primaries != out->color_primaries || in->color_trc != out->color_trc) {
880  av_frame_side_data_remove_by_props(&out->side_data, &out->nb_side_data,
881  AV_SIDE_DATA_PROP_COLOR_DEPENDENT);
882  }
883 
884  av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
885  (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
886  (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
887  INT_MAX);
888 
889  if (sws_is_noop(out, in)) {
890  av_frame_free(&out);
891  in->flags = flags_orig;
892  *frame_out = in;
893  return 0;
894  }
895 
896  if (out->format == AV_PIX_FMT_PAL8) {
897  out->format = AV_PIX_FMT_BGR8;
898  avpriv_set_systematic_pal2((uint32_t*) out->data[1], out->format);
899  }
900 
901  ret = sws_scale_frame(scale->sws, out, in);
902  av_frame_free(&in);
903  out->flags = flags_orig;
904  out->format = outlink->format; /* undo PAL8 handling */
905  if (ret < 0)
906  av_frame_free(&out);
907  *frame_out = out;
908  return ret;
909 
910 err:
911  av_frame_free(&in);
912  return ret;
913 }
914 
915 static int do_scale(FFFrameSync *fs)
916 {
917  AVFilterContext *ctx = fs->parent;
918  ScaleContext *scale = ctx->priv;
919  AVFilterLink *outlink = ctx->outputs[0];
920  AVFrame *out, *in = NULL, *ref = NULL;
921  int ret = 0, frame_changed;
922 
923  ret = ff_framesync_get_frame(fs, 0, &in, 1);
924  if (ret < 0)
925  goto err;
926 
927  if (scale->uses_ref) {
928  ret = ff_framesync_get_frame(fs, 1, &ref, 0);
929  if (ret < 0)
930  goto err;
931  }
932 
933  if (ref) {
934  AVFilterLink *reflink = ctx->inputs[1];
935  FilterLink *rl = ff_filter_link(reflink);
936 
937  frame_changed = ref->width != reflink->w ||
938  ref->height != reflink->h ||
939  ref->format != reflink->format ||
940  ref->sample_aspect_ratio.den != reflink->sample_aspect_ratio.den ||
941  ref->sample_aspect_ratio.num != reflink->sample_aspect_ratio.num ||
942  ref->colorspace != reflink->colorspace ||
943  ref->color_range != reflink->color_range;
944 
945  if (frame_changed) {
946  reflink->format = ref->format;
947  reflink->w = ref->width;
948  reflink->h = ref->height;
949  reflink->sample_aspect_ratio.num = ref->sample_aspect_ratio.num;
950  reflink->sample_aspect_ratio.den = ref->sample_aspect_ratio.den;
951  reflink->colorspace = ref->colorspace;
952  reflink->color_range = ref->color_range;
953 
954  ret = config_props(outlink);
955  if (ret < 0)
956  goto err;
957  }
958 
959  if (scale->eval_mode == EVAL_MODE_FRAME) {
960  scale->var_values[VAR_REF_N] = rl->frame_count_out;
961  scale->var_values[VAR_REF_T] = TS2T(ref->pts, reflink->time_base);
962 #if FF_API_FRAME_PKT
963 FF_DISABLE_DEPRECATION_WARNINGS
964  scale->var_values[VAR_REF_POS] = ref->pkt_pos == -1 ? NAN : ref->pkt_pos;
965 FF_ENABLE_DEPRECATION_WARNINGS
966 #endif
967  }
968  }
969 
970  ret = scale_frame(ctx->inputs[0], &in, &out);
971  if (ret < 0)
972  goto err;
973 
974  av_assert0(out);
975  out->pts = av_rescale_q(fs->pts, fs->time_base, outlink->time_base);
976  return ff_filter_frame(outlink, out);
977 
978 err:
979  av_frame_free(&in);
980  return ret;
981 }
982 
983 static int filter_frame(AVFilterLink *link, AVFrame *in)
984 {
985  AVFilterContext *ctx = link->dst;
986  AVFilterLink *outlink = ctx->outputs[0];
987  AVFrame *out;
988  int ret;
989 
990  ret = scale_frame(link, &in, &out);
991  if (out)
992  return ff_filter_frame(outlink, out);
993 
994  return ret;
995 }
996 
997 static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
998 {
999  FilterLink *l = ff_filter_link(link);
1000  ScaleContext *scale = link->dst->priv;
1001  AVFilterLink *outlink = link->dst->outputs[1];
1002  int frame_changed;
1003 
1004  frame_changed = in->width != link->w ||
1005  in->height != link->h ||
1006  in->format != link->format ||
1007  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
1008  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num ||
1009  in->colorspace != link->colorspace ||
1010  in->color_range != link->color_range;
1011 
1012  if (frame_changed) {
1013  link->format = in->format;
1014  link->w = in->width;
1015  link->h = in->height;
1016  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
1017  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
1018  link->colorspace = in->colorspace;
1019  link->color_range = in->color_range;
1020 
1021  config_props_ref(outlink);
1022  }
1023 
1024  if (scale->eval_mode == EVAL_MODE_FRAME) {
1025  scale->var_values[VAR_N] = l->frame_count_out;
1026  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
1027 #if FF_API_FRAME_PKT
1028 FF_DISABLE_DEPRECATION_WARNINGS
1029  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
1030 FF_ENABLE_DEPRECATION_WARNINGS
1031 #endif
1032  }
1033 
1034  return ff_filter_frame(outlink, in);
1035 }
1036 
1037 static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
1038  char *res, int res_len, int flags)
1039 {
1040  ScaleContext *scale = ctx->priv;
1041  char *str_expr;
1042  AVExpr **pexpr_ptr;
1043  int ret, w, h;
1044 
1045  w = !strcmp(cmd, "width") || !strcmp(cmd, "w");
1046  h = !strcmp(cmd, "height") || !strcmp(cmd, "h");
1047 
1048  if (w || h) {
1049  str_expr = w ? scale->w_expr : scale->h_expr;
1050  pexpr_ptr = w ? &scale->w_pexpr : &scale->h_pexpr;
1051 
1052  ret = scale_parse_expr(ctx, str_expr, pexpr_ptr, cmd, args);
1053  } else
1054  ret = AVERROR(ENOSYS);
1055 
1056  if (ret < 0)
1057  av_log(ctx, AV_LOG_ERROR, "Failed to process command. Continuing with existing parameters.\n");
1058 
1059  return ret;
1060 }
1061 
1062 static int activate(AVFilterContext *ctx)
1063 {
1064  ScaleContext *scale = ctx->priv;
1065  return ff_framesync_activate(&scale->fs);
1066 }
1067 
1068 static const AVClass *child_class_iterate(void **iter)
1069 {
1070  switch ((uintptr_t) *iter) {
1071  case 0:
1072  *iter = (void*)(uintptr_t) 1;
1073  return sws_get_class();
1074  case 1:
1075  *iter = (void*)(uintptr_t) 2;
1076  return &ff_framesync_class;
1077  }
1078 
1079  return NULL;
1080 }
1081 
1082 static void *child_next(void *obj, void *prev)
1083 {
1084  ScaleContext *s = obj;
1085  if (!prev)
1086  return s->sws;
1087  if (prev == s->sws)
1088  return &s->fs;
1089  return NULL;
1090 }
1091 
1092 #define OFFSET(x) offsetof(ScaleContext, x)
1093 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
1094 #define TFLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
1095 
1096 static const AVOption scale_options[] = {
1097  { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1098  { "width", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1099  { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1100  { "height","Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1101  { "flags", "Flags to pass to libswscale", OFFSET(flags_str), AV_OPT_TYPE_STRING, { .str = "" }, .flags = FLAGS },
1102  { "interl", "set interlacing", OFFSET(interlaced), AV_OPT_TYPE_BOOL, {.i64 = 0 }, -1, 1, FLAGS },
1103  { "size", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, .flags = FLAGS },
1104  { "s", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, .flags = FLAGS },
1105  { "in_color_matrix", "set input YCbCr type", OFFSET(in_color_matrix), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AVCOL_SPC_NB-1, .flags = FLAGS, .unit = "color" },
1106  { "out_color_matrix", "set output YCbCr type", OFFSET(out_color_matrix), AV_OPT_TYPE_INT, { .i64 = AVCOL_SPC_UNSPECIFIED }, 0, AVCOL_SPC_NB-1, .flags = FLAGS, .unit = "color"},
1107  { "auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64=-1}, 0, 0, FLAGS, .unit = "color" },
1108  { "bt601", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_SPC_BT470BG}, 0, 0, FLAGS, .unit = "color" },
1109  { "bt470", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_SPC_BT470BG}, 0, 0, FLAGS, .unit = "color" },
1110  { "smpte170m", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_SPC_BT470BG}, 0, 0, FLAGS, .unit = "color" },
1111  { "bt709", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_SPC_BT709}, 0, 0, FLAGS, .unit = "color" },
1112  { "fcc", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_SPC_FCC}, 0, 0, FLAGS, .unit = "color" },
1113  { "smpte240m", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_SPC_SMPTE240M}, 0, 0, FLAGS, .unit = "color" },
1114  { "bt2020", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_SPC_BT2020_NCL}, 0, 0, FLAGS, .unit = "color" },
1115  { "in_range", "set input color range", OFFSET( in_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, .unit = "range" },
1116  { "out_range", "set output color range", OFFSET(out_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, .unit = "range" },
1117  { "auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, .unit = "range" },
1118  { "unknown", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, .unit = "range" },
1119  { "full", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1120  { "limited", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1121  { "jpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1122  { "mpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1123  { "tv", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1124  { "pc", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1125  { "in_chroma_loc", "set input chroma sample location", OFFSET(in_chroma_loc), AV_OPT_TYPE_INT, { .i64 = AVCHROMA_LOC_UNSPECIFIED }, 0, AVCHROMA_LOC_NB-1, .flags = FLAGS, .unit = "chroma_loc" },
1126  { "out_chroma_loc", "set output chroma sample location", OFFSET(out_chroma_loc), AV_OPT_TYPE_INT, { .i64 = AVCHROMA_LOC_UNSPECIFIED }, 0, AVCHROMA_LOC_NB-1, .flags = FLAGS, .unit = "chroma_loc" },
1127  {"auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_UNSPECIFIED}, 0, 0, FLAGS, .unit = "chroma_loc"},
1128  {"unknown", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_UNSPECIFIED}, 0, 0, FLAGS, .unit = "chroma_loc"},
1129  {"left", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_LEFT}, 0, 0, FLAGS, .unit = "chroma_loc"},
1130  {"center", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_CENTER}, 0, 0, FLAGS, .unit = "chroma_loc"},
1131  {"topleft", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_TOPLEFT}, 0, 0, FLAGS, .unit = "chroma_loc"},
1132  {"top", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_TOP}, 0, 0, FLAGS, .unit = "chroma_loc"},
1133  {"bottomleft", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_BOTTOMLEFT}, 0, 0, FLAGS, .unit = "chroma_loc"},
1134  {"bottom", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_BOTTOM}, 0, 0, FLAGS, .unit = "chroma_loc"},
1135  { "in_primaries", "set input primaries", OFFSET(in_primaries), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AVCOL_PRI_NB-1, .flags = FLAGS, .unit = "primaries" },
1136  { "out_primaries", "set output primaries", OFFSET(out_primaries), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AVCOL_PRI_NB-1, .flags = FLAGS, .unit = "primaries"},
1137  {"auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64=-1}, 0, 0, FLAGS, .unit = "primaries"},
1138  {"bt709", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_BT709}, 0, 0, FLAGS, .unit = "primaries"},
1139  {"bt470m", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_BT470M}, 0, 0, FLAGS, .unit = "primaries"},
1140  {"bt470bg", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_BT470BG}, 0, 0, FLAGS, .unit = "primaries"},
1141  {"smpte170m", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_SMPTE170M}, 0, 0, FLAGS, .unit = "primaries"},
1142  {"smpte240m", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_SMPTE240M}, 0, 0, FLAGS, .unit = "primaries"},
1143  {"film", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_FILM}, 0, 0, FLAGS, .unit = "primaries"},
1144  {"bt2020", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_BT2020}, 0, 0, FLAGS, .unit = "primaries"},
1145  {"smpte428", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_SMPTE428}, 0, 0, FLAGS, .unit = "primaries"},
1146  {"smpte431", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_SMPTE431}, 0, 0, FLAGS, .unit = "primaries"},
1147  {"smpte432", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_SMPTE432}, 0, 0, FLAGS, .unit = "primaries"},
1148  {"jedec-p22", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_JEDEC_P22}, 0, 0, FLAGS, .unit = "primaries"},
1149  {"ebu3213", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_PRI_EBU3213}, 0, 0, FLAGS, .unit = "primaries"},
1150  { "in_transfer", "set input color transfer", OFFSET(in_transfer), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AVCOL_TRC_NB-1, .flags = FLAGS, .unit = "transfer"},
1151  {"out_transfer", "set output color transfer", OFFSET(out_transfer), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AVCOL_TRC_NB-1, .flags = FLAGS, .unit = "transfer"},
1152  {"auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64=-1}, 0, 0, FLAGS, .unit = "transfer"},
1153  {"bt709", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_BT709}, 0, 0, FLAGS, .unit = "transfer"},
1154  {"bt470m", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_GAMMA22}, 0, 0, FLAGS, .unit = "transfer"},
1155  {"gamma22", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_GAMMA22}, 0, 0, FLAGS, .unit = "transfer"},
1156  {"bt470bg", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_GAMMA28}, 0, 0, FLAGS, .unit = "transfer"},
1157  {"gamma28", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_GAMMA28}, 0, 0, FLAGS, .unit = "transfer"},
1158  {"smpte170m", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_SMPTE170M}, 0, 0, FLAGS, .unit = "transfer"},
1159  {"smpte240m", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_SMPTE240M}, 0, 0, FLAGS, .unit = "transfer"},
1160  {"linear", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_LINEAR}, 0, 0, FLAGS, .unit = "transfer"},
1161  {"iec61966-2-1", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_IEC61966_2_1}, 0, 0, FLAGS, .unit = "transfer"},
1162  {"srgb", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_IEC61966_2_1}, 0, 0, FLAGS, .unit = "transfer"},
1163  {"iec61966-2-4", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_IEC61966_2_4}, 0, 0, FLAGS, .unit = "transfer"},
1164  {"xvycc", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_IEC61966_2_4}, 0, 0, FLAGS, .unit = "transfer"},
1165  {"bt1361e", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_BT1361_ECG}, 0, 0, FLAGS, .unit = "transfer"},
1166  {"bt2020-10", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_BT2020_10}, 0, 0, FLAGS, .unit = "transfer"},
1167  {"bt2020-12", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_BT2020_12}, 0, 0, FLAGS, .unit = "transfer"},
1168  {"smpte2084", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_SMPTE2084}, 0, 0, FLAGS, .unit = "transfer"},
1169  {"smpte428", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_SMPTE428}, 0, 0, FLAGS, .unit = "transfer"},
1170  {"arib-std-b67", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCOL_TRC_ARIB_STD_B67}, 0, 0, FLAGS, .unit = "transfer"},
1171  { "in_v_chr_pos", "input vertical chroma position in luma grid/256" , OFFSET(in_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1172  { "in_h_chr_pos", "input horizontal chroma position in luma grid/256", OFFSET(in_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1173  { "out_v_chr_pos", "output vertical chroma position in luma grid/256" , OFFSET(out_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1174  { "out_h_chr_pos", "output horizontal chroma position in luma grid/256", OFFSET(out_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1175  { "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 2, FLAGS, .unit = "force_oar" },
1176  { "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, .unit = "force_oar" },
1177  { "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, .unit = "force_oar" },
1178  { "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, .unit = "force_oar" },
1179  { "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1}, 1, 256, FLAGS },
1180  { "reset_sar", "reset SAR to 1 and scale to square pixels if scaling proportionally", OFFSET(reset_sar), AV_OPT_TYPE_BOOL, { .i64 = 0}, 0, 1, FLAGS },
1181  { "param0", "Scaler param 0", OFFSET(param[0]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1182  { "param1", "Scaler param 1", OFFSET(param[1]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1183  { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_INIT}, 0, EVAL_MODE_NB-1, FLAGS, .unit = "eval" },
1184  { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT}, .flags = FLAGS, .unit = "eval" },
1185  { "frame", "eval expressions during initialization and per-frame", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
1186  { NULL }
1187 };
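/*
 * For illustration, how some of these options combine on the command line
 * (examples only, not an exhaustive reference):
 *
 *   scale=1280:720:force_original_aspect_ratio=decrease:force_divisible_by=2
 *       fit inside 1280x720, preserving the aspect ratio, with even dimensions
 *   scale=w='if(gt(iw,1920),1920,iw)':h=-2:eval=frame
 *       re-evaluate the expressions for every frame (see EvalMode above)
 */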
1188 
1189 static const AVClass scale_class = {
1190  .class_name = "scale",
1191  .item_name = av_default_item_name,
1192  .option = scale_options,
1193  .version = LIBAVUTIL_VERSION_INT,
1194  .category = AV_CLASS_CATEGORY_FILTER,
1195  .child_class_iterate = child_class_iterate,
1196  .child_next = child_next,
1197 };
1198 
1199 static const AVFilterPad avfilter_vf_scale_inputs[] = {
1200  {
1201  .name = "default",
1202  .type = AVMEDIA_TYPE_VIDEO,
1203  },
1204 };
1205 
1206 static const AVFilterPad avfilter_vf_scale_outputs[] = {
1207  {
1208  .name = "default",
1209  .type = AVMEDIA_TYPE_VIDEO,
1210  .config_props = config_props,
1211  },
1212 };
1213 
1214 const FFFilter ff_vf_scale = {
1215  .p.name = "scale",
1216  .p.description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format."),
1217  .p.priv_class = &scale_class,
1218  .p.flags = AVFILTER_FLAG_DYNAMIC_INPUTS,
1219  .preinit = preinit,
1220  .init = init,
1221  .uninit = uninit,
1222  .priv_size = sizeof(ScaleContext),
1223  FILTER_INPUTS(avfilter_vf_scale_inputs),
1224  FILTER_OUTPUTS(avfilter_vf_scale_outputs),
1225  FILTER_QUERY_FUNC2(query_formats),
1226  .activate = activate,
1227  .process_command = process_command,
1228 };
1229 
1230 static const AVClass *scale2ref_child_class_iterate(void **iter)
1231 {
1232  const AVClass *c = *iter ? NULL : sws_get_class();
1233  *iter = (void*)(uintptr_t)c;
1234  return c;
1235 }
1236 
1237 static void *scale2ref_child_next(void *obj, void *prev)
1238 {
1239  ScaleContext *s = obj;
1240  if (!prev)
1241  return s->sws;
1242  return NULL;
1243 }
1244 
1245 static const AVClass scale2ref_class = {
1246  .class_name = "scale(2ref)",
1247  .item_name = av_default_item_name,
1248  .option = scale_options,
1249  .version = LIBAVUTIL_VERSION_INT,
1250  .category = AV_CLASS_CATEGORY_FILTER,
1251  .child_class_iterate = scale2ref_child_class_iterate,
1252  .child_next = scale2ref_child_next,
1253 };
1254 
1255 static const AVFilterPad avfilter_vf_scale2ref_inputs[] = {
1256  {
1257  .name = "default",
1258  .type = AVMEDIA_TYPE_VIDEO,
1259  .filter_frame = filter_frame,
1260  },
1261  {
1262  .name = "ref",
1263  .type = AVMEDIA_TYPE_VIDEO,
1264  .filter_frame = filter_frame_ref,
1265  },
1266 };
1267 
1268 static const AVFilterPad avfilter_vf_scale2ref_outputs[] = {
1269  {
1270  .name = "default",
1271  .type = AVMEDIA_TYPE_VIDEO,
1272  .config_props = config_props,
1273  .request_frame= request_frame,
1274  },
1275  {
1276  .name = "ref",
1277  .type = AVMEDIA_TYPE_VIDEO,
1278  .config_props = config_props_ref,
1279  .request_frame= request_frame_ref,
1280  },
1281 };
1282 
1283 const FFFilter ff_vf_scale2ref = {
1284  .p.name = "scale2ref",
1285  .p.description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format to the given reference."),
1286  .p.priv_class = &scale2ref_class,
1287  .preinit = preinit,
1288  .init = init,
1289  .uninit = uninit,
1290  .priv_size = sizeof(ScaleContext),
1291  FILTER_INPUTS(avfilter_vf_scale2ref_inputs),
1292  FILTER_OUTPUTS(avfilter_vf_scale2ref_outputs),
1293  FILTER_QUERY_FUNC2(query_formats),
1294  .process_command = process_command,
1295 };
filter_frame_ref
static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
Definition: vf_scale.c:997
ScaleContext::param
double param[2]
Definition: vf_scale.c:146
VAR_S2R_MAIN_SAR
@ VAR_S2R_MAIN_SAR
Definition: vf_scale.c:117
formats
formats
Definition: signature.h:47
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:116
ScaleContext::fs
FFFrameSync fs
Definition: vf_scale.c:136
AVFrame::color_trc
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:701
VAR_S2R_MAIN_A
@ VAR_S2R_MAIN_A
Definition: vf_scale.c:116
VAR_HSUB
@ VAR_HSUB
Definition: vf_scale.c:95
FF_ENABLE_DEPRECATION_WARNINGS
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:73
config_props_ref
static int config_props_ref(AVFilterLink *outlink)
Definition: vf_scale.c:718
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:137
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:215
AVFrame::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: frame.h:697
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
TFLAGS
#define TFLAGS
Definition: vf_scale.c:1094
AVCOL_PRI_EBU3213
@ AVCOL_PRI_EBU3213
EBU Tech. 3213-E (nothing there) / one of JEDEC P22 group phosphors.
Definition: pixfmt.h:635
check_exprs
static int check_exprs(AVFilterContext *ctx)
Definition: vf_scale.c:190
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
var_name
var_name
Definition: noise.c:47
ScaleContext::in_primaries
int in_primaries
Definition: vf_scale.c:163
VAR_REF_POS
@ VAR_REF_POS
Definition: vf_scale.c:113
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:301
out
FILE * out
Definition: movenc.c:55
ScaleContext
Definition: vf_scale.c:133
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1078
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:3341
AVCOL_TRC_LINEAR
@ AVCOL_TRC_LINEAR
"Linear transfer characteristics"
Definition: pixfmt.h:653
AVCHROMA_LOC_BOTTOM
@ AVCHROMA_LOC_BOTTOM
Definition: pixfmt.h:776
ScaleContext::force_divisible_by
int force_divisible_by
Definition: vf_scale.c:178
VAR_REF_N
@ VAR_REF_N
Definition: vf_scale.c:111
ff_framesync_get_frame
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe, unsigned get)
Get the current frame in an input.
Definition: framesync.c:269
avfilter_vf_scale2ref_outputs
static const AVFilterPad avfilter_vf_scale2ref_outputs[]
Definition: vf_scale.c:1268
FLAGS
#define FLAGS
Definition: vf_scale.c:1093
int64_t
long long int64_t
Definition: coverity.c:34
ScaleContext::flags_str
char * flags_str
Definition: vf_scale.c:159
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
AVFrame::color_primaries
enum AVColorPrimaries color_primaries
Definition: frame.h:699
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:70
AVFrame::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:708
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: filters.h:262
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:410
AVCOL_TRC_NB
@ AVCOL_TRC_NB
Not part of ABI.
Definition: pixfmt.h:666
pixdesc.h
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:522
AVFrame::width
int width
Definition: frame.h:482
w
uint8_t w
Definition: llviddspenc.c:38
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:750
VAR_A
@ VAR_A
Definition: vf_scale.c:92
request_frame_ref
static int request_frame_ref(AVFilterLink *outlink)
Definition: vf_scale.c:740
sws_test_primaries
int sws_test_primaries(enum AVColorPrimaries prim, int output)
Test if a given set of color primaries is supported.
Definition: format.c:525
AVOption
AVOption.
Definition: opt.h:429
AVCOL_SPC_NB
@ AVCOL_SPC_NB
Not part of ABI.
Definition: pixfmt.h:693
scale_parse_expr
static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
Definition: vf_scale.c:266
scale2ref_class
static const AVClass scale2ref_class
Definition: vf_scale.c:1245
request_frame
static int request_frame(AVFilterLink *outlink)
Definition: vf_scale.c:735
av_pix_fmt_desc_next
const AVPixFmtDescriptor * av_pix_fmt_desc_next(const AVPixFmtDescriptor *prev)
Iterate over all pixel format descriptors known to libavutil.
Definition: pixdesc.c:3348
VAR_REF_T
@ VAR_REF_T
Definition: vf_scale.c:112
ff_request_frame
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:491
VAR_S2R_MAIN_HSUB
@ VAR_S2R_MAIN_HSUB
Definition: vf_scale.c:119
ScaleContext::var_values
double var_values[VARS_NB]
Definition: vf_scale.c:157
ScaleContext::out_range
int out_range
Definition: vf_scale.c:168
AVCOL_PRI_JEDEC_P22
@ AVCOL_PRI_JEDEC_P22
Definition: pixfmt.h:636
VAR_S2R_MDAR
@ VAR_S2R_MDAR
Definition: vf_scale.c:118
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:225
float.h
AVCOL_TRC_BT2020_12
@ AVCOL_TRC_BT2020_12
ITU-R BT2020 for 12-bit system.
Definition: pixfmt.h:660
EVAL_MODE_FRAME
@ EVAL_MODE_FRAME
Definition: vf_scale.c:129
VAR_S2R_MAIN_H
@ VAR_S2R_MAIN_H
Definition: vf_scale.c:115
AVFrame::flags
int flags
Frame flags, a combination of AV_FRAME_FLAGS.
Definition: frame.h:690
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:203
FFFrameSync
Frame sync structure.
Definition: framesync.h:168
EXT_INFINITY
@ EXT_INFINITY
Extend the frame to infinity.
Definition: framesync.h:75
ScaleContext::in_h_chr_pos
int in_h_chr_pos
Definition: vf_scale.c:174
VAR_OUT_H
@ VAR_OUT_H
Definition: vf_scale.c:91
video.h
ff_make_formats_list_singleton
AVFilterFormats * ff_make_formats_list_singleton(int fmt)
Equivalent to ff_make_format_list({const int[]}{ fmt, -1 })
Definition: formats.c:529
ScaleContext::out_chroma_loc
int out_chroma_loc
Definition: vf_scale.c:171
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
av_expr_parse
int av_expr_parse(AVExpr **expr, const char *s, const char *const *const_names, const char *const *func1_names, double(*const *funcs1)(void *, double), const char *const *func2_names, double(*const *funcs2)(void *, double, double), int log_offset, void *log_ctx)
Parse an expression.
Definition: eval.c:710
VAR_S2R_MAIN_POS
@ VAR_S2R_MAIN_POS
Definition: vf_scale.c:123
AVFrame::chroma_location
enum AVChromaLocation chroma_location
Definition: frame.h:710
AVCOL_SPC_BT470BG
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
Definition: pixfmt.h:679
EXT_STOP
@ EXT_STOP
Completely stop all streams with this one.
Definition: framesync.h:65
ff_append_inpad
int ff_append_inpad(AVFilterContext *f, AVFilterPad *p)
Append a new input/output pad to the filter's list of such pads.
Definition: avfilter.c:127
AVCOL_TRC_IEC61966_2_1
@ AVCOL_TRC_IEC61966_2_1
IEC 61966-2-1 (sRGB or sYCC)
Definition: pixfmt.h:658
av_color_space_name
const char * av_color_space_name(enum AVColorSpace space)
Definition: pixdesc.c:3717
VAR_DAR
@ VAR_DAR
Definition: vf_scale.c:94
avfilter_vf_scale_inputs
static const AVFilterPad avfilter_vf_scale_inputs[]
Definition: vf_scale.c:1199
fail
#define fail()
Definition: checkasm.h:193
VARS_NB
@ VARS_NB
Definition: vf_scale.c:124
AVCOL_TRC_GAMMA28
@ AVCOL_TRC_GAMMA28
also ITU-R BT470BG
Definition: pixfmt.h:650
VAR_REF_A
@ VAR_REF_A
Definition: vf_scale.c:106
ScaleContext::eval_mode
int eval_mode
expression evaluation mode
Definition: vf_scale.c:181
EXT_NULL
@ EXT_NULL
Ignore this stream and continue processing the other ones.
Definition: framesync.h:70
VAR_IN_H
@ VAR_IN_H
Definition: vf_scale.c:89
EVAL_MODE_NB
@ EVAL_MODE_NB
Definition: vf_scale.c:130
ScaleContext::in_chroma_loc
int in_chroma_loc
Definition: vf_scale.c:170
sws_get_class
const AVClass * sws_get_class(void)
Get the AVClass for SwsContext.
Definition: options.c:106
av_opt_set
int av_opt_set(void *obj, const char *name, const char *val, int search_flags)
Definition: opt.c:835
VAR_REF_W
@ VAR_REF_W
Definition: vf_scale.c:104
AVFILTER_FLAG_DYNAMIC_INPUTS
#define AVFILTER_FLAG_DYNAMIC_INPUTS
The number of the filter inputs is not determined just by AVFilter.inputs.
Definition: avfilter.h:139
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
av_expr_free
void av_expr_free(AVExpr *e)
Free a parsed expression previously created with av_expr_parse().
Definition: eval.c:358
AVRational::num
int num
Numerator.
Definition: rational.h:59
OFFSET
#define OFFSET(x)
Definition: vf_scale.c:1092
AV_SIDE_DATA_PROP_SIZE_DEPENDENT
@ AV_SIDE_DATA_PROP_SIZE_DEPENDENT
Side data depends on the video dimensions.
Definition: frame.h:292
AVCOL_TRC_GAMMA22
@ AVCOL_TRC_GAMMA22
also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM
Definition: pixfmt.h:649
preinit
static av_cold int preinit(AVFilterContext *ctx)
Definition: vf_scale.c:319
AVFilterPad
A filter pad used for either input or output.
Definition: filters.h:38
activate
static int activate(AVFilterContext *ctx)
Definition: vf_scale.c:1062
AV_PIX_FMT_BGR8
@ AV_PIX_FMT_BGR8
packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
Definition: pixfmt.h:90
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:209
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_cold
#define av_cold
Definition: attributes.h:90
VAR_REF_H
@ VAR_REF_H
Definition: vf_scale.c:105
scale2ref_child_next
static void * scale2ref_child_next(void *obj, void *prev)
Definition: vf_scale.c:1237
FFFilter
Definition: filters.h:265
ScaleContext::reset_sar
int reset_sar
Definition: vf_scale.c:179
s
#define s(width, name)
Definition: cbs_vp9.c:198
VAR_OH
@ VAR_OH
Definition: vf_scale.c:91
AVCHROMA_LOC_TOP
@ AVCHROMA_LOC_TOP
Definition: pixfmt.h:774
AVCOL_PRI_NB
@ AVCOL_PRI_NB
Not part of ABI.
Definition: pixfmt.h:637
ScaleContext::out_primaries
int out_primaries
Definition: vf_scale.c:164
VAR_S2R_MAIN_W
@ VAR_S2R_MAIN_W
Definition: vf_scale.c:114
AVCOL_TRC_BT1361_ECG
@ AVCOL_TRC_BT1361_ECG
ITU-R BT1361 Extended Colour Gamut.
Definition: pixfmt.h:657
ScaleContext::slice_y
int slice_y
top of current output slice
Definition: vf_scale.c:149
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
AV_OPT_TYPE_DOUBLE
@ AV_OPT_TYPE_DOUBLE
Underlying C type is double.
Definition: opt.h:267
av_expr_count_vars
int av_expr_count_vars(AVExpr *e, unsigned *counter, int size)
Track the presence of variables and their number of occurrences in a parsed expression.
Definition: eval.c:782
query_formats
static int query_formats(const AVFilterContext *ctx, AVFilterFormatsConfig **cfg_in, AVFilterFormatsConfig **cfg_out)
Definition: vf_scale.c:460
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:678
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_scale.c:337
VAR_OVSUB
@ VAR_OVSUB
Definition: vf_scale.c:98
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
filters.h
ScaleContext::uses_ref
int uses_ref
Definition: vf_scale.c:151
sws_test_colorspace
int sws_test_colorspace(enum AVColorSpace csp, int output)
Test if a given color space is supported.
Definition: format.c:508
ctx
AVFormatContext * ctx
Definition: movenc.c:49
process_command
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args, char *res, int res_len, int flags)
Definition: vf_scale.c:1037
av_expr_eval
double av_expr_eval(AVExpr *e, const double *const_values, void *opaque)
Evaluate a previously parsed expression.
Definition: eval.c:792
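A minimal parse-and-evaluate sketch; the constant names and values below are placeholders chosen for illustration:

#include <stdio.h>
#include <libavutil/eval.h>

int main(void)
{
    static const char *const names[] = { "in_w", "in_h", NULL };
    double values[] = { 1920.0, 1080.0 };   /* values[i] corresponds to names[i] */
    AVExpr *e = NULL;

    if (av_expr_parse(&e, "in_w/2", names,
                      NULL, NULL, NULL, NULL, 0, NULL) < 0)
        return 1;
    printf("%g\n", av_expr_eval(e, values, NULL));   /* prints 960 */
    av_expr_free(e);
    return 0;
}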
AVCOL_PRI_SMPTE428
@ AVCOL_PRI_SMPTE428
SMPTE ST 428-1 (CIE 1931 XYZ)
Definition: pixfmt.h:631
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
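For example, rescaling a timestamp from a 1/90000 time base into milliseconds; the numbers are illustrative:

#include <stdio.h>
#include <libavutil/mathematics.h>

int main(void)
{
    AVRational tb_90k = { 1, 90000 };
    AVRational tb_ms  = { 1, 1000 };
    int64_t    pts    = 180000;   /* 2 seconds in tb_90k */

    printf("%lld ms\n", (long long)av_rescale_q(pts, tb_90k, tb_ms));   /* 2000 ms */
    return 0;
}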
AVExpr
Definition: eval.c:158
AVPixFmtDescriptor::log2_chroma_w
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
Definition: pixdesc.h:80
AVCOL_PRI_SMPTE240M
@ AVCOL_PRI_SMPTE240M
identical to above, also called "SMPTE C" even though it uses D65
Definition: pixfmt.h:628
ScaleContext::w_pexpr
AVExpr * w_pexpr
Definition: vf_scale.c:155
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: filters.h:263
avpriv_set_systematic_pal2
int avpriv_set_systematic_pal2(uint32_t pal[256], enum AVPixelFormat pix_fmt)
Definition: imgutils.c:178
NAN
#define NAN
Definition: mathematics.h:115
link
In the filter framework, the word "frame" indicates either a video frame or a group of audio samples as stored in an AVFrame structure. For each input and each output, the list of supported formats (pixel formats for video; channel layouts and sample formats for audio) is a reference to a shared object; the negotiation mechanism computes the intersection of the formats supported at each end of a link.
Definition: filter_design.txt:23
AVCOL_PRI_BT470BG
@ AVCOL_PRI_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
Definition: pixfmt.h:626
AVCOL_PRI_SMPTE170M
@ AVCOL_PRI_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
Definition: pixfmt.h:627
ScaleContext::out_h_chr_pos
int out_h_chr_pos
Definition: vf_scale.c:172
av_color_range_name
const char * av_color_range_name(enum AVColorRange range)
Definition: pixdesc.c:3657
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
VAR_REF_DAR
@ VAR_REF_DAR
Definition: vf_scale.c:108
ff_framesync_class
const AVClass ff_framesync_class
Definition: framesync.c:54
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:75
NULL
#define NULL
Definition: coverity.c:32
ScaleContext::out_v_chr_pos
int out_v_chr_pos
Definition: vf_scale.c:173
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:633
VAR_POS
@ VAR_POS
Definition: noise.c:56
VAR_T
@ VAR_T
Definition: vf_scale.c:100
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:200
AVCHROMA_LOC_LEFT
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Definition: pixfmt.h:771
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVCHROMA_LOC_TOPLEFT
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
Definition: pixfmt.h:773
sws_is_noop
int sws_is_noop(const AVFrame *dst, const AVFrame *src)
Check if a given conversion is a noop.
Definition: format.c:572
AVCOL_TRC_IEC61966_2_4
@ AVCOL_TRC_IEC61966_2_4
IEC 61966-2-4.
Definition: pixfmt.h:656
isnan
#define isnan(x)
Definition: libm.h:342
scale2ref_child_class_iterate
static const AVClass * scale2ref_child_class_iterate(void **iter)
Definition: vf_scale.c:1230
ScaleContext::in_range
int in_range
Definition: vf_scale.c:167
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:265
VAR_IN_W
@ VAR_IN_W
Definition: vf_scale.c:88
AVCOL_PRI_BT709
@ AVCOL_PRI_BT709
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B
Definition: pixfmt.h:621
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:239
ff_add_format
int ff_add_format(AVFilterFormats **avff, int64_t fmt)
Add fmt to the list of media formats contained in *avff.
Definition: formats.c:504
parseutils.h
sws_test_format
int sws_test_format(enum AVPixelFormat format, int output)
Test if a given pixel format is supported.
Definition: format.c:503
ScaleContext::h_pexpr
AVExpr * h_pexpr
Definition: vf_scale.c:156
av_color_primaries_name
const char * av_color_primaries_name(enum AVColorPrimaries primaries)
Definition: pixdesc.c:3675
double
double
Definition: af_crystalizer.c:132
AVCOL_TRC_BT2020_10
@ AVCOL_TRC_BT2020_10
ITU-R BT2020 for 10-bit system.
Definition: pixfmt.h:659
ScaleContext::out_transfer
int out_transfer
Definition: vf_scale.c:166
ff_all_color_spaces
AVFilterFormats * ff_all_color_spaces(void)
Construct an AVFilterFormats representing all possible color spaces.
Definition: formats.c:630
AVCOL_RANGE_UNSPECIFIED
@ AVCOL_RANGE_UNSPECIFIED
Definition: pixfmt.h:716
c
Undefined behavior: in C, some operations are undefined, such as signed integer overflow, dereferencing freed memory, or accessing outside allocated bounds. Undefined behavior must not occur in a C program; it is not safe even if the output of the undefined operations is unused. Optimizing compilers have in fact optimized code on the assumption that no undefined behavior occurs, and optimizing based on wrong assumptions can, and in some cases has, led to effects beyond the output of the computations. Signed integer overflow is a particular problem in speed-critical, highly optimized code that works with signed integers.
Definition: undefined.txt:32
AVFilterFormatsConfig
Lists of formats / etc.
Definition: avfilter.h:109
ScaleContext::out_color_matrix
int out_color_matrix
Definition: vf_scale.c:162
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:197
AV_CLASS_CATEGORY_FILTER
@ AV_CLASS_CATEGORY_FILTER
Definition: log.h:36
AVCOL_PRI_BT2020
@ AVCOL_PRI_BT2020
ITU-R BT2020.
Definition: pixfmt.h:630
VAR_IW
@ VAR_IW
Definition: vf_scale.c:88
ScaleContext::in_transfer
int in_transfer
Definition: vf_scale.c:165
ScaleContext::sws
SwsContext * sws
Definition: vf_scale.c:135
AVCOL_TRC_SMPTE2084
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
Definition: pixfmt.h:661
AVCOL_PRI_SMPTE431
@ AVCOL_PRI_SMPTE431
SMPTE ST 431-2 (2011) / DCI P3.
Definition: pixfmt.h:633
eval.h
VAR_IH
@ VAR_IH
Definition: vf_scale.c:89
VAR_REF_SAR
@ VAR_REF_SAR
Definition: vf_scale.c:107
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
AVCOL_TRC_SMPTE240M
@ AVCOL_TRC_SMPTE240M
Definition: pixfmt.h:652
AVCOL_PRI_FILM
@ AVCOL_PRI_FILM
colour filters using Illuminant C
Definition: pixfmt.h:629
sws_alloc_context
SwsContext * sws_alloc_context(void)
Allocate an empty SwsContext and set its fields to default values.
Definition: utils.c:1009
AVClass::child_next
void *(* child_next)(void *obj, void *prev)
Return next AVOptions-enabled child or NULL.
Definition: log.h:149
child_class_iterate
static const AVClass * child_class_iterate(void **iter)
Definition: vf_scale.c:1068
ScaleContext::w
int w
New dimensions.
Definition: vf_scale.c:144
AVFrame::time_base
AVRational time_base
Time base for the timestamps in this frame.
Definition: frame.h:537
scale_frame
static int scale_frame(AVFilterLink *link, AVFrame **frame_in, AVFrame **frame_out)
Definition: vf_scale.c:746
VAR_RH
@ VAR_RH
Definition: vf_scale.c:105
TS2T
#define TS2T(ts, tb)
Definition: filters.h:481
AVCHROMA_LOC_UNSPECIFIED
@ AVCHROMA_LOC_UNSPECIFIED
Definition: pixfmt.h:770
AVFrame::pkt_pos
attribute_deprecated int64_t pkt_pos
reordered pos from the last AVPacket that has been input into the decoder
Definition: frame.h:728
IS_SCALE2REF
#define IS_SCALE2REF(ctx)
Definition: vf_scale.c:186
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:497
scale_eval.h
VAR_RW
@ VAR_RW
Definition: vf_scale.c:104
FF_API_FRAME_PKT
#define FF_API_FRAME_PKT
Definition: version.h:109
ScaleContext::hsub
int hsub
Definition: vf_scale.c:148
VAR_OUT_W
@ VAR_OUT_W
Definition: vf_scale.c:90
imgutils_internal.h
sws_test_transfer
int sws_test_transfer(enum AVColorTransferCharacteristic trc, int output)
Test if a given color transfer function is supported.
Definition: format.c:531
ff_all_color_ranges
AVFilterFormats * ff_all_color_ranges(void)
Construct an AVFilterFormats representing all possible color ranges.
Definition: formats.c:646
av_pix_fmt_desc_get_id
enum AVPixelFormat av_pix_fmt_desc_get_id(const AVPixFmtDescriptor *desc)
Definition: pixdesc.c:3360
filter_frame
static int filter_frame(AVFilterLink *link, AVFrame *in)
Definition: vf_scale.c:983
av_parse_video_size
int av_parse_video_size(int *width_ptr, int *height_ptr, const char *str)
Parse str and put in width_ptr and height_ptr the detected values.
Definition: parseutils.c:150
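A small example accepting either an explicit "WxH" string or a named size abbreviation:

#include <stdio.h>
#include <libavutil/parseutils.h>

int main(void)
{
    int w, h;

    /* also accepts explicit sizes such as "1280x720" */
    if (av_parse_video_size(&w, &h, "hd1080") < 0)
        return 1;
    printf("%dx%d\n", w, h);   /* prints 1920x1080 */
    return 0;
}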
av_frame_side_data_remove_by_props
void av_frame_side_data_remove_by_props(AVFrameSideData ***sd, int *nb_sd, int props)
Remove and free all side data instances that match any of the given side data properties.
Definition: side_data.c:115
AVCOL_TRC_BT709
@ AVCOL_TRC_BT709
also ITU-R BT1361
Definition: pixfmt.h:646
AVCOL_SPC_SMPTE240M
@ AVCOL_SPC_SMPTE240M
derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries
Definition: pixfmt.h:681
ScaleContext::vsub
int vsub
chroma subsampling
Definition: vf_scale.c:148
config_props
static int config_props(AVFilterLink *outlink)
Definition: vf_scale.c:624
interlaced
uint8_t interlaced
Definition: mxfenc.c:2287
VAR_SAR
@ VAR_SAR
Definition: vf_scale.c:93
VAR_RDAR
@ VAR_RDAR
Definition: vf_scale.c:108
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:684
VAR_S2R_MAIN_N
@ VAR_S2R_MAIN_N
Definition: vf_scale.c:121
internal.h
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:857
EvalMode
EvalMode
Definition: af_volume.h:39
FILTER_QUERY_FUNC2
#define FILTER_QUERY_FUNC2(func)
Definition: filters.h:239
AVFilterPad::name
const char * name
Pad name.
Definition: filters.h:44
AVCOL_SPC_UNSPECIFIED
@ AVCOL_SPC_UNSPECIFIED
Definition: pixfmt.h:676
ScaleContext::h_expr
char * h_expr
height expression string
Definition: vf_scale.c:154
AV_FRAME_FLAG_INTERLACED
#define AV_FRAME_FLAG_INTERLACED
A flag to mark frames whose content is interlaced.
Definition: frame.h:669
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:733
AV_SIDE_DATA_PROP_COLOR_DEPENDENT
@ AV_SIDE_DATA_PROP_COLOR_DEPENDENT
Side data depends on the video color space.
Definition: frame.h:299
avfilter_vf_scale_outputs
static const AVFilterPad avfilter_vf_scale_outputs[]
Definition: vf_scale.c:1206
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:84
AVCOL_PRI_BT470M
@ AVCOL_PRI_BT470M
also FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:624
ret
ret
Definition: filter_design.txt:187
pixfmt.h
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:80
ScaleContext::in_color_matrix
int in_color_matrix
Definition: vf_scale.c:161
VAR_REF_HSUB
@ VAR_REF_HSUB
Definition: vf_scale.c:109
child_next
static void * child_next(void *obj, void *prev)
Definition: vf_scale.c:1082
ff_framesync_init
int ff_framesync_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in)
Initialize a frame sync structure.
Definition: framesync.c:86
AVFrame::sample_aspect_ratio
AVRational sample_aspect_ratio
Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
Definition: frame.h:517
VAR_S2R_MAIN_T
@ VAR_S2R_MAIN_T
Definition: vf_scale.c:122
scale_eval_dimensions
static int scale_eval_dimensions(AVFilterContext *ctx)
Definition: vf_scale.c:535
var_names
static const char *const var_names[]
Definition: vf_scale.c:46
AVFrame::height
int height
Definition: frame.h:482
VAR_S2R_MAIN_DAR
@ VAR_S2R_MAIN_DAR
Definition: vf_scale.c:118
scale_options
static const AVOption scale_options[]
Definition: vf_scale.c:1096
AVCOL_TRC_ARIB_STD_B67
@ AVCOL_TRC_ARIB_STD_B67
ARIB STD-B67, known as "Hybrid log-gamma".
Definition: pixfmt.h:665
framesync.h
do_scale
static int do_scale(FFFrameSync *fs)
Definition: vf_scale.c:915
AVCHROMA_LOC_CENTER
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
Definition: pixfmt.h:772
AVRational::den
int den
Denominator.
Definition: rational.h:60
AVCOL_SPC_FCC
@ AVCOL_SPC_FCC
FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:678
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
avfilter.h
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_scale.c:450
ScaleContext::force_original_aspect_ratio
int force_original_aspect_ratio
Definition: vf_scale.c:177
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:117
AVCOL_TRC_SMPTE170M
@ AVCOL_TRC_SMPTE170M
also ITU-R BT601-6 525 or 625 / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC
Definition: pixfmt.h:651
avfilter_vf_scale2ref_inputs
static const AVFilterPad avfilter_vf_scale2ref_inputs[]
Definition: vf_scale.c:1255
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
AVFilterContext
An instance of a filter.
Definition: avfilter.h:257
VAR_OW
@ VAR_OW
Definition: vf_scale.c:90
FF_DISABLE_DEPRECATION_WARNINGS
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:72
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:272
desc
const char * desc
Definition: libsvtav1.c:79
VAR_VSUB
@ VAR_VSUB
Definition: vf_scale.c:96
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
FFFilter::p
AVFilter p
The public AVFilter.
Definition: filters.h:269
mem.h
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
AVCOL_PRI_SMPTE432
@ AVCOL_PRI_SMPTE432
SMPTE ST 432-1 (2010) / P3 D65 / Display P3.
Definition: pixfmt.h:634
ScaleContext::interlaced
int interlaced
Definition: vf_scale.c:150
av_free
#define av_free(p)
Definition: tableprint_vlc.h:34
scale
static void scale(int *out, const int *in, const int w, const int h, const int shift)
Definition: intra.c:291
VAR_N
@ VAR_N
Definition: vf_scale.c:99
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Underlying C type is int.
Definition: opt.h:327
ff_vf_scale2ref
const FFFilter ff_vf_scale2ref
Definition: vf_scale.c:185
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
scale_class
static const AVClass scale_class
Definition: vf_scale.c:1189
ScaleContext::w_expr
char * w_expr
width expression string
Definition: vf_scale.c:153
ff_vf_scale
const FFFilter ff_vf_scale
Definition: vf_scale.c:1214
sws_free_context
void sws_free_context(SwsContext **ctx)
Free the context and everything associated with it, and write NULL to the provided pointer.
Definition: utils.c:2315
EVAL_MODE_INIT
@ EVAL_MODE_INIT
Definition: vf_scale.c:128
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:482
AVCHROMA_LOC_NB
@ AVCHROMA_LOC_NB
Not part of ABI.
Definition: pixfmt.h:777
av_opt_get
int av_opt_get(void *obj, const char *name, int search_flags, uint8_t **out_val)
Definition: opt.c:1215
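A sketch of reading an option back as a string; av_opt_get() allocates the returned buffer, which the caller must free. The object and option name mirror the av_opt_set() sketch earlier on this page and are assumptions:

#include <stdio.h>
#include <stdint.h>
#include <libswscale/swscale.h>
#include <libavutil/mem.h>
#include <libavutil/opt.h>

static void print_scaler_flags(SwsContext *sws)
{
    uint8_t *val = NULL;

    if (av_opt_get(sws, "sws_flags", 0, &val) >= 0) {
        printf("sws_flags = %s\n", (char *)val);
        av_free(val);   /* av_opt_get() allocated the string */
    }
}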
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AVCOL_TRC_SMPTE428
@ AVCOL_TRC_SMPTE428
SMPTE ST 428-1.
Definition: pixfmt.h:663
VAR_REF_VSUB
@ VAR_REF_VSUB
Definition: vf_scale.c:110
sws_scale_frame
int sws_scale_frame(SwsContext *sws, AVFrame *dst, const AVFrame *src)
Scale source data from src and write the output to dst.
Definition: swscale.c:1355
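A hedged sketch of frame-based scaling, assuming the newer swscale API in which the context configures itself from the frame properties; the destination buffer is allocated explicitly and the target size and format are arbitrary:

#include <libswscale/swscale.h>
#include <libavutil/frame.h>
#include <libavutil/pixfmt.h>

/* Scale src into a newly allocated 1280x720 YUV420P frame.
 * Returns the scaled frame, or NULL on error. */
static AVFrame *scale_to_720p(SwsContext *sws, const AVFrame *src)
{
    AVFrame *dst = av_frame_alloc();

    if (!dst)
        return NULL;
    dst->width  = 1280;
    dst->height = 720;
    dst->format = AV_PIX_FMT_YUV420P;
    if (av_frame_get_buffer(dst, 0) < 0 ||
        av_frame_copy_props(dst, src) < 0 ||   /* carry over metadata */
        sws_scale_frame(sws, dst, src) < 0) {
        av_frame_free(&dst);
        return NULL;
    }
    return dst;
}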
h
h
Definition: vp9dsp_template.c:2070
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:352
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Underlying C type is a uint8_t* that is either NULL or points to a C string allocated with the av_mal...
Definition: opt.h:276
VAR_OHSUB
@ VAR_OHSUB
Definition: vf_scale.c:97
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:675
SwsContext
Main external API structure.
Definition: swscale.h:182
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
snprintf
#define snprintf
Definition: snprintf.h:34
ScaleContext::size_str
char * size_str
Definition: vf_scale.c:145
av_color_transfer_name
const char * av_color_transfer_name(enum AVColorTransferCharacteristic transfer)
Definition: pixdesc.c:3696
VAR_S2R_MAIN_VSUB
@ VAR_S2R_MAIN_VSUB
Definition: vf_scale.c:120
AVCHROMA_LOC_BOTTOMLEFT
@ AVCHROMA_LOC_BOTTOMLEFT
Definition: pixfmt.h:775
ff_scale_adjust_dimensions
int ff_scale_adjust_dimensions(AVFilterLink *inlink, int *ret_w, int *ret_h, int force_original_aspect_ratio, int force_divisible_by, double w_adj)
Transform evaluated width and height obtained from ff_scale_eval_dimensions into actual target width ...
Definition: scale_eval.c:113
AVPixFmtDescriptor::log2_chroma_h
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:89
ff_framesync_preinit
void ff_framesync_preinit(FFFrameSync *fs)
Pre-initialize a frame sync structure.
Definition: framesync.c:78
swscale.h
ScaleContext::h
int h
Definition: vf_scale.c:144
av_x_if_null
static void * av_x_if_null(const void *p, const void *x)
Return the default pointer x in case p is NULL.
Definition: avutil.h:312
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3261
ScaleContext::in_v_chr_pos
int in_v_chr_pos
Definition: vf_scale.c:175