FFmpeg
vf_scale.c
1 /*
2  * Copyright (c) 2007 Bobby Bingham
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * scale video filter
24  */
25 
26 #include <float.h>
27 #include <stdio.h>
28 #include <string.h>
29 
30 #include "avfilter.h"
31 #include "filters.h"
32 #include "formats.h"
33 #include "framesync.h"
34 #include "scale_eval.h"
35 #include "video.h"
36 #include "libavutil/eval.h"
38 #include "libavutil/internal.h"
39 #include "libavutil/mem.h"
40 #include "libavutil/opt.h"
41 #include "libavutil/parseutils.h"
42 #include "libavutil/pixdesc.h"
43 #include "libswscale/swscale.h"
44 
45 static const char *const var_names[] = {
46  "in_w", "iw",
47  "in_h", "ih",
48  "out_w", "ow",
49  "out_h", "oh",
50  "a",
51  "sar",
52  "dar",
53  "hsub",
54  "vsub",
55  "ohsub",
56  "ovsub",
57  "n",
58  "t",
59 #if FF_API_FRAME_PKT
60  "pos",
61 #endif
62  "ref_w", "rw",
63  "ref_h", "rh",
64  "ref_a",
65  "ref_sar",
66  "ref_dar", "rdar",
67  "ref_hsub",
68  "ref_vsub",
69  "ref_n",
70  "ref_t",
71  "ref_pos",
72  /* Legacy variables for scale2ref */
73  "main_w",
74  "main_h",
75  "main_a",
76  "main_sar",
77  "main_dar", "mdar",
78  "main_hsub",
79  "main_vsub",
80  "main_n",
81  "main_t",
82  "main_pos",
83  NULL
84 };
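The names above are the vocabulary accepted by the w/h option expressions; the filter parses them with av_expr_parse() and evaluates them against a parallel var_values[] array (see scale_eval_dimensions() below). A minimal standalone sketch of the same pattern (not part of vf_scale.c), using a reduced variable list purely for illustration:

#include <stdio.h>
#include <libavutil/eval.h>

int main(void)
{
    /* Reduced variable set for illustration; the filter uses the full var_names[] table. */
    static const char *const names[] = { "in_w", "iw", "in_h", "ih", NULL };
    double values[]                  = {  1920,  1920,  1080,  1080 };
    AVExpr *e = NULL;

    if (av_expr_parse(&e, "iw/2", names, NULL, NULL, NULL, NULL, 0, NULL) < 0)
        return 1;
    printf("iw/2 = %g\n", av_expr_eval(e, values, NULL)); /* prints 960 */
    av_expr_free(e);
    return 0;
}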
85 
86 enum var_name {
87  VAR_IN_W, VAR_IW,
88  VAR_IN_H, VAR_IH,
89  VAR_OUT_W, VAR_OW,
90  VAR_OUT_H, VAR_OH,
91  VAR_A,
92  VAR_SAR,
93  VAR_DAR,
94  VAR_HSUB,
95  VAR_VSUB,
96  VAR_OHSUB,
97  VAR_OVSUB,
98  VAR_N,
99  VAR_T,
100 #if FF_API_FRAME_PKT
101  VAR_POS,
102 #endif
103  VAR_REF_W, VAR_RW,
104  VAR_REF_H, VAR_RH,
105  VAR_REF_A,
106  VAR_REF_SAR,
107  VAR_REF_DAR, VAR_RDAR,
108  VAR_REF_HSUB,
109  VAR_REF_VSUB,
110  VAR_REF_N,
111  VAR_REF_T,
112  VAR_REF_POS,
113  VAR_S2R_MAIN_W,
114  VAR_S2R_MAIN_H,
115  VAR_S2R_MAIN_A,
116  VAR_S2R_MAIN_SAR,
117  VAR_S2R_MAIN_DAR, VAR_S2R_MDAR,
118  VAR_S2R_MAIN_HSUB,
119  VAR_S2R_MAIN_VSUB,
120  VAR_S2R_MAIN_N,
121  VAR_S2R_MAIN_T,
122  VAR_S2R_MAIN_POS,
123  VARS_NB
124 };
125 
126 enum EvalMode {
127  EVAL_MODE_INIT,
128  EVAL_MODE_FRAME,
129  EVAL_MODE_NB
130 };
131 
132 typedef struct ScaleContext {
133  const AVClass *class;
134  struct SwsContext *sws; ///< software scaler context
135  struct SwsContext *isws[2]; ///< software scaler context for interlaced material
136  // context used for forwarding options to sws
137  struct SwsContext *sws_opts;
138  FFFrameSync fs;
139 
140  /**
141  * New dimensions. Special values are:
142  * 0 = original width/height
143  * -1 = keep original aspect
144  * -N = try to keep aspect but make sure it is divisible by N
145  */
146  int w, h;
147  char *size_str;
148  double param[2]; // sws params
149 
150  int hsub, vsub; ///< chroma subsampling
151  int slice_y; ///< top of current output slice
152  int input_is_pal; ///< set to 1 if the input format is paletted
153  int output_is_pal; ///< set to 1 if the output format is paletted
154  int interlaced;
155  int uses_ref;
156 
157  char *w_expr; ///< width expression string
158  char *h_expr; ///< height expression string
159  AVExpr *w_pexpr;
160  AVExpr *h_pexpr;
161  double var_values[VARS_NB];
162 
163  char *flags_str;
164 
165  int in_color_matrix;
166  int out_color_matrix;
167 
168  int in_range;
169  int out_range;
170 
171  int in_chroma_loc;
172  int out_chroma_loc;
173  int out_h_chr_pos;
174  int out_v_chr_pos;
175  int in_h_chr_pos;
176  int in_v_chr_pos;
177 
178  int force_original_aspect_ratio;
179  int force_divisible_by;
180 
181  int eval_mode; ///< expression evaluation mode
182 
183 } ScaleContext;
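To illustrate the special values documented for w and h above: for a 1920x1080 input, "w=-2:h=720" keeps the aspect ratio and forces the width to a multiple of 2. A rough standalone sketch of the arithmetic (not part of this file; the real handling, including the exact rounding, is done by ff_scale_adjust_dimensions()):

#include <stdio.h>

int main(void)
{
    /* Illustrative arithmetic only; the 0 / -1 / -N handling lives in scale_eval.c. */
    int in_w = 1920, in_h = 1080;
    int req_h = 720;              /* "h=720"     */
    int div   = 2;                /* from "w=-2" */

    int out_h = req_h;
    int out_w = in_w * out_h / in_h;        /* keep aspect: 1280 */
    out_w = (out_w + div - 1) / div * div;  /* force divisibility by 2 (already satisfied) */

    printf("%dx%d -> %dx%d\n", in_w, in_h, out_w, out_h);
    return 0;
}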
184 
186 
187 static int config_props(AVFilterLink *outlink);
188 
189 static int check_exprs(AVFilterContext *ctx)
190 {
191  ScaleContext *scale = ctx->priv;
192  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
193 
194  if (!scale->w_pexpr && !scale->h_pexpr)
195  return AVERROR(EINVAL);
196 
197  if (scale->w_pexpr)
198  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
199  if (scale->h_pexpr)
200  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
201 
202  if (vars_w[VAR_OUT_W] || vars_w[VAR_OW]) {
203  av_log(ctx, AV_LOG_ERROR, "Width expression cannot be self-referencing: '%s'.\n", scale->w_expr);
204  return AVERROR(EINVAL);
205  }
206 
207  if (vars_h[VAR_OUT_H] || vars_h[VAR_OH]) {
208  av_log(ctx, AV_LOG_ERROR, "Height expression cannot be self-referencing: '%s'.\n", scale->h_expr);
209  return AVERROR(EINVAL);
210  }
211 
212  if ((vars_w[VAR_OUT_H] || vars_w[VAR_OH]) &&
213  (vars_h[VAR_OUT_W] || vars_h[VAR_OW])) {
214  av_log(ctx, AV_LOG_WARNING, "Circular references detected for width '%s' and height '%s' - possibly invalid.\n", scale->w_expr, scale->h_expr);
215  }
216 
217  if (vars_w[VAR_REF_W] || vars_h[VAR_REF_W] ||
218  vars_w[VAR_RW] || vars_h[VAR_RW] ||
219  vars_w[VAR_REF_H] || vars_h[VAR_REF_H] ||
220  vars_w[VAR_RH] || vars_h[VAR_RH] ||
221  vars_w[VAR_REF_A] || vars_h[VAR_REF_A] ||
222  vars_w[VAR_REF_SAR] || vars_h[VAR_REF_SAR] ||
223  vars_w[VAR_REF_DAR] || vars_h[VAR_REF_DAR] ||
224  vars_w[VAR_RDAR] || vars_h[VAR_RDAR] ||
225  vars_w[VAR_REF_HSUB] || vars_h[VAR_REF_HSUB] ||
226  vars_w[VAR_REF_VSUB] || vars_h[VAR_REF_VSUB] ||
227  vars_w[VAR_REF_N] || vars_h[VAR_REF_N] ||
228  vars_w[VAR_REF_T] || vars_h[VAR_REF_T] ||
229  vars_w[VAR_REF_POS] || vars_h[VAR_REF_POS]) {
230  scale->uses_ref = 1;
231  }
232 
233  if (ctx->filter != &ff_vf_scale2ref &&
234  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
235  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
236  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
237  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
238  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
239  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
240  vars_w[VAR_S2R_MAIN_HSUB] || vars_h[VAR_S2R_MAIN_HSUB] ||
241  vars_w[VAR_S2R_MAIN_VSUB] || vars_h[VAR_S2R_MAIN_VSUB] ||
242  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
243  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
244  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
245  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref variables are not valid in scale filter.\n");
246  return AVERROR(EINVAL);
247  }
248 
264 
265  if (scale->eval_mode == EVAL_MODE_INIT &&
266  (vars_w[VAR_N] || vars_h[VAR_N] ||
267  vars_w[VAR_T] || vars_h[VAR_T] ||
268 #if FF_API_FRAME_PKT
269  vars_w[VAR_POS] || vars_h[VAR_POS] ||
270 #endif
271  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
272  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
273  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
274  av_log(ctx, AV_LOG_ERROR, "Expressions with frame variables 'n', 't', 'pos' are not valid in init eval_mode.\n");
275  return AVERROR(EINVAL);
276  }
277 
278  return 0;
279 }
280 
281 static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
282 {
283  ScaleContext *scale = ctx->priv;
284  int ret, is_inited = 0;
285  char *old_str_expr = NULL;
286  AVExpr *old_pexpr = NULL;
287 
288  if (str_expr) {
289  old_str_expr = av_strdup(str_expr);
290  if (!old_str_expr)
291  return AVERROR(ENOMEM);
292  av_opt_set(scale, var, args, 0);
293  }
294 
295  if (*pexpr_ptr) {
296  old_pexpr = *pexpr_ptr;
297  *pexpr_ptr = NULL;
298  is_inited = 1;
299  }
300 
301  ret = av_expr_parse(pexpr_ptr, args, var_names,
302  NULL, NULL, NULL, NULL, 0, ctx);
303  if (ret < 0) {
304  av_log(ctx, AV_LOG_ERROR, "Cannot parse expression for %s: '%s'\n", var, args);
305  goto revert;
306  }
307 
308  ret = check_exprs(ctx);
309  if (ret < 0)
310  goto revert;
311 
312  if (is_inited && (ret = config_props(ctx->outputs[0])) < 0)
313  goto revert;
314 
315  av_expr_free(old_pexpr);
316  old_pexpr = NULL;
317  av_freep(&old_str_expr);
318 
319  return 0;
320 
321 revert:
322  av_expr_free(*pexpr_ptr);
323  *pexpr_ptr = NULL;
324  if (old_str_expr) {
325  av_opt_set(scale, var, old_str_expr, 0);
326  av_free(old_str_expr);
327  }
328  if (old_pexpr)
329  *pexpr_ptr = old_pexpr;
330 
331  return ret;
332 }
333 
334 static av_cold int preinit(AVFilterContext *ctx)
335 {
336  ScaleContext *scale = ctx->priv;
337  int ret;
338 
339  scale->sws_opts = sws_alloc_context();
340  if (!scale->sws_opts)
341  return AVERROR(ENOMEM);
342 
343  // set threads=0, so we can later check whether the user modified it
344  ret = av_opt_set_int(scale->sws_opts, "threads", 0, 0);
345  if (ret < 0)
346  return ret;
347 
348  ff_framesync_preinit(&scale->fs);
349 
350  return 0;
351 }
352 
353 static const int sws_colorspaces[] = {
362  -1
363 };
364 
365 static int do_scale(FFFrameSync *fs);
366 
367 static av_cold int init(AVFilterContext *ctx)
368 {
369  ScaleContext *scale = ctx->priv;
370  int64_t threads;
371  int ret;
372 
373  if (ctx->filter == &ff_vf_scale2ref)
374  av_log(ctx, AV_LOG_WARNING, "scale2ref is deprecated, use scale=rw:rh instead\n");
375 
376  if (scale->size_str && (scale->w_expr || scale->h_expr)) {
378  "Size and width/height expressions cannot be set at the same time.\n");
379  return AVERROR(EINVAL);
380  }
381 
382  if (scale->w_expr && !scale->h_expr)
383  FFSWAP(char *, scale->w_expr, scale->size_str);
384 
385  if (scale->size_str) {
386  char buf[32];
387  if ((ret = av_parse_video_size(&scale->w, &scale->h, scale->size_str)) < 0) {
389  "Invalid size '%s'\n", scale->size_str);
390  return ret;
391  }
392  snprintf(buf, sizeof(buf)-1, "%d", scale->w);
393  av_opt_set(scale, "w", buf, 0);
394  snprintf(buf, sizeof(buf)-1, "%d", scale->h);
395  av_opt_set(scale, "h", buf, 0);
396  }
397  if (!scale->w_expr)
398  av_opt_set(scale, "w", "iw", 0);
399  if (!scale->h_expr)
400  av_opt_set(scale, "h", "ih", 0);
401 
402  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
403  if (ret < 0)
404  return ret;
405 
406  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
407  if (ret < 0)
408  return ret;
409 
410  if (scale->in_color_matrix != -1 &&
411  !ff_fmt_is_in(scale->in_color_matrix, sws_colorspaces)) {
412  av_log(ctx, AV_LOG_ERROR, "Unsupported input color matrix '%s'\n",
413  av_color_space_name(scale->in_color_matrix));
414  return AVERROR(EINVAL);
415  }
416 
417  if (!ff_fmt_is_in(scale->out_color_matrix, sws_colorspaces)) {
418  av_log(ctx, AV_LOG_ERROR, "Unsupported output color matrix '%s'\n",
419  av_color_space_name(scale->out_color_matrix));
420  return AVERROR(EINVAL);
421  }
422 
423  av_log(ctx, AV_LOG_VERBOSE, "w:%s h:%s flags:'%s' interl:%d\n",
424  scale->w_expr, scale->h_expr, (char *)av_x_if_null(scale->flags_str, ""), scale->interlaced);
425 
426  if (scale->flags_str && *scale->flags_str) {
427  ret = av_opt_set(scale->sws_opts, "sws_flags", scale->flags_str, 0);
428  if (ret < 0)
429  return ret;
430  }
431 
432  for (int i = 0; i < FF_ARRAY_ELEMS(scale->param); i++)
433  if (scale->param[i] != DBL_MAX) {
434  ret = av_opt_set_double(scale->sws_opts, i ? "param1" : "param0",
435  scale->param[i], 0);
436  if (ret < 0)
437  return ret;
438  }
439 
440  // use generic thread-count if the user did not set it explicitly
441  ret = av_opt_get_int(scale->sws_opts, "threads", 0, &threads);
442  if (ret < 0)
443  return ret;
444  if (!threads)
445  av_opt_set_int(scale->sws_opts, "threads", ff_filter_get_nb_threads(ctx), 0);
446 
447  if (ctx->filter != &ff_vf_scale2ref && scale->uses_ref) {
448  AVFilterPad pad = {
449  .name = "ref",
450  .type = AVMEDIA_TYPE_VIDEO,
451  };
452  ret = ff_append_inpad(ctx, &pad);
453  if (ret < 0)
454  return ret;
455  }
456 
457  return 0;
458 }
459 
460 static av_cold void uninit(AVFilterContext *ctx)
461 {
462  ScaleContext *scale = ctx->priv;
463  av_expr_free(scale->w_pexpr);
464  av_expr_free(scale->h_pexpr);
465  scale->w_pexpr = scale->h_pexpr = NULL;
466  ff_framesync_uninit(&scale->fs);
467  sws_freeContext(scale->sws_opts);
468  sws_freeContext(scale->sws);
469  sws_freeContext(scale->isws[0]);
470  sws_freeContext(scale->isws[1]);
471  scale->sws = NULL;
472 }
473 
474 static int query_formats(const AVFilterContext *ctx,
475  AVFilterFormatsConfig **cfg_in,
476  AVFilterFormatsConfig **cfg_out)
477 {
478  const ScaleContext *scale = ctx->priv;
479  AVFilterFormats *formats;
480  const AVPixFmtDescriptor *desc;
481  enum AVPixelFormat pix_fmt;
482  int ret;
483 
484  desc = NULL;
485  formats = NULL;
486  while ((desc = av_pix_fmt_desc_next(desc))) {
487  pix_fmt = av_pix_fmt_desc_get_id(desc);
488  if ((sws_isSupportedInput(pix_fmt) ||
489  sws_isSupportedEndiannessConversion(pix_fmt))
490  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
491  return ret;
492  }
493  }
494  if ((ret = ff_formats_ref(formats, &cfg_in[0]->formats)) < 0)
495  return ret;
496 
497  desc = NULL;
498  formats = NULL;
499  while ((desc = av_pix_fmt_desc_next(desc))) {
500  pix_fmt = av_pix_fmt_desc_get_id(desc);
501  if ((sws_isSupportedOutput(pix_fmt) || pix_fmt == AV_PIX_FMT_PAL8 ||
502  sws_isSupportedEndiannessConversion(pix_fmt))
503  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
504  return ret;
505  }
506  }
507  if ((ret = ff_formats_ref(formats, &cfg_out[0]->formats)) < 0)
508  return ret;
509 
510  /* accept all supported inputs, even if user overrides their properties */
511  if ((ret = ff_formats_ref(ff_make_format_list(sws_colorspaces),
512  &cfg_in[0]->color_spaces)) < 0)
513  return ret;
514 
515  if ((ret = ff_formats_ref(ff_all_color_ranges(),
516  &cfg_in[0]->color_ranges)) < 0)
517  return ret;
518 
519  /* propagate output properties if overridden */
520  formats = scale->out_color_matrix != AVCOL_SPC_UNSPECIFIED
521  ? ff_make_formats_list_singleton(scale->out_color_matrix)
522  : ff_make_format_list(sws_colorspaces);
523  if ((ret = ff_formats_ref(formats, &cfg_out[0]->color_spaces)) < 0)
524  return ret;
525 
526  formats = scale->out_range != AVCOL_RANGE_UNSPECIFIED
527  ? ff_make_formats_list_singleton(scale->out_range)
528  : ff_all_color_ranges();
529  if ((ret = ff_formats_ref(formats, &cfg_out[0]->color_ranges)) < 0)
530  return ret;
531 
532  return 0;
533 }
534 
535 static int scale_eval_dimensions(AVFilterContext *ctx)
536 {
537  ScaleContext *scale = ctx->priv;
538  const char scale2ref = ctx->filter == &ff_vf_scale2ref;
539  const AVFilterLink *inlink = scale2ref ? ctx->inputs[1] : ctx->inputs[0];
540  const AVFilterLink *outlink = ctx->outputs[0];
541  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
542  const AVPixFmtDescriptor *out_desc = av_pix_fmt_desc_get(outlink->format);
543  char *expr;
544  int eval_w, eval_h;
545  int ret;
546  double res;
547  const AVPixFmtDescriptor *main_desc;
548  const AVFilterLink *main_link;
549 
550  if (scale2ref) {
551  main_link = ctx->inputs[0];
552  main_desc = av_pix_fmt_desc_get(main_link->format);
553  }
554 
555  scale->var_values[VAR_IN_W] = scale->var_values[VAR_IW] = inlink->w;
556  scale->var_values[VAR_IN_H] = scale->var_values[VAR_IH] = inlink->h;
557  scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = NAN;
558  scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = NAN;
559  scale->var_values[VAR_A] = (double) inlink->w / inlink->h;
560  scale->var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
561  (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
562  scale->var_values[VAR_DAR] = scale->var_values[VAR_A] * scale->var_values[VAR_SAR];
563  scale->var_values[VAR_HSUB] = 1 << desc->log2_chroma_w;
564  scale->var_values[VAR_VSUB] = 1 << desc->log2_chroma_h;
565  scale->var_values[VAR_OHSUB] = 1 << out_desc->log2_chroma_w;
566  scale->var_values[VAR_OVSUB] = 1 << out_desc->log2_chroma_h;
567 
568  if (scale2ref) {
569  scale->var_values[VAR_S2R_MAIN_W] = main_link->w;
570  scale->var_values[VAR_S2R_MAIN_H] = main_link->h;
571  scale->var_values[VAR_S2R_MAIN_A] = (double) main_link->w / main_link->h;
572  scale->var_values[VAR_S2R_MAIN_SAR] = main_link->sample_aspect_ratio.num ?
573  (double) main_link->sample_aspect_ratio.num / main_link->sample_aspect_ratio.den : 1;
574  scale->var_values[VAR_S2R_MAIN_DAR] = scale->var_values[VAR_S2R_MDAR] =
575  scale->var_values[VAR_S2R_MAIN_A] * scale->var_values[VAR_S2R_MAIN_SAR];
576  scale->var_values[VAR_S2R_MAIN_HSUB] = 1 << main_desc->log2_chroma_w;
577  scale->var_values[VAR_S2R_MAIN_VSUB] = 1 << main_desc->log2_chroma_h;
578  }
579 
580  if (scale->uses_ref) {
581  const AVFilterLink *reflink = ctx->inputs[1];
582  const AVPixFmtDescriptor *ref_desc = av_pix_fmt_desc_get(reflink->format);
583  scale->var_values[VAR_REF_W] = scale->var_values[VAR_RW] = reflink->w;
584  scale->var_values[VAR_REF_H] = scale->var_values[VAR_RH] = reflink->h;
585  scale->var_values[VAR_REF_A] = (double) reflink->w / reflink->h;
586  scale->var_values[VAR_REF_SAR] = reflink->sample_aspect_ratio.num ?
587  (double) reflink->sample_aspect_ratio.num / reflink->sample_aspect_ratio.den : 1;
588  scale->var_values[VAR_REF_DAR] = scale->var_values[VAR_RDAR] =
589  scale->var_values[VAR_REF_A] * scale->var_values[VAR_REF_SAR];
590  scale->var_values[VAR_REF_HSUB] = 1 << ref_desc->log2_chroma_w;
591  scale->var_values[VAR_REF_VSUB] = 1 << ref_desc->log2_chroma_h;
592  }
593 
594  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
595  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
596 
597  res = av_expr_eval(scale->h_pexpr, scale->var_values, NULL);
598  if (isnan(res)) {
599  expr = scale->h_expr;
600  ret = AVERROR(EINVAL);
601  goto fail;
602  }
603  eval_h = scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = (int) res == 0 ? inlink->h : (int) res;
604 
605  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
606  if (isnan(res)) {
607  expr = scale->w_expr;
608  ret = AVERROR(EINVAL);
609  goto fail;
610  }
611  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
612 
613  scale->w = eval_w;
614  scale->h = eval_h;
615 
616  return 0;
617 
618 fail:
620  "Error when evaluating the expression '%s'.\n", expr);
621  return ret;
622 }
623 
624 static void calc_chroma_pos(int *h_pos_out, int *v_pos_out, int chroma_loc,
625  int h_pos_override, int v_pos_override,
626  int h_sub, int v_sub, int index)
627 {
628  int h_pos, v_pos;
629 
630  /* Explicitly default to center siting for compatibility with swscale */
631  if (chroma_loc == AVCHROMA_LOC_UNSPECIFIED)
632  chroma_loc = AVCHROMA_LOC_CENTER;
633 
634  /* av_chroma_location_enum_to_pos() always gives us values in the range from
635  * 0 to 256, but we need to adjust this to the true value range of the
636  * subsampling grid, which may be larger for h/v_sub > 1 */
637  av_chroma_location_enum_to_pos(&h_pos, &v_pos, chroma_loc);
638  h_pos *= (1 << h_sub) - 1;
639  v_pos *= (1 << v_sub) - 1;
640 
641  if (h_pos_override != -513)
642  h_pos = h_pos_override;
643  if (v_pos_override != -513)
644  v_pos = v_pos_override;
645 
646  /* Fix vertical chroma position for interlaced frames */
647  if (v_sub && index > 0) {
648  /* When vertically subsampling, chroma samples are effectively only
649  * placed next to even rows. To access them from the odd field, we need
650  * to account for this shift by offsetting the distance of one luma row.
651  *
652  * For 4x vertical subsampling (v_sub == 2), they are only placed
653  * next to every *other* even row, so we need to shift by three luma
654  * rows to get to the chroma sample. */
655  if (index == 2)
656  v_pos += (256 << v_sub) - 256;
657 
658  /* Luma row distance is doubled for fields, so halve offsets */
659  v_pos >>= 1;
660  }
661 
662  /* Explicitly strip chroma offsets when not subsampling, because it
663  * interferes with the operation of flags like SWS_FULL_CHR_H_INP */
664  *h_pos_out = h_sub ? h_pos : -513;
665  *v_pos_out = v_sub ? v_pos : -513;
666 }
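A small standalone illustration (not part of the filter) of the 0..256 convention described above: av_chroma_location_enum_to_pos() reports chroma siting on a 0..256 grid, which calc_chroma_pos() then stretches by (1 << sub) - 1 to cover the subsampling grid:

#include <stdio.h>
#include <libavutil/pixdesc.h>
#include <libavutil/pixfmt.h>

int main(void)
{
    int xpos, ypos;

    /* MPEG-style "left" siting: left-aligned horizontally, centred vertically */
    av_chroma_location_enum_to_pos(&xpos, &ypos, AVCHROMA_LOC_LEFT);
    printf("left siting: xpos=%d ypos=%d\n", xpos, ypos);   /* 0, 128 */

    /* Stretch to the subsampling grid as calc_chroma_pos() does.
     * For 4:2:0 (h_sub = v_sub = 1) the factor is 1; for v_sub = 2 it would be 3. */
    int h_sub = 1, v_sub = 1;
    printf("grid position: h=%d v=%d\n",
           xpos * ((1 << h_sub) - 1), ypos * ((1 << v_sub) - 1));
    return 0;
}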
667 
668 static int config_props(AVFilterLink *outlink)
669 {
670  AVFilterContext *ctx = outlink->src;
671  AVFilterLink *inlink0 = outlink->src->inputs[0];
672  AVFilterLink *inlink = ctx->filter == &ff_vf_scale2ref ?
673  outlink->src->inputs[1] :
674  outlink->src->inputs[0];
675  enum AVPixelFormat outfmt = outlink->format;
676  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
677  const AVPixFmtDescriptor *outdesc = av_pix_fmt_desc_get(outfmt);
678  ScaleContext *scale = ctx->priv;
679  uint8_t *flags_val = NULL;
680  int in_range, in_colorspace;
681  int ret;
682 
683  if ((ret = scale_eval_dimensions(ctx)) < 0)
684  goto fail;
685 
686  outlink->w = scale->w;
687  outlink->h = scale->h;
688 
689  ret = ff_scale_adjust_dimensions(inlink, &outlink->w, &outlink->h,
690  scale->force_original_aspect_ratio,
691  scale->force_divisible_by);
692 
693  if (ret < 0)
694  goto fail;
695 
696  if (outlink->w > INT_MAX ||
697  outlink->h > INT_MAX ||
698  (outlink->h * inlink->w) > INT_MAX ||
699  (outlink->w * inlink->h) > INT_MAX)
700  av_log(ctx, AV_LOG_ERROR, "Rescaled value for width or height is too big.\n");
701 
702  /* TODO: make algorithm configurable */
703 
704  scale->input_is_pal = desc->flags & AV_PIX_FMT_FLAG_PAL;
705  if (outfmt == AV_PIX_FMT_PAL8) outfmt = AV_PIX_FMT_BGR8;
706  scale->output_is_pal = av_pix_fmt_desc_get(outfmt)->flags & AV_PIX_FMT_FLAG_PAL;
707 
708  in_range = scale->in_range;
709  if (in_range == AVCOL_RANGE_UNSPECIFIED)
710  in_range = inlink0->color_range;
711 
712  in_colorspace = scale->in_color_matrix;
713  if (in_colorspace == -1 /* auto */)
714  in_colorspace = inlink0->colorspace;
715 
716  if (scale->sws)
717  sws_freeContext(scale->sws);
718  if (scale->isws[0])
719  sws_freeContext(scale->isws[0]);
720  if (scale->isws[1])
721  sws_freeContext(scale->isws[1]);
722  scale->isws[0] = scale->isws[1] = scale->sws = NULL;
723  if (inlink0->w == outlink->w &&
724  inlink0->h == outlink->h &&
725  in_range == outlink->color_range &&
726  in_colorspace == outlink->colorspace &&
727  inlink0->format == outlink->format &&
728  scale->in_chroma_loc == scale->out_chroma_loc)
729  ;
730  else {
731  struct SwsContext **swscs[3] = {&scale->sws, &scale->isws[0], &scale->isws[1]};
732  int i;
733 
734  for (i = 0; i < 3; i++) {
735  int in_full, out_full, brightness, contrast, saturation;
736  int h_chr_pos, v_chr_pos;
737  const int *inv_table, *table;
738  struct SwsContext *const s = sws_alloc_context();
739  if (!s)
740  return AVERROR(ENOMEM);
741  *swscs[i] = s;
742 
743  ret = av_opt_copy(s, scale->sws_opts);
744  if (ret < 0)
745  return ret;
746 
747  av_opt_set_int(s, "srcw", inlink0 ->w, 0);
748  av_opt_set_int(s, "srch", inlink0 ->h >> !!i, 0);
749  av_opt_set_int(s, "src_format", inlink0->format, 0);
750  av_opt_set_int(s, "dstw", outlink->w, 0);
751  av_opt_set_int(s, "dsth", outlink->h >> !!i, 0);
752  av_opt_set_int(s, "dst_format", outfmt, 0);
753  if (in_range != AVCOL_RANGE_UNSPECIFIED)
754  av_opt_set_int(s, "src_range",
755  in_range == AVCOL_RANGE_JPEG, 0);
756  if (outlink->color_range != AVCOL_RANGE_UNSPECIFIED)
757  av_opt_set_int(s, "dst_range",
758  outlink->color_range == AVCOL_RANGE_JPEG, 0);
759 
760  calc_chroma_pos(&h_chr_pos, &v_chr_pos, scale->in_chroma_loc,
761  scale->in_h_chr_pos, scale->in_v_chr_pos,
762  desc->log2_chroma_w, desc->log2_chroma_h, i);
763  av_opt_set_int(s, "src_h_chr_pos", h_chr_pos, 0);
764  av_opt_set_int(s, "src_v_chr_pos", v_chr_pos, 0);
765 
766  calc_chroma_pos(&h_chr_pos, &v_chr_pos, scale->out_chroma_loc,
767  scale->out_h_chr_pos, scale->out_v_chr_pos,
768  outdesc->log2_chroma_w, outdesc->log2_chroma_h, i);
769  av_opt_set_int(s, "dst_h_chr_pos", h_chr_pos, 0);
770  av_opt_set_int(s, "dst_v_chr_pos", v_chr_pos, 0);
771 
772  if ((ret = sws_init_context(s, NULL, NULL)) < 0)
773  return ret;
774 
775  sws_getColorspaceDetails(s, (int **)&inv_table, &in_full,
776  (int **)&table, &out_full,
777  &brightness, &contrast, &saturation);
778 
779  if (scale->in_color_matrix == -1 /* auto */)
780  inv_table = sws_getCoefficients(inlink0->colorspace);
781  else if (scale->in_color_matrix != AVCOL_SPC_UNSPECIFIED)
782  inv_table = sws_getCoefficients(scale->in_color_matrix);
783  if (outlink->colorspace != AVCOL_SPC_UNSPECIFIED)
784  table = sws_getCoefficients(outlink->colorspace);
785  else if (scale->in_color_matrix != AVCOL_SPC_UNSPECIFIED)
786  table = inv_table;
787 
788  sws_setColorspaceDetails(s, inv_table, in_full,
789  table, out_full,
790  brightness, contrast, saturation);
791 
792  if (!scale->interlaced)
793  break;
794  }
795  }
796 
797  if (inlink0->sample_aspect_ratio.num){
798  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink0->w, outlink->w * inlink0->h}, inlink0->sample_aspect_ratio);
799  } else
800  outlink->sample_aspect_ratio = inlink0->sample_aspect_ratio;
801 
802  if (scale->sws)
803  av_opt_get(scale->sws, "sws_flags", 0, &flags_val);
804 
805  av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s csp:%s range:%s sar:%d/%d -> w:%d h:%d fmt:%s csp:%s range:%s sar:%d/%d flags:%s\n",
806  inlink ->w, inlink ->h, av_get_pix_fmt_name( inlink->format),
807  av_color_space_name(inlink->colorspace), av_color_range_name(inlink->color_range),
808  inlink->sample_aspect_ratio.num, inlink->sample_aspect_ratio.den,
809  outlink->w, outlink->h, av_get_pix_fmt_name(outlink->format),
810  av_color_space_name(outlink->colorspace), av_color_range_name(outlink->color_range),
811  outlink->sample_aspect_ratio.num, outlink->sample_aspect_ratio.den,
812  flags_val);
813  av_freep(&flags_val);
814 
815  if (ctx->filter != &ff_vf_scale2ref) {
817  ret = ff_framesync_init(&scale->fs, ctx, ctx->nb_inputs);
818  if (ret < 0)
819  return ret;
820  scale->fs.on_event = do_scale;
821  scale->fs.in[0].time_base = ctx->inputs[0]->time_base;
822  scale->fs.in[0].sync = 1;
823  scale->fs.in[0].before = EXT_STOP;
824  scale->fs.in[0].after = EXT_STOP;
825  if (scale->uses_ref) {
826  av_assert0(ctx->nb_inputs == 2);
827  scale->fs.in[1].time_base = ctx->inputs[1]->time_base;
828  scale->fs.in[1].sync = 0;
829  scale->fs.in[1].before = EXT_NULL;
830  scale->fs.in[1].after = EXT_INFINITY;
831  }
832 
833  ret = ff_framesync_configure(&scale->fs);
834  if (ret < 0)
835  return ret;
836  }
837 
838  return 0;
839 
840 fail:
841  return ret;
842 }
843 
844 static int config_props_ref(AVFilterLink *outlink)
845 {
846  AVFilterLink *inlink = outlink->src->inputs[1];
847  FilterLink *il = ff_filter_link(inlink);
848  FilterLink *ol = ff_filter_link(outlink);
849 
850  outlink->w = inlink->w;
851  outlink->h = inlink->h;
852  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
853  outlink->time_base = inlink->time_base;
854  ol->frame_rate = il->frame_rate;
855  outlink->colorspace = inlink->colorspace;
856  outlink->color_range = inlink->color_range;
857 
858  return 0;
859 }
860 
861 static int request_frame(AVFilterLink *outlink)
862 {
863  return ff_request_frame(outlink->src->inputs[0]);
864 }
865 
866 static int request_frame_ref(AVFilterLink *outlink)
867 {
868  return ff_request_frame(outlink->src->inputs[1]);
869 }
870 
871 static void frame_offset(AVFrame *frame, int dir, int is_pal)
872 {
873  for (int i = 0; i < 4 && frame->data[i]; i++) {
874  if (i == 1 && is_pal)
875  break;
876  frame->data[i] += frame->linesize[i] * dir;
877  }
878 }
879 
880 static int scale_field(ScaleContext *scale, AVFrame *dst, AVFrame *src,
881  int field)
882 {
883  int orig_h_src = src->height;
884  int orig_h_dst = dst->height;
885  int ret;
886 
887  // offset the data pointers for the bottom field
888  if (field) {
889  frame_offset(src, 1, scale->input_is_pal);
890  frame_offset(dst, 1, scale->output_is_pal);
891  }
892 
893  // take every second line
894  for (int i = 0; i < 4; i++) {
895  src->linesize[i] *= 2;
896  dst->linesize[i] *= 2;
897  }
898  src->height /= 2;
899  dst->height /= 2;
900 
901  ret = sws_scale_frame(scale->isws[field], dst, src);
902  if (ret < 0)
903  return ret;
904 
905  // undo the changes we made above
906  for (int i = 0; i < 4; i++) {
907  src->linesize[i] /= 2;
908  dst->linesize[i] /= 2;
909  }
910  src->height = orig_h_src;
911  dst->height = orig_h_dst;
912 
913  if (field) {
914  frame_offset(src, -1, scale->input_is_pal);
915  frame_offset(dst, -1, scale->output_is_pal);
916  }
917 
918  return 0;
919 }
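The linesize/height juggling above is the usual trick for scaling one field at a time: doubling each stride makes every row step skip the other field, and halving the height confines the scaler to the selected field. A hedged standalone sketch of the same idea (illustration only; the filter applies it in place and then undoes it, as shown above):

#include <libavutil/frame.h>

/* Turn 'f' into a view of one field; undo by reversing the same three steps,
 * as scale_field() does once the scaler has run. */
static void enter_field_view(AVFrame *f, int bottom_field)
{
    for (int i = 0; i < 4 && f->data[i]; i++) {
        if (bottom_field)
            f->data[i] += f->linesize[i];  /* start on the second row */
        f->linesize[i] *= 2;               /* each row step skips the other field */
    }
    f->height /= 2;
}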
920 
921 /* Takes over ownership of *frame_in, passes ownership of *frame_out to caller */
922 static int scale_frame(AVFilterLink *link, AVFrame **frame_in,
923  AVFrame **frame_out)
924 {
925  FilterLink *inl = ff_filter_link(link);
926  AVFilterContext *ctx = link->dst;
927  ScaleContext *scale = ctx->priv;
928  AVFilterLink *outlink = ctx->outputs[0];
929  AVFrame *out, *in = *frame_in;
930  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
931  char buf[32];
932  int ret;
933  int frame_changed;
934 
935  *frame_in = NULL;
936  if (in->colorspace == AVCOL_SPC_YCGCO)
937  av_log(link->dst, AV_LOG_WARNING, "Detected unsupported YCgCo colorspace.\n");
938 
939  frame_changed = in->width != link->w ||
940  in->height != link->h ||
941  in->format != link->format ||
942  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
943  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num ||
944  in->colorspace != link->colorspace ||
945  in->color_range != link->color_range;
946 
947  if (scale->eval_mode == EVAL_MODE_FRAME || frame_changed) {
948  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
949 
950  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
951  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
952 
953  if (scale->eval_mode == EVAL_MODE_FRAME &&
954  !frame_changed &&
955  ctx->filter != &ff_vf_scale2ref &&
956  !(vars_w[VAR_N] || vars_w[VAR_T]
957 #if FF_API_FRAME_PKT
958  || vars_w[VAR_POS]
959 #endif
960  ) &&
961  !(vars_h[VAR_N] || vars_h[VAR_T]
962 #if FF_API_FRAME_PKT
963  || vars_h[VAR_POS]
964 #endif
965  ) &&
966  scale->w && scale->h)
967  goto scale;
968 
969  if (scale->eval_mode == EVAL_MODE_INIT) {
970  snprintf(buf, sizeof(buf) - 1, "%d", scale->w);
971  av_opt_set(scale, "w", buf, 0);
972  snprintf(buf, sizeof(buf) - 1, "%d", scale->h);
973  av_opt_set(scale, "h", buf, 0);
974 
975  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
976  if (ret < 0)
977  goto err;
978 
979  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
980  if (ret < 0)
981  goto err;
982  }
983 
984  if (ctx->filter == &ff_vf_scale2ref) {
985  scale->var_values[VAR_S2R_MAIN_N] = inl->frame_count_out;
986  scale->var_values[VAR_S2R_MAIN_T] = TS2T(in->pts, link->time_base);
987 #if FF_API_FRAME_PKT
988 FF_DISABLE_DEPRECATION_WARNINGS
989  scale->var_values[VAR_S2R_MAIN_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
990 FF_ENABLE_DEPRECATION_WARNINGS
991 #endif
992  } else {
993  scale->var_values[VAR_N] = inl->frame_count_out;
994  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
995 #if FF_API_FRAME_PKT
996 FF_DISABLE_DEPRECATION_WARNINGS
997  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
998 FF_ENABLE_DEPRECATION_WARNINGS
999 #endif
1000  }
1001 
1002  link->dst->inputs[0]->format = in->format;
1003  link->dst->inputs[0]->w = in->width;
1004  link->dst->inputs[0]->h = in->height;
1005  link->dst->inputs[0]->colorspace = in->colorspace;
1006  link->dst->inputs[0]->color_range = in->color_range;
1007 
1008  link->dst->inputs[0]->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
1009  link->dst->inputs[0]->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
1010 
1011  if ((ret = config_props(outlink)) < 0)
1012  goto err;
1013  }
1014 
1015 scale:
1016  if (!scale->sws) {
1017  *frame_out = in;
1018  return 0;
1019  }
1020 
1021  scale->hsub = desc->log2_chroma_w;
1022  scale->vsub = desc->log2_chroma_h;
1023 
1024  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
1025  if (!out) {
1026  ret = AVERROR(ENOMEM);
1027  goto err;
1028  }
1029 
1030  av_frame_copy_props(out, in);
1031  out->width = outlink->w;
1032  out->height = outlink->h;
1033  out->color_range = outlink->color_range;
1034  out->colorspace = outlink->colorspace;
1035  if (scale->out_chroma_loc != AVCHROMA_LOC_UNSPECIFIED)
1036  out->chroma_location = scale->out_chroma_loc;
1037 
1038  if (scale->output_is_pal)
1039  avpriv_set_systematic_pal2((uint32_t*)out->data[1], outlink->format == AV_PIX_FMT_PAL8 ? AV_PIX_FMT_BGR8 : outlink->format);
1040 
1041  av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
1042  (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
1043  (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
1044  INT_MAX);
1045 
1046  if (scale->interlaced>0 || (scale->interlaced<0 &&
1047  (in->flags & AV_FRAME_FLAG_INTERLACED))) {
1048  ret = scale_field(scale, out, in, 0);
1049  if (ret >= 0)
1050  ret = scale_field(scale, out, in, 1);
1051  } else {
1052  ret = sws_scale_frame(scale->sws, out, in);
1053  }
1054 
1055  if (ret < 0)
1056  av_frame_free(&out);
1057  *frame_out = out;
1058 
1059 err:
1060  av_frame_free(&in);
1061  return ret;
1062 }
1063 
1064 static int do_scale(FFFrameSync *fs)
1065 {
1066  AVFilterContext *ctx = fs->parent;
1067  ScaleContext *scale = ctx->priv;
1068  AVFilterLink *outlink = ctx->outputs[0];
1069  AVFrame *out, *in = NULL, *ref = NULL;
1070  int ret = 0, frame_changed;
1071 
1072  ret = ff_framesync_get_frame(fs, 0, &in, 1);
1073  if (ret < 0)
1074  goto err;
1075 
1076  if (scale->uses_ref) {
1077  ret = ff_framesync_get_frame(fs, 1, &ref, 0);
1078  if (ret < 0)
1079  goto err;
1080  }
1081 
1082  if (ref) {
1083  AVFilterLink *reflink = ctx->inputs[1];
1084  FilterLink *rl = ff_filter_link(reflink);
1085 
1086  frame_changed = ref->width != reflink->w ||
1087  ref->height != reflink->h ||
1088  ref->format != reflink->format ||
1089  ref->sample_aspect_ratio.den != reflink->sample_aspect_ratio.den ||
1090  ref->sample_aspect_ratio.num != reflink->sample_aspect_ratio.num ||
1091  ref->colorspace != reflink->colorspace ||
1092  ref->color_range != reflink->color_range;
1093 
1094  if (frame_changed) {
1095  reflink->format = ref->format;
1096  reflink->w = ref->width;
1097  reflink->h = ref->height;
1098  reflink->sample_aspect_ratio.num = ref->sample_aspect_ratio.num;
1099  reflink->sample_aspect_ratio.den = ref->sample_aspect_ratio.den;
1100  reflink->colorspace = ref->colorspace;
1101  reflink->color_range = ref->color_range;
1102 
1103  ret = config_props(outlink);
1104  if (ret < 0)
1105  goto err;
1106  }
1107 
1108  if (scale->eval_mode == EVAL_MODE_FRAME) {
1109  scale->var_values[VAR_REF_N] = rl->frame_count_out;
1110  scale->var_values[VAR_REF_T] = TS2T(ref->pts, reflink->time_base);
1111 #if FF_API_FRAME_PKT
1112 FF_DISABLE_DEPRECATION_WARNINGS
1113  scale->var_values[VAR_REF_POS] = ref->pkt_pos == -1 ? NAN : ref->pkt_pos;
1114 FF_ENABLE_DEPRECATION_WARNINGS
1115 #endif
1116  }
1117  }
1118 
1119  ret = scale_frame(ctx->inputs[0], &in, &out);
1120  if (ret < 0)
1121  goto err;
1122 
1123  av_assert0(out);
1124  out->pts = av_rescale_q(fs->pts, fs->time_base, outlink->time_base);
1125  return ff_filter_frame(outlink, out);
1126 
1127 err:
1128  av_frame_free(&in);
1129  return ret;
1130 }
1131 
1132 static int filter_frame(AVFilterLink *link, AVFrame *in)
1133 {
1134  AVFilterContext *ctx = link->dst;
1135  AVFilterLink *outlink = ctx->outputs[0];
1136  AVFrame *out;
1137  int ret;
1138 
1139  ret = scale_frame(link, &in, &out);
1140  if (out)
1141  return ff_filter_frame(outlink, out);
1142 
1143  return ret;
1144 }
1145 
1146 static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
1147 {
1148  FilterLink *l = ff_filter_link(link);
1149  ScaleContext *scale = link->dst->priv;
1150  AVFilterLink *outlink = link->dst->outputs[1];
1151  int frame_changed;
1152 
1153  frame_changed = in->width != link->w ||
1154  in->height != link->h ||
1155  in->format != link->format ||
1156  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
1157  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num ||
1158  in->colorspace != link->colorspace ||
1159  in->color_range != link->color_range;
1160 
1161  if (frame_changed) {
1162  link->format = in->format;
1163  link->w = in->width;
1164  link->h = in->height;
1165  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
1166  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
1167  link->colorspace = in->colorspace;
1168  link->color_range = in->color_range;
1169 
1170  config_props_ref(outlink);
1171  }
1172 
1173  if (scale->eval_mode == EVAL_MODE_FRAME) {
1174  scale->var_values[VAR_N] = l->frame_count_out;
1175  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
1176 #if FF_API_FRAME_PKT
1177 FF_DISABLE_DEPRECATION_WARNINGS
1178  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
1179 FF_ENABLE_DEPRECATION_WARNINGS
1180 #endif
1181  }
1182 
1183  return ff_filter_frame(outlink, in);
1184 }
1185 
1186 static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
1187  char *res, int res_len, int flags)
1188 {
1189  ScaleContext *scale = ctx->priv;
1190  char *str_expr;
1191  AVExpr **pexpr_ptr;
1192  int ret, w, h;
1193 
1194  w = !strcmp(cmd, "width") || !strcmp(cmd, "w");
1195  h = !strcmp(cmd, "height") || !strcmp(cmd, "h");
1196 
1197  if (w || h) {
1198  str_expr = w ? scale->w_expr : scale->h_expr;
1199  pexpr_ptr = w ? &scale->w_pexpr : &scale->h_pexpr;
1200 
1201  ret = scale_parse_expr(ctx, str_expr, pexpr_ptr, cmd, args);
1202  } else
1203  ret = AVERROR(ENOSYS);
1204 
1205  if (ret < 0)
1206  av_log(ctx, AV_LOG_ERROR, "Failed to process command. Continuing with existing parameters.\n");
1207 
1208  return ret;
1209 }
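Because w and h are declared with TFLAGS (see the option table below) and routed through process_command(), they can be changed while the graph runs. A hedged usage sketch, assuming an already configured AVFilterGraph named graph containing an instance of this filter labelled "scale0" (both names are assumptions of the sketch):

#include <libavfilter/avfilter.h>

/* Ask the scale instance to re-evaluate its width at runtime. */
static int set_scale_width(AVFilterGraph *graph, const char *expr)
{
    char resp[128] = { 0 };
    return avfilter_graph_send_command(graph, "scale0", "width", expr,
                                       resp, sizeof(resp), 0);
}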
1210 
1211 static int activate(AVFilterContext *ctx)
1212 {
1213  ScaleContext *scale = ctx->priv;
1214  return ff_framesync_activate(&scale->fs);
1215 }
1216 
1217 static const AVClass *child_class_iterate(void **iter)
1218 {
1219  switch ((uintptr_t) *iter) {
1220  case 0:
1221  *iter = (void*)(uintptr_t) 1;
1222  return sws_get_class();
1223  case 1:
1224  *iter = (void*)(uintptr_t) 2;
1225  return &ff_framesync_class;
1226  }
1227 
1228  return NULL;
1229 }
1230 
1231 static void *child_next(void *obj, void *prev)
1232 {
1233  ScaleContext *s = obj;
1234  if (!prev)
1235  return s->sws_opts;
1236  if (prev == s->sws_opts)
1237  return &s->fs;
1238  return NULL;
1239 }
1240 
1241 #define OFFSET(x) offsetof(ScaleContext, x)
1242 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
1243 #define TFLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
1244 
1245 static const AVOption scale_options[] = {
1246  { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1247  { "width", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1248  { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1249  { "height","Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1250  { "flags", "Flags to pass to libswscale", OFFSET(flags_str), AV_OPT_TYPE_STRING, { .str = "" }, .flags = FLAGS },
1251  { "interl", "set interlacing", OFFSET(interlaced), AV_OPT_TYPE_BOOL, {.i64 = 0 }, -1, 1, FLAGS },
1252  { "size", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, .flags = FLAGS },
1253  { "s", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, .flags = FLAGS },
1254  { "in_color_matrix", "set input YCbCr type", OFFSET(in_color_matrix), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AVCOL_SPC_NB-1, .flags = FLAGS, .unit = "color" },
1255  { "out_color_matrix", "set output YCbCr type", OFFSET(out_color_matrix), AV_OPT_TYPE_INT, { .i64 = AVCOL_SPC_UNSPECIFIED }, 0, AVCOL_SPC_NB-1, .flags = FLAGS, .unit = "color"},
1256  { "auto", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = -1 }, 0, 0, FLAGS, .unit = "color" },
1257  { "bt601", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1258  { "bt470", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1259  { "smpte170m", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1260  { "bt709", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT709 }, 0, 0, FLAGS, .unit = "color" },
1261  { "fcc", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_FCC }, 0, 0, FLAGS, .unit = "color" },
1262  { "smpte240m", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_SMPTE240M }, 0, 0, FLAGS, .unit = "color" },
1263  { "bt2020", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT2020_NCL }, 0, 0, FLAGS, .unit = "color" },
1264  { "in_range", "set input color range", OFFSET( in_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, .unit = "range" },
1265  { "out_range", "set output color range", OFFSET(out_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, .unit = "range" },
1266  { "auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, .unit = "range" },
1267  { "unknown", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, .unit = "range" },
1268  { "full", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1269  { "limited",NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1270  { "jpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1271  { "mpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1272  { "tv", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1273  { "pc", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1274  { "in_chroma_loc", "set input chroma sample location", OFFSET(in_chroma_loc), AV_OPT_TYPE_INT, { .i64 = AVCHROMA_LOC_UNSPECIFIED }, 0, AVCHROMA_LOC_NB-1, .flags = FLAGS, .unit = "chroma_loc" },
1275  { "out_chroma_loc", "set output chroma sample location", OFFSET(out_chroma_loc), AV_OPT_TYPE_INT, { .i64 = AVCHROMA_LOC_UNSPECIFIED }, 0, AVCHROMA_LOC_NB-1, .flags = FLAGS, .unit = "chroma_loc" },
1276  {"auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_UNSPECIFIED}, 0, 0, FLAGS, .unit = "chroma_loc"},
1277  {"unknown", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_UNSPECIFIED}, 0, 0, FLAGS, .unit = "chroma_loc"},
1278  {"left", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_LEFT}, 0, 0, FLAGS, .unit = "chroma_loc"},
1279  {"center", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_CENTER}, 0, 0, FLAGS, .unit = "chroma_loc"},
1280  {"topleft", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_TOPLEFT}, 0, 0, FLAGS, .unit = "chroma_loc"},
1281  {"top", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_TOP}, 0, 0, FLAGS, .unit = "chroma_loc"},
1282  {"bottomleft", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_BOTTOMLEFT}, 0, 0, FLAGS, .unit = "chroma_loc"},
1283  {"bottom", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_BOTTOM}, 0, 0, FLAGS, .unit = "chroma_loc"},
1284  { "in_v_chr_pos", "input vertical chroma position in luma grid/256" , OFFSET(in_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1285  { "in_h_chr_pos", "input horizontal chroma position in luma grid/256", OFFSET(in_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1286  { "out_v_chr_pos", "output vertical chroma position in luma grid/256" , OFFSET(out_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1287  { "out_h_chr_pos", "output horizontal chroma position in luma grid/256", OFFSET(out_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1288  { "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 2, FLAGS, .unit = "force_oar" },
1289  { "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, .unit = "force_oar" },
1290  { "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, .unit = "force_oar" },
1291  { "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, .unit = "force_oar" },
1292  { "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1}, 1, 256, FLAGS },
1293  { "param0", "Scaler param 0", OFFSET(param[0]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1294  { "param1", "Scaler param 1", OFFSET(param[1]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1295  { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_INIT}, 0, EVAL_MODE_NB-1, FLAGS, .unit = "eval" },
1296  { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT}, .flags = FLAGS, .unit = "eval" },
1297  { "frame", "eval expressions during initialization and per-frame", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
1298  { NULL }
1299 };
1300 
1301 static const AVClass scale_class = {
1302  .class_name = "scale",
1303  .item_name = av_default_item_name,
1304  .option = scale_options,
1305  .version = LIBAVUTIL_VERSION_INT,
1306  .category = AV_CLASS_CATEGORY_FILTER,
1307  .child_class_iterate = child_class_iterate,
1308  .child_next = child_next,
1309 };
1310 
1311 static const AVFilterPad avfilter_vf_scale_inputs[] = {
1312  {
1313  .name = "default",
1314  .type = AVMEDIA_TYPE_VIDEO,
1315  },
1316 };
1317 
1318 static const AVFilterPad avfilter_vf_scale_outputs[] = {
1319  {
1320  .name = "default",
1321  .type = AVMEDIA_TYPE_VIDEO,
1322  .config_props = config_props,
1323  },
1324 };
1325 
1327  .name = "scale",
1328  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format."),
1329  .preinit = preinit,
1330  .init = init,
1331  .uninit = uninit,
1332  .priv_size = sizeof(ScaleContext),
1333  .priv_class = &scale_class,
1337  .activate = activate,
1338  .process_command = process_command,
1340 };
1341 
1342 static const AVClass *scale2ref_child_class_iterate(void **iter)
1343 {
1344  const AVClass *c = *iter ? NULL : sws_get_class();
1345  *iter = (void*)(uintptr_t)c;
1346  return c;
1347 }
1348 
1349 static void *scale2ref_child_next(void *obj, void *prev)
1350 {
1351  ScaleContext *s = obj;
1352  if (!prev)
1353  return s->sws_opts;
1354  return NULL;
1355 }
1356 
1357 static const AVClass scale2ref_class = {
1358  .class_name = "scale(2ref)",
1359  .item_name = av_default_item_name,
1360  .option = scale_options,
1361  .version = LIBAVUTIL_VERSION_INT,
1362  .category = AV_CLASS_CATEGORY_FILTER,
1363  .child_class_iterate = scale2ref_child_class_iterate,
1364  .child_next = scale2ref_child_next,
1365 };
1366 
1367 static const AVFilterPad avfilter_vf_scale2ref_inputs[] = {
1368  {
1369  .name = "default",
1370  .type = AVMEDIA_TYPE_VIDEO,
1371  .filter_frame = filter_frame,
1372  },
1373  {
1374  .name = "ref",
1375  .type = AVMEDIA_TYPE_VIDEO,
1376  .filter_frame = filter_frame_ref,
1377  },
1378 };
1379 
1380 static const AVFilterPad avfilter_vf_scale2ref_outputs[] = {
1381  {
1382  .name = "default",
1383  .type = AVMEDIA_TYPE_VIDEO,
1384  .config_props = config_props,
1385  .request_frame= request_frame,
1386  },
1387  {
1388  .name = "ref",
1389  .type = AVMEDIA_TYPE_VIDEO,
1390  .config_props = config_props_ref,
1391  .request_frame= request_frame_ref,
1392  },
1393 };
1394 
1396  .name = "scale2ref",
1397  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format to the given reference."),
1398  .preinit = preinit,
1399  .init = init,
1400  .uninit = uninit,
1401  .priv_size = sizeof(ScaleContext),
1402  .priv_class = &scale2ref_class,
1406  .process_command = process_command,
1407 };
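For context, a hedged sketch of how the scale filter defined above is typically instantiated through the public libavfilter API; the surrounding graph setup (buffer source/sink, linking, configuration) is omitted and the option string is only an example:

#include <libavfilter/avfilter.h>
#include <libavutil/error.h>

/* Create a "scale" instance inside an existing, not yet configured graph.
 * Error handling is reduced to returning the libav* error code. */
static int add_scale(AVFilterGraph *graph, AVFilterContext **scale_ctx)
{
    const AVFilter *scale = avfilter_get_by_name("scale");
    if (!scale)
        return AVERROR_FILTER_NOT_FOUND;
    /* "w=1280:h=-2:flags=bicubic" is just an illustrative option string */
    return avfilter_graph_create_filter(scale_ctx, scale, "scale0",
                                        "w=1280:h=-2:flags=bicubic", NULL, graph);
}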
filter_frame_ref
static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
Definition: vf_scale.c:1146
ScaleContext::param
double param[2]
Definition: vf_scale.c:148
VAR_S2R_MAIN_SAR
@ VAR_S2R_MAIN_SAR
Definition: vf_scale.c:116
formats
formats
Definition: signature.h:47
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:116
ScaleContext::fs
FFFrameSync fs
Definition: vf_scale.c:138
VAR_S2R_MAIN_A
@ VAR_S2R_MAIN_A
Definition: vf_scale.c:115
VAR_HSUB
@ VAR_HSUB
Definition: vf_scale.c:94
FF_ENABLE_DEPRECATION_WARNINGS
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:73
config_props_ref
static int config_props_ref(AVFilterLink *outlink)
Definition: vf_scale.c:844
sws_setColorspaceDetails
int sws_setColorspaceDetails(SwsContext *c, const int inv_table[4], int srcRange, const int table[4], int dstRange, int brightness, int contrast, int saturation)
Definition: utils.c:1046
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:137
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:215
AVFrame::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: frame.h:668
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
TFLAGS
#define TFLAGS
Definition: vf_scale.c:1243
ScaleContext::sws_opts
struct SwsContext * sws_opts
Definition: vf_scale.c:137
check_exprs
static int check_exprs(AVFilterContext *ctx)
Definition: vf_scale.c:189
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
var_name
var_name
Definition: noise.c:47
ScaleContext::input_is_pal
int input_is_pal
set to 1 if the input format is paletted
Definition: vf_scale.c:152
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:435
VAR_REF_POS
@ VAR_REF_POS
Definition: vf_scale.c:112
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:301
out
FILE * out
Definition: movenc.c:55
sws_isSupportedOutput
#define sws_isSupportedOutput(x)
ScaleContext
Definition: vf_scale.c:132
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1062
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:3170
AVCHROMA_LOC_BOTTOM
@ AVCHROMA_LOC_BOTTOM
Definition: pixfmt.h:743
ScaleContext::force_divisible_by
int force_divisible_by
Definition: vf_scale.c:179
VAR_REF_N
@ VAR_REF_N
Definition: vf_scale.c:110
ff_framesync_get_frame
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe, unsigned get)
Get the current frame in an input.
Definition: framesync.c:269
avfilter_vf_scale2ref_outputs
static const AVFilterPad avfilter_vf_scale2ref_outputs[]
Definition: vf_scale.c:1380
FLAGS
#define FLAGS
Definition: vf_scale.c:1242
int64_t
long long int64_t
Definition: coverity.c:34
ScaleContext::flags_str
char * flags_str
Definition: vf_scale.c:163
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
sws_freeContext
void sws_freeContext(SwsContext *swsContext)
Free the swscaler context swsContext.
Definition: utils.c:2446
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:162
AVFrame::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:679
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: filters.h:262
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:389
pixdesc.h
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:501
AVFrame::width
int width
Definition: frame.h:461
w
uint8_t w
Definition: llviddspenc.c:38
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:717
VAR_A
@ VAR_A
Definition: vf_scale.c:91
request_frame_ref
static int request_frame_ref(AVFilterLink *outlink)
Definition: vf_scale.c:866
av_opt_set_double
int av_opt_set_double(void *obj, const char *name, double val, int search_flags)
Definition: opt.c:877
AVOption
AVOption.
Definition: opt.h:429
AVCOL_SPC_NB
@ AVCOL_SPC_NB
Not part of ABI.
Definition: pixfmt.h:660
scale_parse_expr
static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
Definition: vf_scale.c:281
scale2ref_class
static const AVClass scale2ref_class
Definition: vf_scale.c:1357
table
static const uint16_t table[]
Definition: prosumer.c:203
request_frame
static int request_frame(AVFilterLink *outlink)
Definition: vf_scale.c:861
av_pix_fmt_desc_next
const AVPixFmtDescriptor * av_pix_fmt_desc_next(const AVPixFmtDescriptor *prev)
Iterate over all pixel format descriptors known to libavutil.
Definition: pixdesc.c:3177
VAR_REF_T
@ VAR_REF_T
Definition: vf_scale.c:111
ff_request_frame
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:475
VAR_S2R_MAIN_HSUB
@ VAR_S2R_MAIN_HSUB
Definition: vf_scale.c:118
ScaleContext::var_values
double var_values[VARS_NB]
Definition: vf_scale.c:161
ScaleContext::out_range
int out_range
Definition: vf_scale.c:169
VAR_S2R_MDAR
@ VAR_S2R_MDAR
Definition: vf_scale.c:117
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:225
AVCOL_SPC_RGB
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
Definition: pixfmt.h:641
float.h
EVAL_MODE_FRAME
@ EVAL_MODE_FRAME
Definition: vf_scale.c:128
VAR_S2R_MAIN_H
@ VAR_S2R_MAIN_H
Definition: vf_scale.c:114
AVFrame::flags
int flags
Frame flags, a combination of AV_FRAME_FLAGS.
Definition: frame.h:661
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:205
FFFrameSync
Frame sync structure.
Definition: framesync.h:168
EXT_INFINITY
@ EXT_INFINITY
Extend the frame to infinity.
Definition: framesync.h:75
ScaleContext::in_h_chr_pos
int in_h_chr_pos
Definition: vf_scale.c:175
VAR_OUT_H
@ VAR_OUT_H
Definition: vf_scale.c:90
video.h
ff_make_formats_list_singleton
AVFilterFormats * ff_make_formats_list_singleton(int fmt)
Equivalent to ff_make_format_list({const int[]}{ fmt, -1 })
Definition: formats.c:529
ScaleContext::out_chroma_loc
int out_chroma_loc
Definition: vf_scale.c:172
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
av_expr_parse
int av_expr_parse(AVExpr **expr, const char *s, const char *const *const_names, const char *const *func1_names, double(*const *funcs1)(void *, double), const char *const *func2_names, double(*const *funcs2)(void *, double, double), int log_offset, void *log_ctx)
Parse an expression.
Definition: eval.c:710
VAR_S2R_MAIN_POS
@ VAR_S2R_MAIN_POS
Definition: vf_scale.c:122
AVCOL_SPC_BT470BG
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
Definition: pixfmt.h:646
EXT_STOP
@ EXT_STOP
Completely stop all streams with this one.
Definition: framesync.h:65
ff_append_inpad
int ff_append_inpad(AVFilterContext *f, AVFilterPad *p)
Append a new input/output pad to the filter's list of such pads.
Definition: avfilter.c:127
av_color_space_name
const char * av_color_space_name(enum AVColorSpace space)
Definition: pixdesc.c:3546
VAR_DAR
@ VAR_DAR
Definition: vf_scale.c:93
avfilter_vf_scale_inputs
static const AVFilterPad avfilter_vf_scale_inputs[]
Definition: vf_scale.c:1311
fail
#define fail()
Definition: checkasm.h:188
VARS_NB
@ VARS_NB
Definition: vf_scale.c:123
frame_offset
static void frame_offset(AVFrame *frame, int dir, int is_pal)
Definition: vf_scale.c:871
ScaleContext::isws
struct SwsContext * isws[2]
software scaler context for interlaced material
Definition: vf_scale.c:135
VAR_REF_A
@ VAR_REF_A
Definition: vf_scale.c:105
sws_init_context
av_warn_unused_result int sws_init_context(SwsContext *sws_context, SwsFilter *srcFilter, SwsFilter *dstFilter)
Initialize the swscaler context sws_context.
Definition: utils.c:2081
ScaleContext::eval_mode
int eval_mode
expression evaluation mode
Definition: vf_scale.c:181
EXT_NULL
@ EXT_NULL
Ignore this stream and continue processing the other ones.
Definition: framesync.h:70
VAR_IN_H
@ VAR_IN_H
Definition: vf_scale.c:88
EVAL_MODE_NB
@ EVAL_MODE_NB
Definition: vf_scale.c:129
ScaleContext::in_chroma_loc
int in_chroma_loc
Definition: vf_scale.c:171
sws_get_class
const AVClass * sws_get_class(void)
Get the AVClass for swsContext.
Definition: options.c:97
av_opt_set
int av_opt_set(void *obj, const char *name, const char *val, int search_flags)
Definition: opt.c:827
VAR_REF_W
@ VAR_REF_W
Definition: vf_scale.c:103
AVFILTER_FLAG_DYNAMIC_INPUTS
#define AVFILTER_FLAG_DYNAMIC_INPUTS
The number of the filter inputs is not determined just by AVFilter.inputs.
Definition: avfilter.h:141
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
av_expr_free
void av_expr_free(AVExpr *e)
Free a parsed expression previously created with av_expr_parse().
Definition: eval.c:358
AVRational::num
int num
Numerator.
Definition: rational.h:59
OFFSET
#define OFFSET(x)
Definition: vf_scale.c:1241
preinit
static av_cold int preinit(AVFilterContext *ctx)
Definition: vf_scale.c:334
AVFilterPad
A filter pad used for either input or output.
Definition: filters.h:38
activate
static int activate(AVFilterContext *ctx)
Definition: vf_scale.c:1211
AV_PIX_FMT_BGR8
@ AV_PIX_FMT_BGR8
packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
Definition: pixfmt.h:90
SwsContext
struct SwsContext SwsContext
Definition: swscale.h:45
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:209
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_cold
#define av_cold
Definition: attributes.h:90
VAR_REF_H
@ VAR_REF_H
Definition: vf_scale.c:104
scale2ref_child_next
static void * scale2ref_child_next(void *obj, void *prev)
Definition: vf_scale.c:1349
ScaleContext::sws
struct SwsContext * sws
software scaler context
Definition: vf_scale.c:134
s
#define s(width, name)
Definition: cbs_vp9.c:198
VAR_OH
@ VAR_OH
Definition: vf_scale.c:90
AVCHROMA_LOC_TOP
@ AVCHROMA_LOC_TOP
Definition: pixfmt.h:741
av_chroma_location_enum_to_pos
int av_chroma_location_enum_to_pos(int *xpos, int *ypos, enum AVChromaLocation pos)
Converts AVChromaLocation to swscale x/y chroma position.
Definition: pixdesc.c:3588
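A small sketch of querying the swscale-style chroma sample position for a given chroma location, assuming only the public pixdesc.h API; AVCHROMA_LOC_LEFT is used as an arbitrary example:

#include <stdio.h>
#include "libavutil/pixdesc.h"
#include "libavutil/pixfmt.h"

int main(void)
{
    int xpos, ypos;
    if (av_chroma_location_enum_to_pos(&xpos, &ypos, AVCHROMA_LOC_LEFT) < 0)
        return 1;
    /* xpos/ypos are the swscale-style chroma sample positions. */
    printf("left: x=%d y=%d\n", xpos, ypos);
    return 0;
}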
VAR_S2R_MAIN_W
@ VAR_S2R_MAIN_W
Definition: vf_scale.c:113
ScaleContext::slice_y
int slice_y
top of current output slice
Definition: vf_scale.c:151
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:647
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
AV_OPT_TYPE_DOUBLE
@ AV_OPT_TYPE_DOUBLE
Underlying C type is double.
Definition: opt.h:267
av_expr_count_vars
int av_expr_count_vars(AVExpr *e, unsigned *counter, int size)
Track the presence of variables and their number of occurrences in a parsed expression.
Definition: eval.c:782
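A brief sketch of detecting whether a parsed expression references a particular variable, the same kind of check used for the ref_* variables; the names array and expression here are illustrative, not the filter's own tables:

#include <stdio.h>
#include "libavutil/eval.h"

int main(void)
{
    /* The index of each name doubles as its slot in the counter array. */
    static const char *const names[] = { "iw", "ih", "ref_w", NULL };
    enum { IW, IH, REF_W, NB };
    unsigned counts[NB] = { 0 };
    AVExpr *e = NULL;

    if (av_expr_parse(&e, "ref_w/2", names, NULL, NULL, NULL, NULL, 0, NULL) < 0)
        return 1;
    av_expr_count_vars(e, counts, NB);
    printf("ref_w referenced %u time(s)\n", counts[REF_W]);
    av_expr_free(e);
    return 0;
}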
query_formats
static int query_formats(const AVFilterContext *ctx, AVFilterFormatsConfig **cfg_in, AVFilterFormatsConfig **cfg_out)
Definition: vf_scale.c:474
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:678
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_scale.c:367
VAR_OVSUB
@ VAR_OVSUB
Definition: vf_scale.c:97
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
filters.h
ScaleContext::uses_ref
int uses_ref
Definition: vf_scale.c:155
ctx
AVFormatContext * ctx
Definition: movenc.c:49
process_command
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args, char *res, int res_len, int flags)
Definition: vf_scale.c:1186
av_expr_eval
double av_expr_eval(AVExpr *e, const double *const_values, void *opaque)
Evaluate a previously parsed expression.
Definition: eval.c:792
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
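A minimal example of rescaling a timestamp between two time bases with av_rescale_q; the time bases and value are arbitrary:

#include <stdio.h>
#include "libavutil/mathematics.h"
#include "libavutil/rational.h"

int main(void)
{
    /* Convert a pts of 450000 in a 1/90000 time base to milliseconds. */
    AVRational tb_in  = { 1, 90000 };
    AVRational tb_out = { 1, 1000 };
    int64_t pts_ms = av_rescale_q(450000, tb_in, tb_out);
    printf("%lld ms\n", (long long)pts_ms); /* 5000 ms */
    return 0;
}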
AVExpr
Definition: eval.c:158
field
it's the only field you need to keep, assuming you have a context; there is some magic you don't need to care about around this field
Definition: writing_filters.txt:78
AVPixFmtDescriptor::log2_chroma_w
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
Definition: pixdesc.h:80
ff_fmt_is_in
int ff_fmt_is_in(int fmt, const int *fmts)
Tell if an integer is contained in the provided -1-terminated list of integers.
Definition: formats.c:406
ScaleContext::w_pexpr
AVExpr * w_pexpr
Definition: vf_scale.c:159
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: filters.h:263
avpriv_set_systematic_pal2
int avpriv_set_systematic_pal2(uint32_t pal[256], enum AVPixelFormat pix_fmt)
Definition: imgutils.c:178
NAN
#define NAN
Definition: mathematics.h:115
link
For each input and each output of a filter, the list of supported formats (pixel format for video; channel layout, sample format and rate for audio) is stored as a reference to a shared object; the negotiation mechanism computes the intersection of the formats supported at each end of a link.
Definition: filter_design.txt:23
ScaleContext::out_h_chr_pos
int out_h_chr_pos
Definition: vf_scale.c:173
av_color_range_name
const char * av_color_range_name(enum AVColorRange range)
Definition: pixdesc.c:3486
scale_field
static int scale_field(ScaleContext *scale, AVFrame *dst, AVFrame *src, int field)
Definition: vf_scale.c:880
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
VAR_REF_DAR
@ VAR_REF_DAR
Definition: vf_scale.c:107
ff_framesync_class
const AVClass ff_framesync_class
Definition: framesync.c:54
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:75
NULL
#define NULL
Definition: coverity.c:32
ScaleContext::out_v_chr_pos
int out_v_chr_pos
Definition: vf_scale.c:174
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:713
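A hypothetical helper showing the usual pairing of buffer allocation and av_frame_copy_props when producing a scaled output frame; make_output_frame is not part of this filter:

#include "libavutil/frame.h"

/* Allocate an output frame of the new size and carry over timestamps,
 * color properties and other metadata from the input. */
static AVFrame *make_output_frame(const AVFrame *in, int out_w, int out_h)
{
    AVFrame *out = av_frame_alloc();
    if (!out)
        return NULL;
    out->format = in->format;
    out->width  = out_w;
    out->height = out_h;
    if (av_frame_get_buffer(out, 0) < 0 ||
        av_frame_copy_props(out, in) < 0) {
        av_frame_free(&out);
        return NULL;
    }
    return out;
}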
VAR_POS
@ VAR_POS
Definition: noise.c:56
VAR_T
@ VAR_T
Definition: vf_scale.c:99
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:200
AVCHROMA_LOC_LEFT
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Definition: pixfmt.h:738
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVCHROMA_LOC_TOPLEFT
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
Definition: pixfmt.h:740
isnan
#define isnan(x)
Definition: libm.h:340
scale2ref_child_class_iterate
static const AVClass * scale2ref_child_class_iterate(void **iter)
Definition: vf_scale.c:1342
ScaleContext::in_range
int in_range
Definition: vf_scale.c:168
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:465
VAR_IN_W
@ VAR_IN_W
Definition: vf_scale.c:87
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:237
ff_add_format
int ff_add_format(AVFilterFormats **avff, int64_t fmt)
Add fmt to the list of media formats contained in *avff.
Definition: formats.c:504
parseutils.h
ScaleContext::h_pexpr
AVExpr * h_pexpr
Definition: vf_scale.c:160
double
double
Definition: af_crystalizer.c:132
AVCOL_SPC_YCGCO
@ AVCOL_SPC_YCGCO
used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16
Definition: pixfmt.h:649
av_opt_get_int
int av_opt_get_int(void *obj, const char *name, int search_flags, int64_t *out_val)
Definition: opt.c:1265
AVPixFmtDescriptor::flags
uint64_t flags
Combination of AV_PIX_FMT_FLAG_...
Definition: pixdesc.h:94
ff_vf_scale2ref
const AVFilter ff_vf_scale2ref
Definition: vf_scale.c:185
AVCOL_RANGE_UNSPECIFIED
@ AVCOL_RANGE_UNSPECIFIED
Definition: pixfmt.h:683
index
int index
Definition: gxfenc.c:90
c
Undefined behavior: in C, some operations are undefined, such as signed integer overflow, dereferencing freed pointers, or accessing outside allocated arrays. Undefined behavior must not occur in a C program; it is not safe even if the output of the undefined operation is unused, because optimizing compilers may transform code on the assumption that no undefined behavior occurs, with effects beyond the output of the computation.
Definition: undefined.txt:32
AVFilterFormatsConfig
Lists of formats / etc.
Definition: avfilter.h:111
ScaleContext::out_color_matrix
int out_color_matrix
Definition: vf_scale.c:166
av_opt_set_int
int av_opt_set_int(void *obj, const char *name, int64_t val, int search_flags)
Definition: opt.c:872
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:197
AV_CLASS_CATEGORY_FILTER
@ AV_CLASS_CATEGORY_FILTER
Definition: log.h:36
VAR_IW
@ VAR_IW
Definition: vf_scale.c:87
av_opt_copy
int av_opt_copy(void *dst, const void *src)
Copy options from src object into dest object.
Definition: opt.c:2143
eval.h
VAR_IH
@ VAR_IH
Definition: vf_scale.c:88
VAR_REF_SAR
@ VAR_REF_SAR
Definition: vf_scale.c:106
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
sws_alloc_context
SwsContext * sws_alloc_context(void)
Allocate an empty SwsContext.
Definition: utils.c:1227
dst
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t int int16_t * dst
Definition: dsp.h:83
AVClass::child_next
void *(* child_next)(void *obj, void *prev)
Return next AVOptions-enabled child or NULL.
Definition: log.h:149
child_class_iterate
static const AVClass * child_class_iterate(void **iter)
Definition: vf_scale.c:1217
ScaleContext::w
int w
New dimensions.
Definition: vf_scale.c:146
AVFrame::time_base
AVRational time_base
Time base for the timestamps in this frame.
Definition: frame.h:516
scale_frame
static int scale_frame(AVFilterLink *link, AVFrame **frame_in, AVFrame **frame_out)
Definition: vf_scale.c:922
VAR_RH
@ VAR_RH
Definition: vf_scale.c:104
TS2T
#define TS2T(ts, tb)
Definition: filters.h:278
AVCHROMA_LOC_UNSPECIFIED
@ AVCHROMA_LOC_UNSPECIFIED
Definition: pixfmt.h:737
AVFrame::pkt_pos
attribute_deprecated int64_t pkt_pos
reordered pos from the last AVPacket that has been input into the decoder
Definition: frame.h:699
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:476
scale_eval.h
VAR_RW
@ VAR_RW
Definition: vf_scale.c:103
sws_getColorspaceDetails
int sws_getColorspaceDetails(SwsContext *c, int **inv_table, int *srcRange, int **table, int *dstRange, int *brightness, int *contrast, int *saturation)
Definition: utils.c:1202
FF_API_FRAME_PKT
#define FF_API_FRAME_PKT
Definition: version.h:109
ScaleContext::hsub
int hsub
Definition: vf_scale.c:150
VAR_OUT_W
@ VAR_OUT_W
Definition: vf_scale.c:89
imgutils_internal.h
ff_all_color_ranges
AVFilterFormats * ff_all_color_ranges(void)
Construct an AVFilterFormats representing all possible color ranges.
Definition: formats.c:646
av_pix_fmt_desc_get_id
enum AVPixelFormat av_pix_fmt_desc_get_id(const AVPixFmtDescriptor *desc)
Definition: pixdesc.c:3189
filter_frame
static int filter_frame(AVFilterLink *link, AVFrame *in)
Definition: vf_scale.c:1132
av_parse_video_size
int av_parse_video_size(int *width_ptr, int *height_ptr, const char *str)
Parse str and store the detected width and height in width_ptr and height_ptr.
Definition: parseutils.c:150
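A short example of parsing a size string; "hd720" is one of the named abbreviations understood by parseutils:

#include <stdio.h>
#include "libavutil/parseutils.h"

int main(void)
{
    int w, h;
    /* Accepts both explicit sizes like "1280x720" and abbreviations. */
    if (av_parse_video_size(&w, &h, "hd720") < 0)
        return 1;
    printf("%dx%d\n", w, h); /* 1280x720 */
    return 0;
}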
sws_isSupportedInput
#define sws_isSupportedInput(x)
AVCOL_SPC_SMPTE240M
@ AVCOL_SPC_SMPTE240M
derived from 170M primaries and D65 white point; 170M is derived from BT470 System M's primaries
Definition: pixfmt.h:648
ScaleContext::vsub
int vsub
chroma subsampling
Definition: vf_scale.c:150
config_props
static int config_props(AVFilterLink *outlink)
Definition: vf_scale.c:668
interlaced
uint8_t interlaced
Definition: mxfenc.c:2270
ScaleContext::output_is_pal
int output_is_pal
set to 1 if the output format is paletted
Definition: vf_scale.c:153
VAR_SAR
@ VAR_SAR
Definition: vf_scale.c:92
VAR_RDAR
@ VAR_RDAR
Definition: vf_scale.c:107
sws_isSupportedEndiannessConversion
int sws_isSupportedEndiannessConversion(enum AVPixelFormat pix_fmt)
Definition: utils.c:386
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:651
VAR_S2R_MAIN_N
@ VAR_S2R_MAIN_N
Definition: vf_scale.c:120
internal.h
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:841
EvalMode
EvalMode
Definition: af_volume.h:39
FILTER_QUERY_FUNC2
#define FILTER_QUERY_FUNC2(func)
Definition: filters.h:239
AVFilterPad::name
const char * name
Pad name.
Definition: filters.h:44
AVCOL_SPC_UNSPECIFIED
@ AVCOL_SPC_UNSPECIFIED
Definition: pixfmt.h:643
ScaleContext::h_expr
char * h_expr
height expression string
Definition: vf_scale.c:158
AV_FRAME_FLAG_INTERLACED
#define AV_FRAME_FLAG_INTERLACED
A flag to mark frames whose content is interlaced.
Definition: frame.h:648
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:700
calc_chroma_pos
static void calc_chroma_pos(int *h_pos_out, int *v_pos_out, int chroma_loc, int h_pos_override, int v_pos_override, int h_sub, int v_sub, int index)
Definition: vf_scale.c:624
avfilter_vf_scale_outputs
static const AVFilterPad avfilter_vf_scale_outputs[]
Definition: vf_scale.c:1318
AVFilter
Filter definition.
Definition: avfilter.h:201
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:84
ret
ret
Definition: filter_design.txt:187
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:80
frame
These buffered frames must be flushed immediately if a new input produces new output; the filter must not call request_frame to get more, it must just process the frame or queue it. Requesting more frames is left to the filter's request_frame method or to the application. A filter with several inputs will most likely need some kind of queuing mechanism; a limited queue that drops frames when the inputs are too unbalanced is acceptable. For filters that do not use the activate callback, request_frame is called when a frame is wanted on an output: the filter should push a queued frame if one is available, or request a frame on one of its inputs, repeatedly until at least one frame has been pushed.
Definition: filter_design.txt:264
ScaleContext::in_color_matrix
int in_color_matrix
Definition: vf_scale.c:165
VAR_REF_HSUB
@ VAR_REF_HSUB
Definition: vf_scale.c:108
child_next
static void * child_next(void *obj, void *prev)
Definition: vf_scale.c:1231
ff_framesync_init
int ff_framesync_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in)
Initialize a frame sync structure.
Definition: framesync.c:86
AVFrame::sample_aspect_ratio
AVRational sample_aspect_ratio
Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
Definition: frame.h:496
ff_scale_adjust_dimensions
int ff_scale_adjust_dimensions(AVFilterLink *inlink, int *ret_w, int *ret_h, int force_original_aspect_ratio, int force_divisible_by)
Transform evaluated width and height obtained from ff_scale_eval_dimensions into actual target width ...
Definition: scale_eval.c:113
VAR_S2R_MAIN_T
@ VAR_S2R_MAIN_T
Definition: vf_scale.c:121
scale_eval_dimensions
static int scale_eval_dimensions(AVFilterContext *ctx)
Definition: vf_scale.c:535
var_names
static const char *const var_names[]
Definition: vf_scale.c:45
AVFrame::height
int height
Definition: frame.h:461
VAR_S2R_MAIN_DAR
@ VAR_S2R_MAIN_DAR
Definition: vf_scale.c:117
scale_options
static const AVOption scale_options[]
Definition: vf_scale.c:1245
framesync.h
do_scale
static int do_scale(FFFrameSync *fs)
Definition: vf_scale.c:1064
AVCHROMA_LOC_CENTER
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
Definition: pixfmt.h:739
AVRational::den
int den
Denominator.
Definition: rational.h:60
AVCOL_SPC_FCC
@ AVCOL_SPC_FCC
FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:645
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
avfilter.h
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_scale.c:460
ScaleContext::force_original_aspect_ratio
int force_original_aspect_ratio
Definition: vf_scale.c:178
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:117
avfilter_vf_scale2ref_inputs
static const AVFilterPad avfilter_vf_scale2ref_inputs[]
Definition: vf_scale.c:1367
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
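For example, the display aspect ratio is the sample aspect ratio multiplied by width/height; a self-contained sketch with illustrative PAL numbers:

#include <stdio.h>
#include "libavutil/rational.h"

int main(void)
{
    AVRational sar = { 16, 15 };                       /* e.g. 720x576 at 4:3 */
    AVRational dar = av_mul_q(sar, (AVRational){ 720, 576 });
    printf("DAR = %d:%d\n", dar.num, dar.den);         /* 4:3 */
    return 0;
}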
AVFilterContext
An instance of a filter.
Definition: avfilter.h:457
VAR_OW
@ VAR_OW
Definition: vf_scale.c:89
FF_DISABLE_DEPRECATION_WARNINGS
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:72
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:272
desc
const char * desc
Definition: libsvtav1.c:79
VAR_VSUB
@ VAR_VSUB
Definition: vf_scale.c:95
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
mem.h
sws_getCoefficients
const int * sws_getCoefficients(int colorspace)
Return a pointer to yuv<->rgb coefficients for the given colorspace suitable for sws_setColorspaceDet...
Definition: yuv2rgb.c:61
sws_colorspaces
static const int sws_colorspaces[]
Definition: vf_scale.c:353
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
ScaleContext::interlaced
int interlaced
Definition: vf_scale.c:154
av_free
#define av_free(p)
Definition: tableprint_vlc.h:33
scale
static void scale(int *out, const int *in, const int w, const int h, const int shift)
Definition: intra.c:291
VAR_N
@ VAR_N
Definition: vf_scale.c:98
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Underlying C type is int.
Definition: opt.h:327
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
scale_class
static const AVClass scale_class
Definition: vf_scale.c:1301
ScaleContext::w_expr
char * w_expr
width expression string
Definition: vf_scale.c:157
EVAL_MODE_INIT
@ EVAL_MODE_INIT
Definition: vf_scale.c:127
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:482
AVCHROMA_LOC_NB
@ AVCHROMA_LOC_NB
Not part of ABI.
Definition: pixfmt.h:744
av_opt_get
int av_opt_get(void *obj, const char *name, int search_flags, uint8_t **out_val)
Definition: opt.c:1207
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
VAR_REF_VSUB
@ VAR_REF_VSUB
Definition: vf_scale.c:109
sws_scale_frame
int sws_scale_frame(SwsContext *sws, AVFrame *dst, const AVFrame *src)
Scale source data from src and write the output to dst.
Definition: swscale.c:1225
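A hypothetical wrapper around sws_scale_frame that allocates the destination frame first; scale_to is not part of this filter, and the SwsContext is assumed to have been configured for the matching dimensions and formats (see sws_alloc_context/sws_init_context above):

#include "libavutil/frame.h"
#include "libswscale/swscale.h"

static AVFrame *scale_to(struct SwsContext *sws, const AVFrame *src,
                         int dst_w, int dst_h, int dst_format)
{
    AVFrame *dst = av_frame_alloc();
    if (!dst)
        return NULL;
    dst->width  = dst_w;
    dst->height = dst_h;
    dst->format = dst_format;
    if (av_frame_get_buffer(dst, 0) < 0 ||
        sws_scale_frame(sws, dst, src) < 0) {
        av_frame_free(&dst);
        return NULL;
    }
    return dst;
}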
h
h
Definition: vp9dsp_template.c:2070
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:352
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Underlying C type is a uint8_t* that is either NULL or points to a C string allocated with the av_mal...
Definition: opt.h:276
VAR_OHSUB
@ VAR_OHSUB
Definition: vf_scale.c:96
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:642
AV_PIX_FMT_FLAG_PAL
#define AV_PIX_FMT_FLAG_PAL
Pixel format has a palette in data[1], values are indexes in this palette.
Definition: pixdesc.h:120
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
ff_vf_scale
const AVFilter ff_vf_scale
Definition: vf_scale.c:1326
snprintf
#define snprintf
Definition: snprintf.h:34
ScaleContext::size_str
char * size_str
Definition: vf_scale.c:147
VAR_S2R_MAIN_VSUB
@ VAR_S2R_MAIN_VSUB
Definition: vf_scale.c:119
AVCHROMA_LOC_BOTTOMLEFT
@ AVCHROMA_LOC_BOTTOMLEFT
Definition: pixfmt.h:742
AVPixFmtDescriptor::log2_chroma_h
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:89
ff_framesync_preinit
void ff_framesync_preinit(FFFrameSync *fs)
Pre-initialize a frame sync structure.
Definition: framesync.c:78
src
#define src
Definition: vp8dsp.c:248
swscale.h
ScaleContext::h
int h
Definition: vf_scale.c:146
av_x_if_null
static void * av_x_if_null(const void *p, const void *x)
Return the default pointer x in case p is NULL.
Definition: avutil.h:312
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3090
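A trivial sketch printing the human-readable names of a pixel format, color space and color range; the specific values are only examples:

#include <stdio.h>
#include "libavutil/pixdesc.h"
#include "libavutil/pixfmt.h"

int main(void)
{
    printf("%s / %s / %s\n",
           av_get_pix_fmt_name(AV_PIX_FMT_YUV420P), /* "yuv420p" */
           av_color_space_name(AVCOL_SPC_BT709),    /* "bt709"   */
           av_color_range_name(AVCOL_RANGE_MPEG));  /* "tv"      */
    return 0;
}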
ScaleContext::in_v_chr_pos
int in_v_chr_pos
Definition: vf_scale.c:176