FFmpeg
vf_scale.c
1 /*
2  * Copyright (c) 2007 Bobby Bingham
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * scale video filter
24  */
25 
26 #include <float.h>
27 #include <stdio.h>
28 #include <string.h>
29 
30 #include "avfilter.h"
31 #include "filters.h"
32 #include "formats.h"
33 #include "framesync.h"
34 #include "scale_eval.h"
35 #include "video.h"
36 #include "libavutil/eval.h"
37 #include "libavutil/imgutils.h"
38 #include "libavutil/internal.h"
39 #include "libavutil/mem.h"
40 #include "libavutil/opt.h"
41 #include "libavutil/parseutils.h"
42 #include "libavutil/pixdesc.h"
43 #include "libswscale/swscale.h"
44 
45 static const char *const var_names[] = {
46  "in_w", "iw",
47  "in_h", "ih",
48  "out_w", "ow",
49  "out_h", "oh",
50  "a",
51  "sar",
52  "dar",
53  "hsub",
54  "vsub",
55  "ohsub",
56  "ovsub",
57  "n",
58  "t",
59 #if FF_API_FRAME_PKT
60  "pos",
61 #endif
62  "ref_w", "rw",
63  "ref_h", "rh",
64  "ref_a",
65  "ref_sar",
66  "ref_dar", "rdar",
67  "ref_hsub",
68  "ref_vsub",
69  "ref_n",
70  "ref_t",
71  "ref_pos",
72  /* Legacy variables for scale2ref */
73  "main_w",
74  "main_h",
75  "main_a",
76  "main_sar",
77  "main_dar", "mdar",
78  "main_hsub",
79  "main_vsub",
80  "main_n",
81  "main_t",
82  "main_pos",
83  NULL
84 };
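/* Example (illustrative, assuming the standard filtergraph option syntax and
 * quoting): the names above are the variables available to the w/h
 * expressions, so a graph string such as
 *     scale=w='min(iw,1280)':h=-2
 * clamps the width to 1280 and derives the height from the input aspect
 * ratio, adjusted to be divisible by 2 (see the w/h documentation in
 * ScaleContext below for the meaning of negative values).
 */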
85 
86 enum var_name {
87  VAR_IN_W,   VAR_IW,
88  VAR_IN_H,   VAR_IH,
89  VAR_OUT_W,  VAR_OW,
90  VAR_OUT_H,  VAR_OH,
91  VAR_A,
92  VAR_SAR,
93  VAR_DAR,
94  VAR_HSUB,
95  VAR_VSUB,
96  VAR_OHSUB,
97  VAR_OVSUB,
98  VAR_N,
99  VAR_T,
100 #if FF_API_FRAME_PKT
101  VAR_POS,
102 #endif
103  VAR_REF_W, VAR_RW,
104  VAR_REF_H, VAR_RH,
105  VAR_REF_A,
106  VAR_REF_SAR,
107  VAR_REF_DAR, VAR_RDAR,
108  VAR_REF_HSUB,
109  VAR_REF_VSUB,
110  VAR_REF_N,
111  VAR_REF_T,
112  VAR_REF_POS,
113  VAR_S2R_MAIN_W,
114  VAR_S2R_MAIN_H,
115  VAR_S2R_MAIN_A,
116  VAR_S2R_MAIN_SAR,
117  VAR_S2R_MAIN_DAR, VAR_S2R_MDAR,
118  VAR_S2R_MAIN_HSUB,
119  VAR_S2R_MAIN_VSUB,
120  VAR_S2R_MAIN_N,
121  VAR_S2R_MAIN_T,
122  VAR_S2R_MAIN_POS,
123  VARS_NB
124 };
125 
126 enum EvalMode {
127  EVAL_MODE_INIT,
128  EVAL_MODE_FRAME,
129  EVAL_MODE_NB
130 };
131 
132 typedef struct ScaleContext {
133  const AVClass *class;
134  struct SwsContext *sws; ///< software scaler context
135  struct SwsContext *isws[2]; ///< software scaler context for interlaced material
136  // context used for forwarding options to sws
137  struct SwsContext *sws_opts;
138  FFFrameSync fs;
139 
140  /**
141  * New dimensions. Special values are:
142  * 0 = original width/height
143  * -1 = keep original aspect
144  * -N = try to keep aspect but make sure it is divisible by N
145  */
146  int w, h;
147  char *size_str;
148  double param[2]; // sws params
149 
150  int hsub, vsub; ///< chroma subsampling
151  int slice_y; ///< top of current output slice
152  int input_is_pal; ///< set to 1 if the input format is paletted
153  int output_is_pal; ///< set to 1 if the output format is paletted
154  int interlaced;
155  int uses_ref;
156 
157  char *w_expr; ///< width expression string
158  char *h_expr; ///< height expression string
159  AVExpr *w_pexpr;
160  AVExpr *h_pexpr;
161  double var_values[VARS_NB];
162 
163  char *flags_str;
164 
165  int in_color_matrix;
166  int out_color_matrix;
167 
168  int in_range;
169  int out_range;
170 
171  int in_chroma_loc;
172  int out_chroma_loc;
173  int out_h_chr_pos;
174  int out_v_chr_pos;
175  int in_h_chr_pos;
176  int in_v_chr_pos;
177 
178  int force_original_aspect_ratio;
179  int force_divisible_by;
180 
181  int eval_mode; ///< expression evaluation mode
182 
183 } ScaleContext;
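/* Illustrative examples of the special w/h values documented above, assuming
 * the standard filtergraph syntax (a sketch, not exhaustive):
 *     scale=w=1280:h=0    -> 1280 x <original height>
 *     scale=w=1280:h=-1   -> 1280 x <height that keeps the input aspect ratio>
 *     scale=w=1280:h=-2   -> as above, but forced to be divisible by 2
 */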
184 
185 const AVFilter ff_vf_scale2ref;
186 
187 static int config_props(AVFilterLink *outlink);
188 
189 static int check_exprs(AVFilterContext *ctx)
190 {
191  ScaleContext *scale = ctx->priv;
192  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
193 
194  if (!scale->w_pexpr && !scale->h_pexpr)
195  return AVERROR(EINVAL);
196 
197  if (scale->w_pexpr)
198  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
199  if (scale->h_pexpr)
200  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
201 
202  if (vars_w[VAR_OUT_W] || vars_w[VAR_OW]) {
203  av_log(ctx, AV_LOG_ERROR, "Width expression cannot be self-referencing: '%s'.\n", scale->w_expr);
204  return AVERROR(EINVAL);
205  }
206 
207  if (vars_h[VAR_OUT_H] || vars_h[VAR_OH]) {
208  av_log(ctx, AV_LOG_ERROR, "Height expression cannot be self-referencing: '%s'.\n", scale->h_expr);
209  return AVERROR(EINVAL);
210  }
211 
212  if ((vars_w[VAR_OUT_H] || vars_w[VAR_OH]) &&
213  (vars_h[VAR_OUT_W] || vars_h[VAR_OW])) {
214  av_log(ctx, AV_LOG_WARNING, "Circular references detected for width '%s' and height '%s' - possibly invalid.\n", scale->w_expr, scale->h_expr);
215  }
216 
217  if (vars_w[VAR_REF_W] || vars_h[VAR_REF_W] ||
218  vars_w[VAR_RW] || vars_h[VAR_RW] ||
219  vars_w[VAR_REF_H] || vars_h[VAR_REF_H] ||
220  vars_w[VAR_RH] || vars_h[VAR_RH] ||
221  vars_w[VAR_REF_A] || vars_h[VAR_REF_A] ||
222  vars_w[VAR_REF_SAR] || vars_h[VAR_REF_SAR] ||
223  vars_w[VAR_REF_DAR] || vars_h[VAR_REF_DAR] ||
224  vars_w[VAR_RDAR] || vars_h[VAR_RDAR] ||
225  vars_w[VAR_REF_HSUB] || vars_h[VAR_REF_HSUB] ||
226  vars_w[VAR_REF_VSUB] || vars_h[VAR_REF_VSUB] ||
227  vars_w[VAR_REF_N] || vars_h[VAR_REF_N] ||
228  vars_w[VAR_REF_T] || vars_h[VAR_REF_T] ||
229  vars_w[VAR_REF_POS] || vars_h[VAR_REF_POS]) {
230  scale->uses_ref = 1;
231  }
232 
233  if (ctx->filter != &ff_vf_scale2ref &&
234  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
235  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
236  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
237  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
238  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
239  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
240  vars_w[VAR_S2R_MAIN_HSUB] || vars_h[VAR_S2R_MAIN_HSUB] ||
241  vars_w[VAR_S2R_MAIN_VSUB] || vars_h[VAR_S2R_MAIN_VSUB] ||
242  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
243  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
244  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
245  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref variables are not valid in scale filter.\n");
246  return AVERROR(EINVAL);
247  }
248 
249  if (ctx->filter != &ff_vf_scale2ref &&
250  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
251  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
252  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
253  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
254  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
255  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
256  vars_w[VAR_S2R_MAIN_HSUB] || vars_h[VAR_S2R_MAIN_HSUB] ||
257  vars_w[VAR_S2R_MAIN_VSUB] || vars_h[VAR_S2R_MAIN_VSUB] ||
258  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
259  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
260  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
261  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref variables are not valid in scale filter.\n");
262  return AVERROR(EINVAL);
263  }
264 
265  if (scale->eval_mode == EVAL_MODE_INIT &&
266  (vars_w[VAR_N] || vars_h[VAR_N] ||
267  vars_w[VAR_T] || vars_h[VAR_T] ||
268 #if FF_API_FRAME_PKT
269  vars_w[VAR_POS] || vars_h[VAR_POS] ||
270 #endif
271  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
272  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
273  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
274  av_log(ctx, AV_LOG_ERROR, "Expressions with frame variables 'n', 't', 'pos' are not valid in init eval_mode.\n");
275  return AVERROR(EINVAL);
276  }
277 
278  return 0;
279 }
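/* Illustration of what check_exprs() rejects or warns about (hypothetical
 * option strings, assuming the standard filtergraph syntax):
 *     scale=w=ow/2:h=ih     -> error: the width expression references its own
 *                              result (out_w/ow)
 *     scale=w=oh*2:h=ow/2   -> warning: width and height reference each other
 *     scale=w=iw/2:h=-1     -> accepted
 */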
280 
281 static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
282 {
283  ScaleContext *scale = ctx->priv;
284  int ret, is_inited = 0;
285  char *old_str_expr = NULL;
286  AVExpr *old_pexpr = NULL;
287 
288  if (str_expr) {
289  old_str_expr = av_strdup(str_expr);
290  if (!old_str_expr)
291  return AVERROR(ENOMEM);
292  av_opt_set(scale, var, args, 0);
293  }
294 
295  if (*pexpr_ptr) {
296  old_pexpr = *pexpr_ptr;
297  *pexpr_ptr = NULL;
298  is_inited = 1;
299  }
300 
301  ret = av_expr_parse(pexpr_ptr, args, var_names,
302  NULL, NULL, NULL, NULL, 0, ctx);
303  if (ret < 0) {
304  av_log(ctx, AV_LOG_ERROR, "Cannot parse expression for %s: '%s'\n", var, args);
305  goto revert;
306  }
307 
308  ret = check_exprs(ctx);
309  if (ret < 0)
310  goto revert;
311 
312  if (is_inited && (ret = config_props(ctx->outputs[0])) < 0)
313  goto revert;
314 
315  av_expr_free(old_pexpr);
316  old_pexpr = NULL;
317  av_freep(&old_str_expr);
318 
319  return 0;
320 
321 revert:
322  av_expr_free(*pexpr_ptr);
323  *pexpr_ptr = NULL;
324  if (old_str_expr) {
325  av_opt_set(scale, var, old_str_expr, 0);
326  av_free(old_str_expr);
327  }
328  if (old_pexpr)
329  *pexpr_ptr = old_pexpr;
330 
331  return ret;
332 }
333 
334 static av_cold int preinit(AVFilterContext *ctx)
335 {
336  ScaleContext *scale = ctx->priv;
337  int ret;
338 
339  scale->sws_opts = sws_alloc_context();
340  if (!scale->sws_opts)
341  return AVERROR(ENOMEM);
342 
343  // set threads=0, so we can later check whether the user modified it
344  ret = av_opt_set_int(scale->sws_opts, "threads", 0, 0);
345  if (ret < 0)
346  return ret;
347 
348  ff_framesync_preinit(&scale->fs);
349 
350  return 0;
351 }
352 
353 static const int sws_colorspaces[] = {
354  AVCOL_SPC_UNSPECIFIED,
355  AVCOL_SPC_RGB,
356  AVCOL_SPC_BT709,
357  AVCOL_SPC_BT470BG,
358  AVCOL_SPC_SMPTE170M,
359  AVCOL_SPC_SMPTE240M,
360  AVCOL_SPC_BT2020_NCL,
361  AVCOL_SPC_FCC,
362  -1
363 };
364 
365 static int do_scale(FFFrameSync *fs);
366 
367 static av_cold int init(AVFilterContext *ctx)
368 {
369  ScaleContext *scale = ctx->priv;
370  int64_t threads;
371  int ret;
372 
373  if (ctx->filter == &ff_vf_scale2ref)
374  av_log(ctx, AV_LOG_WARNING, "scale2ref is deprecated, use scale=rw:rh instead\n");
375 
376  if (scale->size_str && (scale->w_expr || scale->h_expr)) {
377  av_log(ctx, AV_LOG_ERROR,
378  "Size and width/height expressions cannot be set at the same time.\n");
379  return AVERROR(EINVAL);
380  }
381 
382  if (scale->w_expr && !scale->h_expr)
383  FFSWAP(char *, scale->w_expr, scale->size_str);
384 
385  if (scale->size_str) {
386  char buf[32];
387  if ((ret = av_parse_video_size(&scale->w, &scale->h, scale->size_str)) < 0) {
388  av_log(ctx, AV_LOG_ERROR,
389  "Invalid size '%s'\n", scale->size_str);
390  return ret;
391  }
392  snprintf(buf, sizeof(buf)-1, "%d", scale->w);
393  av_opt_set(scale, "w", buf, 0);
394  snprintf(buf, sizeof(buf)-1, "%d", scale->h);
395  av_opt_set(scale, "h", buf, 0);
396  }
397  if (!scale->w_expr)
398  av_opt_set(scale, "w", "iw", 0);
399  if (!scale->h_expr)
400  av_opt_set(scale, "h", "ih", 0);
401 
402  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
403  if (ret < 0)
404  return ret;
405 
406  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
407  if (ret < 0)
408  return ret;
409 
410  if (scale->in_color_matrix != -1 &&
411  !ff_fmt_is_in(scale->in_color_matrix, sws_colorspaces)) {
412  av_log(ctx, AV_LOG_ERROR, "Unsupported input color matrix '%s'\n",
413  av_color_space_name(scale->in_color_matrix));
414  return AVERROR(EINVAL);
415  }
416 
417  if (!ff_fmt_is_in(scale->out_color_matrix, sws_colorspaces)) {
418  av_log(ctx, AV_LOG_ERROR, "Unsupported output color matrix '%s'\n",
419  av_color_space_name(scale->out_color_matrix));
420  return AVERROR(EINVAL);
421  }
422 
423  av_log(ctx, AV_LOG_VERBOSE, "w:%s h:%s flags:'%s' interl:%d\n",
424  scale->w_expr, scale->h_expr, (char *)av_x_if_null(scale->flags_str, ""), scale->interlaced);
425 
426  if (scale->flags_str && *scale->flags_str) {
427  ret = av_opt_set(scale->sws_opts, "sws_flags", scale->flags_str, 0);
428  if (ret < 0)
429  return ret;
430  }
431 
432  for (int i = 0; i < FF_ARRAY_ELEMS(scale->param); i++)
433  if (scale->param[i] != DBL_MAX) {
434  ret = av_opt_set_double(scale->sws_opts, i ? "param1" : "param0",
435  scale->param[i], 0);
436  if (ret < 0)
437  return ret;
438  }
439 
440  // use generic thread-count if the user did not set it explicitly
441  ret = av_opt_get_int(scale->sws_opts, "threads", 0, &threads);
442  if (ret < 0)
443  return ret;
444  if (!threads)
445  av_opt_set_int(scale->sws_opts, "threads", ff_filter_get_nb_threads(ctx), 0);
446 
447  if (ctx->filter != &ff_vf_scale2ref && scale->uses_ref) {
448  AVFilterPad pad = {
449  .name = "ref",
450  .type = AVMEDIA_TYPE_VIDEO,
451  };
452  ret = ff_append_inpad(ctx, &pad);
453  if (ret < 0)
454  return ret;
455  }
456 
457  return 0;
458 }
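/* A rough trace of the option handling above (illustrative): a single
 * positional argument such as "scale=hd720" is first stored in w_expr by the
 * generic option parser, then swapped into size_str by the FFSWAP above,
 * parsed by av_parse_video_size() into w=1280/h=720, and finally written back
 * into the "w" and "h" options so the usual expression path takes over.
 */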
459 
460 static av_cold void uninit(AVFilterContext *ctx)
461 {
462  ScaleContext *scale = ctx->priv;
463  av_expr_free(scale->w_pexpr);
464  av_expr_free(scale->h_pexpr);
465  scale->w_pexpr = scale->h_pexpr = NULL;
466  ff_framesync_uninit(&scale->fs);
467  sws_freeContext(scale->sws_opts);
468  sws_freeContext(scale->sws);
469  sws_freeContext(scale->isws[0]);
470  sws_freeContext(scale->isws[1]);
471  scale->sws = NULL;
472 }
473 
474 static int query_formats(AVFilterContext *ctx)
475 {
476  ScaleContext *scale = ctx->priv;
477  AVFilterFormats *formats;
478  const AVPixFmtDescriptor *desc;
479  enum AVPixelFormat pix_fmt;
480  int ret;
481 
482  desc = NULL;
483  formats = NULL;
484  while ((desc = av_pix_fmt_desc_next(desc))) {
485  pix_fmt = av_pix_fmt_desc_get_id(desc);
486  if ((sws_isSupportedInput(pix_fmt) ||
487  sws_isSupportedEndiannessConversion(pix_fmt))
488  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
489  return ret;
490  }
491  }
492  if ((ret = ff_formats_ref(formats, &ctx->inputs[0]->outcfg.formats)) < 0)
493  return ret;
494 
495  desc = NULL;
496  formats = NULL;
497  while ((desc = av_pix_fmt_desc_next(desc))) {
498  pix_fmt = av_pix_fmt_desc_get_id(desc);
499  if ((sws_isSupportedOutput(pix_fmt) || pix_fmt == AV_PIX_FMT_PAL8 ||
500  sws_isSupportedEndiannessConversion(pix_fmt))
501  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
502  return ret;
503  }
504  }
505  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.formats)) < 0)
506  return ret;
507 
508  /* accept all supported inputs, even if user overrides their properties */
509  if ((ret = ff_formats_ref(ff_make_format_list(sws_colorspaces),
510  &ctx->inputs[0]->outcfg.color_spaces)) < 0)
511  return ret;
512 
513  if ((ret = ff_formats_ref(ff_all_color_ranges(),
514  &ctx->inputs[0]->outcfg.color_ranges)) < 0)
515  return ret;
516 
517  /* propagate output properties if overridden */
518  formats = scale->out_color_matrix != AVCOL_SPC_UNSPECIFIED
519  ? ff_make_formats_list_singleton(scale->out_color_matrix)
520  : ff_make_format_list(sws_colorspaces);
521  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.color_spaces)) < 0)
522  return ret;
523 
524  formats = scale->out_range != AVCOL_RANGE_UNSPECIFIED
525  ? ff_make_formats_list_singleton(scale->out_range)
526  : ff_all_color_ranges();
527  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.color_ranges)) < 0)
528  return ret;
529 
530  return 0;
531 }
532 
533 static int scale_eval_dimensions(AVFilterContext *ctx)
534 {
535  ScaleContext *scale = ctx->priv;
536  const char scale2ref = ctx->filter == &ff_vf_scale2ref;
537  const AVFilterLink *inlink = scale2ref ? ctx->inputs[1] : ctx->inputs[0];
538  const AVFilterLink *outlink = ctx->outputs[0];
539  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
540  const AVPixFmtDescriptor *out_desc = av_pix_fmt_desc_get(outlink->format);
541  char *expr;
542  int eval_w, eval_h;
543  int ret;
544  double res;
545  const AVPixFmtDescriptor *main_desc;
546  const AVFilterLink *main_link;
547 
548  if (scale2ref) {
549  main_link = ctx->inputs[0];
550  main_desc = av_pix_fmt_desc_get(main_link->format);
551  }
552 
553  scale->var_values[VAR_IN_W] = scale->var_values[VAR_IW] = inlink->w;
554  scale->var_values[VAR_IN_H] = scale->var_values[VAR_IH] = inlink->h;
555  scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = NAN;
556  scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = NAN;
557  scale->var_values[VAR_A] = (double) inlink->w / inlink->h;
558  scale->var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
559  (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
560  scale->var_values[VAR_DAR] = scale->var_values[VAR_A] * scale->var_values[VAR_SAR];
561  scale->var_values[VAR_HSUB] = 1 << desc->log2_chroma_w;
562  scale->var_values[VAR_VSUB] = 1 << desc->log2_chroma_h;
563  scale->var_values[VAR_OHSUB] = 1 << out_desc->log2_chroma_w;
564  scale->var_values[VAR_OVSUB] = 1 << out_desc->log2_chroma_h;
565 
566  if (scale2ref) {
567  scale->var_values[VAR_S2R_MAIN_W] = main_link->w;
568  scale->var_values[VAR_S2R_MAIN_H] = main_link->h;
569  scale->var_values[VAR_S2R_MAIN_A] = (double) main_link->w / main_link->h;
570  scale->var_values[VAR_S2R_MAIN_SAR] = main_link->sample_aspect_ratio.num ?
571  (double) main_link->sample_aspect_ratio.num / main_link->sample_aspect_ratio.den : 1;
572  scale->var_values[VAR_S2R_MAIN_DAR] = scale->var_values[VAR_S2R_MDAR] =
573  scale->var_values[VAR_S2R_MAIN_A] * scale->var_values[VAR_S2R_MAIN_SAR];
574  scale->var_values[VAR_S2R_MAIN_HSUB] = 1 << main_desc->log2_chroma_w;
575  scale->var_values[VAR_S2R_MAIN_VSUB] = 1 << main_desc->log2_chroma_h;
576  }
577 
578  if (scale->uses_ref) {
579  const AVFilterLink *reflink = ctx->inputs[1];
580  const AVPixFmtDescriptor *ref_desc = av_pix_fmt_desc_get(reflink->format);
581  scale->var_values[VAR_REF_W] = scale->var_values[VAR_RW] = reflink->w;
582  scale->var_values[VAR_REF_H] = scale->var_values[VAR_RH] = reflink->h;
583  scale->var_values[VAR_REF_A] = (double) reflink->w / reflink->h;
584  scale->var_values[VAR_REF_SAR] = reflink->sample_aspect_ratio.num ?
585  (double) reflink->sample_aspect_ratio.num / reflink->sample_aspect_ratio.den : 1;
586  scale->var_values[VAR_REF_DAR] = scale->var_values[VAR_RDAR] =
587  scale->var_values[VAR_REF_A] * scale->var_values[VAR_REF_SAR];
588  scale->var_values[VAR_REF_HSUB] = 1 << ref_desc->log2_chroma_w;
589  scale->var_values[VAR_REF_VSUB] = 1 << ref_desc->log2_chroma_h;
590  }
591 
592  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
593  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
594 
595  res = av_expr_eval(scale->h_pexpr, scale->var_values, NULL);
596  if (isnan(res)) {
597  expr = scale->h_expr;
598  ret = AVERROR(EINVAL);
599  goto fail;
600  }
601  eval_h = scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = (int) res == 0 ? inlink->h : (int) res;
602 
603  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
604  if (isnan(res)) {
605  expr = scale->w_expr;
606  ret = AVERROR(EINVAL);
607  goto fail;
608  }
609  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
610 
611  scale->w = eval_w;
612  scale->h = eval_h;
613 
614  return 0;
615 
616 fail:
617  av_log(ctx, AV_LOG_ERROR,
618  "Error when evaluating the expression '%s'.\n", expr);
619  return ret;
620 }
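/* Evaluation order sketch (illustrative): the width expression is evaluated
 * first so that the height expression may reference ow, then height, then
 * width once more so that it may in turn reference oh.  E.g. with a 1920x1080
 * input and "scale=w=iw/2:h=ow*9/16" the passes yield ow=960, oh=540, ow=960.
 */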
621 
622 static void calc_chroma_pos(int *h_pos_out, int *v_pos_out, int chroma_loc,
623  int h_pos_override, int v_pos_override,
624  int h_sub, int v_sub, int index)
625 {
626  int h_pos, v_pos;
627 
628  /* Explicitly default to center siting for compatibility with swscale */
629  if (chroma_loc == AVCHROMA_LOC_UNSPECIFIED)
630  chroma_loc = AVCHROMA_LOC_CENTER;
631 
632  /* av_chroma_location_enum_to_pos() always gives us values in the range from
633  * 0 to 256, but we need to adjust this to the true value range of the
634  * subsampling grid, which may be larger for h/v_sub > 1 */
635  av_chroma_location_enum_to_pos(&h_pos, &v_pos, chroma_loc);
636  h_pos *= (1 << h_sub) - 1;
637  v_pos *= (1 << v_sub) - 1;
638 
639  if (h_pos_override != -513)
640  h_pos = h_pos_override;
641  if (v_pos_override != -513)
642  v_pos = v_pos_override;
643 
644  /* Fix vertical chroma position for interlaced frames */
645  if (v_sub && index > 0) {
646  /* When vertically subsampling, chroma samples are effectively only
647  * placed next to even rows. To access them from the odd field, we need
648  * to account for this shift by offsetting the distance of one luma row.
649  *
650  * For 4x vertical subsampling (v_sub == 2), they are only placed
651  * next to every *other* even row, so we need to shift by three luma
652  * rows to get to the chroma sample. */
653  if (index == 2)
654  v_pos += (256 << v_sub) - 256;
655 
656  /* Luma row distance is doubled for fields, so halve offsets */
657  v_pos >>= 1;
658  }
659 
660  /* Explicitly strip chroma offsets when not subsampling, because it
661  * interferes with the operation of flags like SWS_FULL_CHR_H_INP */
662  *h_pos_out = h_sub ? h_pos : -513;
663  *v_pos_out = v_sub ? v_pos : -513;
664 }
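/* Worked example (illustrative): for 4:2:0 content (h_sub = v_sub = 1) with
 * left-sited chroma, av_chroma_location_enum_to_pos() returns (0, 128);
 * multiplying by (1 << sub) - 1 == 1 leaves (0, 128).  The progressive
 * context (index 0) uses these values directly, the first field (index 1)
 * halves the vertical offset to 64, and the second field (index 2) first adds
 * 256 and then halves the result to 192.
 */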
665 
666 static int config_props(AVFilterLink *outlink)
667 {
668  AVFilterContext *ctx = outlink->src;
669  AVFilterLink *inlink0 = outlink->src->inputs[0];
670  AVFilterLink *inlink = ctx->filter == &ff_vf_scale2ref ?
671  outlink->src->inputs[1] :
672  outlink->src->inputs[0];
673  enum AVPixelFormat outfmt = outlink->format;
674  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink0->format);
675  const AVPixFmtDescriptor *outdesc = av_pix_fmt_desc_get(outfmt);
676  ScaleContext *scale = ctx->priv;
677  uint8_t *flags_val = NULL;
678  int in_range, in_colorspace;
679  int ret;
680 
681  if ((ret = scale_eval_dimensions(ctx)) < 0)
682  goto fail;
683 
684  outlink->w = scale->w;
685  outlink->h = scale->h;
686 
687  ret = ff_scale_adjust_dimensions(inlink, &outlink->w, &outlink->h,
688  scale->force_original_aspect_ratio,
689  scale->force_divisible_by);
690 
691  if (ret < 0)
692  goto fail;
693 
694  if (outlink->w > INT_MAX ||
695  outlink->h > INT_MAX ||
696  (outlink->h * inlink->w) > INT_MAX ||
697  (outlink->w * inlink->h) > INT_MAX)
698  av_log(ctx, AV_LOG_ERROR, "Rescaled value for width or height is too big.\n");
699 
700  /* TODO: make algorithm configurable */
701 
702  scale->input_is_pal = desc->flags & AV_PIX_FMT_FLAG_PAL;
703  if (outfmt == AV_PIX_FMT_PAL8) outfmt = AV_PIX_FMT_BGR8;
704  scale->output_is_pal = av_pix_fmt_desc_get(outfmt)->flags & AV_PIX_FMT_FLAG_PAL;
705 
706  in_range = scale->in_range;
707  if (in_range == AVCOL_RANGE_UNSPECIFIED)
708  in_range = inlink0->color_range;
709 
710  in_colorspace = scale->in_color_matrix;
711  if (in_colorspace == -1 /* auto */)
712  in_colorspace = inlink0->colorspace;
713 
714  if (scale->sws)
715  sws_freeContext(scale->sws);
716  if (scale->isws[0])
717  sws_freeContext(scale->isws[0]);
718  if (scale->isws[1])
719  sws_freeContext(scale->isws[1]);
720  scale->isws[0] = scale->isws[1] = scale->sws = NULL;
721  if (inlink0->w == outlink->w &&
722  inlink0->h == outlink->h &&
723  in_range == outlink->color_range &&
724  in_colorspace == outlink->colorspace &&
725  inlink0->format == outlink->format &&
726  scale->in_chroma_loc == scale->out_chroma_loc)
727  ;
728  else {
729  struct SwsContext **swscs[3] = {&scale->sws, &scale->isws[0], &scale->isws[1]};
730  int i;
731 
732  for (i = 0; i < 3; i++) {
733  int in_full, out_full, brightness, contrast, saturation;
734  int h_chr_pos, v_chr_pos;
735  const int *inv_table, *table;
736  struct SwsContext *const s = sws_alloc_context();
737  if (!s)
738  return AVERROR(ENOMEM);
739  *swscs[i] = s;
740 
741  ret = av_opt_copy(s, scale->sws_opts);
742  if (ret < 0)
743  return ret;
744 
745  av_opt_set_int(s, "srcw", inlink0 ->w, 0);
746  av_opt_set_int(s, "srch", inlink0 ->h >> !!i, 0);
747  av_opt_set_int(s, "src_format", inlink0->format, 0);
748  av_opt_set_int(s, "dstw", outlink->w, 0);
749  av_opt_set_int(s, "dsth", outlink->h >> !!i, 0);
750  av_opt_set_int(s, "dst_format", outfmt, 0);
751  if (in_range != AVCOL_RANGE_UNSPECIFIED)
752  av_opt_set_int(s, "src_range",
753  in_range == AVCOL_RANGE_JPEG, 0);
754  if (outlink->color_range != AVCOL_RANGE_UNSPECIFIED)
755  av_opt_set_int(s, "dst_range",
756  outlink->color_range == AVCOL_RANGE_JPEG, 0);
757 
758  calc_chroma_pos(&h_chr_pos, &v_chr_pos, scale->in_chroma_loc,
759  scale->in_h_chr_pos, scale->in_v_chr_pos,
760  desc->log2_chroma_w, desc->log2_chroma_h, i);
761  av_opt_set_int(s, "src_h_chr_pos", h_chr_pos, 0);
762  av_opt_set_int(s, "src_v_chr_pos", v_chr_pos, 0);
763 
764  calc_chroma_pos(&h_chr_pos, &v_chr_pos, scale->out_chroma_loc,
765  scale->out_h_chr_pos, scale->out_v_chr_pos,
766  outdesc->log2_chroma_w, outdesc->log2_chroma_h, i);
767  av_opt_set_int(s, "dst_h_chr_pos", h_chr_pos, 0);
768  av_opt_set_int(s, "dst_v_chr_pos", v_chr_pos, 0);
769 
770  if ((ret = sws_init_context(s, NULL, NULL)) < 0)
771  return ret;
772 
773  sws_getColorspaceDetails(s, (int **)&inv_table, &in_full,
774  (int **)&table, &out_full,
775  &brightness, &contrast, &saturation);
776 
777  if (scale->in_color_matrix == -1 /* auto */)
778  inv_table = sws_getCoefficients(inlink0->colorspace);
779  else if (scale->in_color_matrix != AVCOL_SPC_UNSPECIFIED)
780  inv_table = sws_getCoefficients(scale->in_color_matrix);
781  if (outlink->colorspace != AVCOL_SPC_UNSPECIFIED)
782  table = sws_getCoefficients(outlink->colorspace);
783  else if (scale->in_color_matrix != AVCOL_SPC_UNSPECIFIED)
784  table = inv_table;
785 
786  sws_setColorspaceDetails(s, inv_table, in_full,
787  table, out_full,
788  brightness, contrast, saturation);
789 
790  if (!scale->interlaced)
791  break;
792  }
793  }
794 
795  if (inlink0->sample_aspect_ratio.num){
796  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink0->w, outlink->w * inlink0->h}, inlink0->sample_aspect_ratio);
797  } else
798  outlink->sample_aspect_ratio = inlink0->sample_aspect_ratio;
799 
800  if (scale->sws)
801  av_opt_get(scale->sws, "sws_flags", 0, &flags_val);
802 
803  av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s csp:%s range:%s sar:%d/%d -> w:%d h:%d fmt:%s csp:%s range:%s sar:%d/%d flags:%s\n",
804  inlink ->w, inlink ->h, av_get_pix_fmt_name( inlink->format),
805  av_color_space_name(inlink->colorspace), av_color_range_name(inlink->color_range),
806  inlink->sample_aspect_ratio.num, inlink->sample_aspect_ratio.den,
807  outlink->w, outlink->h, av_get_pix_fmt_name(outlink->format),
808  av_color_space_name(outlink->colorspace), av_color_range_name(outlink->color_range),
809  outlink->sample_aspect_ratio.num, outlink->sample_aspect_ratio.den,
810  flags_val);
811  av_freep(&flags_val);
812 
813  if (ctx->filter != &ff_vf_scale2ref) {
814  ff_framesync_uninit(&scale->fs);
815  ret = ff_framesync_init(&scale->fs, ctx, ctx->nb_inputs);
816  if (ret < 0)
817  return ret;
818  scale->fs.on_event = do_scale;
819  scale->fs.in[0].time_base = ctx->inputs[0]->time_base;
820  scale->fs.in[0].sync = 1;
821  scale->fs.in[0].before = EXT_STOP;
822  scale->fs.in[0].after = EXT_STOP;
823  if (scale->uses_ref) {
824  av_assert0(ctx->nb_inputs == 2);
825  scale->fs.in[1].time_base = ctx->inputs[1]->time_base;
826  scale->fs.in[1].sync = 0;
827  scale->fs.in[1].before = EXT_NULL;
828  scale->fs.in[1].after = EXT_INFINITY;
829  }
830 
831  ret = ff_framesync_configure(&scale->fs);
832  if (ret < 0)
833  return ret;
834  }
835 
836  return 0;
837 
838 fail:
839  return ret;
840 }
841 
842 static int config_props_ref(AVFilterLink *outlink)
843 {
844  AVFilterLink *inlink = outlink->src->inputs[1];
845  FilterLink *il = ff_filter_link(inlink);
846  FilterLink *ol = ff_filter_link(outlink);
847 
848  outlink->w = inlink->w;
849  outlink->h = inlink->h;
850  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
851  outlink->time_base = inlink->time_base;
852  ol->frame_rate = il->frame_rate;
853  outlink->colorspace = inlink->colorspace;
854  outlink->color_range = inlink->color_range;
855 
856  return 0;
857 }
858 
859 static int request_frame(AVFilterLink *outlink)
860 {
861  return ff_request_frame(outlink->src->inputs[0]);
862 }
863 
864 static int request_frame_ref(AVFilterLink *outlink)
865 {
866  return ff_request_frame(outlink->src->inputs[1]);
867 }
868 
869 static void frame_offset(AVFrame *frame, int dir, int is_pal)
870 {
871  for (int i = 0; i < 4 && frame->data[i]; i++) {
872  if (i == 1 && is_pal)
873  break;
874  frame->data[i] += frame->linesize[i] * dir;
875  }
876 }
877 
878 static int scale_field(ScaleContext *scale, AVFrame *dst, AVFrame *src,
879  int field)
880 {
881  int orig_h_src = src->height;
882  int orig_h_dst = dst->height;
883  int ret;
884 
885  // offset the data pointers for the bottom field
886  if (field) {
887  frame_offset(src, 1, scale->input_is_pal);
888  frame_offset(dst, 1, scale->output_is_pal);
889  }
890 
891  // take every second line
892  for (int i = 0; i < 4; i++) {
893  src->linesize[i] *= 2;
894  dst->linesize[i] *= 2;
895  }
896  src->height /= 2;
897  dst->height /= 2;
898 
899  ret = sws_scale_frame(scale->isws[field], dst, src);
900  if (ret < 0)
901  return ret;
902 
903  // undo the changes we made above
904  for (int i = 0; i < 4; i++) {
905  src->linesize[i] /= 2;
906  dst->linesize[i] /= 2;
907  }
908  src->height = orig_h_src;
909  dst->height = orig_h_dst;
910 
911  if (field) {
912  frame_offset(src, -1, scale->input_is_pal);
913  frame_offset(dst, -1, scale->output_is_pal);
914  }
915 
916  return 0;
917 }
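/* Technique note (illustrative): the field scaler above processes one field
 * at a time by doubling each linesize and halving the height, so that swscale
 * only sees every second row of the interlaced frame; the bottom field is
 * reached by offsetting the data pointers by one row via frame_offset().
 */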
918 
919 /* Takes over ownership of *frame_in, passes ownership of *frame_out to caller */
920 static int scale_frame(AVFilterLink *link, AVFrame **frame_in,
921  AVFrame **frame_out)
922 {
923  FilterLink *inl = ff_filter_link(link);
924  AVFilterContext *ctx = link->dst;
925  ScaleContext *scale = ctx->priv;
926  AVFilterLink *outlink = ctx->outputs[0];
927  AVFrame *out, *in = *frame_in;
928  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
929  char buf[32];
930  int ret;
931  int frame_changed;
932 
933  *frame_in = NULL;
934  if (in->colorspace == AVCOL_SPC_YCGCO)
935  av_log(link->dst, AV_LOG_WARNING, "Detected unsupported YCgCo colorspace.\n");
936 
937  frame_changed = in->width != link->w ||
938  in->height != link->h ||
939  in->format != link->format ||
940  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
941  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num ||
942  in->colorspace != link->colorspace ||
943  in->color_range != link->color_range;
944 
945  if (scale->eval_mode == EVAL_MODE_FRAME || frame_changed) {
946  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
947 
948  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
949  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
950 
951  if (scale->eval_mode == EVAL_MODE_FRAME &&
952  !frame_changed &&
953  ctx->filter != &ff_vf_scale2ref &&
954  !(vars_w[VAR_N] || vars_w[VAR_T]
955 #if FF_API_FRAME_PKT
956  || vars_w[VAR_POS]
957 #endif
958  ) &&
959  !(vars_h[VAR_N] || vars_h[VAR_T]
960 #if FF_API_FRAME_PKT
961  || vars_h[VAR_POS]
962 #endif
963  ) &&
964  scale->w && scale->h)
965  goto scale;
966 
967  if (scale->eval_mode == EVAL_MODE_INIT) {
968  snprintf(buf, sizeof(buf) - 1, "%d", scale->w);
969  av_opt_set(scale, "w", buf, 0);
970  snprintf(buf, sizeof(buf) - 1, "%d", scale->h);
971  av_opt_set(scale, "h", buf, 0);
972 
973  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
974  if (ret < 0)
975  goto err;
976 
977  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
978  if (ret < 0)
979  goto err;
980  }
981 
982  if (ctx->filter == &ff_vf_scale2ref) {
983  scale->var_values[VAR_S2R_MAIN_N] = inl->frame_count_out;
984  scale->var_values[VAR_S2R_MAIN_T] = TS2T(in->pts, link->time_base);
985 #if FF_API_FRAME_PKT
986 FF_DISABLE_DEPRECATION_WARNINGS
987  scale->var_values[VAR_S2R_MAIN_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
988 FF_ENABLE_DEPRECATION_WARNINGS
989 #endif
990  } else {
991  scale->var_values[VAR_N] = inl->frame_count_out;
992  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
993 #if FF_API_FRAME_PKT
994 FF_DISABLE_DEPRECATION_WARNINGS
995  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
996 FF_ENABLE_DEPRECATION_WARNINGS
997 #endif
998  }
999 
1000  link->dst->inputs[0]->format = in->format;
1001  link->dst->inputs[0]->w = in->width;
1002  link->dst->inputs[0]->h = in->height;
1003  link->dst->inputs[0]->colorspace = in->colorspace;
1004  link->dst->inputs[0]->color_range = in->color_range;
1005 
1006  link->dst->inputs[0]->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
1007  link->dst->inputs[0]->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
1008 
1009  if ((ret = config_props(outlink)) < 0)
1010  goto err;
1011  }
1012 
1013 scale:
1014  if (!scale->sws) {
1015  *frame_out = in;
1016  return 0;
1017  }
1018 
1019  scale->hsub = desc->log2_chroma_w;
1020  scale->vsub = desc->log2_chroma_h;
1021 
1022  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
1023  if (!out) {
1024  ret = AVERROR(ENOMEM);
1025  goto err;
1026  }
1027 
1028  av_frame_copy_props(out, in);
1029  out->width = outlink->w;
1030  out->height = outlink->h;
1031  out->color_range = outlink->color_range;
1032  out->colorspace = outlink->colorspace;
1033  if (scale->out_chroma_loc != AVCHROMA_LOC_UNSPECIFIED)
1034  out->chroma_location = scale->out_chroma_loc;
1035 
1036  if (scale->output_is_pal)
1037  avpriv_set_systematic_pal2((uint32_t*)out->data[1], outlink->format == AV_PIX_FMT_PAL8 ? AV_PIX_FMT_BGR8 : outlink->format);
1038 
1039  av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
1040  (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
1041  (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
1042  INT_MAX);
1043 
1044  if (scale->interlaced>0 || (scale->interlaced<0 &&
1045  (in->flags & AV_FRAME_FLAG_INTERLACED))) {
1046  ret = scale_field(scale, out, in, 0);
1047  if (ret >= 0)
1048  ret = scale_field(scale, out, in, 1);
1049  } else {
1050  ret = sws_scale_frame(scale->sws, out, in);
1051  }
1052 
1053  if (ret < 0)
1054  av_frame_free(&out);
1055  *frame_out = out;
1056 
1057 err:
1058  av_frame_free(&in);
1059  return ret;
1060 }
1061 
1062 static int do_scale(FFFrameSync *fs)
1063 {
1064  AVFilterContext *ctx = fs->parent;
1065  ScaleContext *scale = ctx->priv;
1066  AVFilterLink *outlink = ctx->outputs[0];
1067  AVFrame *out, *in = NULL, *ref = NULL;
1068  int ret = 0, frame_changed;
1069 
1070  ret = ff_framesync_get_frame(fs, 0, &in, 1);
1071  if (ret < 0)
1072  goto err;
1073 
1074  if (scale->uses_ref) {
1075  ret = ff_framesync_get_frame(fs, 1, &ref, 0);
1076  if (ret < 0)
1077  goto err;
1078  }
1079 
1080  if (ref) {
1081  AVFilterLink *reflink = ctx->inputs[1];
1082  FilterLink *rl = ff_filter_link(reflink);
1083 
1084  frame_changed = ref->width != reflink->w ||
1085  ref->height != reflink->h ||
1086  ref->format != reflink->format ||
1087  ref->sample_aspect_ratio.den != reflink->sample_aspect_ratio.den ||
1088  ref->sample_aspect_ratio.num != reflink->sample_aspect_ratio.num ||
1089  ref->colorspace != reflink->colorspace ||
1090  ref->color_range != reflink->color_range;
1091 
1092  if (frame_changed) {
1093  reflink->format = ref->format;
1094  reflink->w = ref->width;
1095  reflink->h = ref->height;
1096  reflink->sample_aspect_ratio.num = ref->sample_aspect_ratio.num;
1097  reflink->sample_aspect_ratio.den = ref->sample_aspect_ratio.den;
1098  reflink->colorspace = ref->colorspace;
1099  reflink->color_range = ref->color_range;
1100 
1101  ret = config_props(outlink);
1102  if (ret < 0)
1103  goto err;
1104  }
1105 
1106  if (scale->eval_mode == EVAL_MODE_FRAME) {
1107  scale->var_values[VAR_REF_N] = rl->frame_count_out;
1108  scale->var_values[VAR_REF_T] = TS2T(ref->pts, reflink->time_base);
1109 #if FF_API_FRAME_PKT
1110 FF_DISABLE_DEPRECATION_WARNINGS
1111  scale->var_values[VAR_REF_POS] = ref->pkt_pos == -1 ? NAN : ref->pkt_pos;
1112 FF_ENABLE_DEPRECATION_WARNINGS
1113 #endif
1114  }
1115  }
1116 
1117  ret = scale_frame(ctx->inputs[0], &in, &out);
1118  if (ret < 0)
1119  goto err;
1120 
1121  av_assert0(out);
1122  out->pts = av_rescale_q(fs->pts, fs->time_base, outlink->time_base);
1123  return ff_filter_frame(outlink, out);
1124 
1125 err:
1126  av_frame_free(&in);
1127  return ret;
1128 }
1129 
1130 static int filter_frame(AVFilterLink *link, AVFrame *in)
1131 {
1132  AVFilterContext *ctx = link->dst;
1133  AVFilterLink *outlink = ctx->outputs[0];
1134  AVFrame *out;
1135  int ret;
1136 
1137  ret = scale_frame(link, &in, &out);
1138  if (out)
1139  return ff_filter_frame(outlink, out);
1140 
1141  return ret;
1142 }
1143 
1144 static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
1145 {
1146  FilterLink *l = ff_filter_link(link);
1147  ScaleContext *scale = link->dst->priv;
1148  AVFilterLink *outlink = link->dst->outputs[1];
1149  int frame_changed;
1150 
1151  frame_changed = in->width != link->w ||
1152  in->height != link->h ||
1153  in->format != link->format ||
1154  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
1155  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num ||
1156  in->colorspace != link->colorspace ||
1157  in->color_range != link->color_range;
1158 
1159  if (frame_changed) {
1160  link->format = in->format;
1161  link->w = in->width;
1162  link->h = in->height;
1163  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
1164  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
1165  link->colorspace = in->colorspace;
1166  link->color_range = in->color_range;
1167 
1168  config_props_ref(outlink);
1169  }
1170 
1171  if (scale->eval_mode == EVAL_MODE_FRAME) {
1172  scale->var_values[VAR_N] = l->frame_count_out;
1173  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
1174 #if FF_API_FRAME_PKT
1175 FF_DISABLE_DEPRECATION_WARNINGS
1176  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
1177 FF_ENABLE_DEPRECATION_WARNINGS
1178 #endif
1179  }
1180 
1181  return ff_filter_frame(outlink, in);
1182 }
1183 
1184 static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
1185  char *res, int res_len, int flags)
1186 {
1187  ScaleContext *scale = ctx->priv;
1188  char *str_expr;
1189  AVExpr **pexpr_ptr;
1190  int ret, w, h;
1191 
1192  w = !strcmp(cmd, "width") || !strcmp(cmd, "w");
1193  h = !strcmp(cmd, "height") || !strcmp(cmd, "h");
1194 
1195  if (w || h) {
1196  str_expr = w ? scale->w_expr : scale->h_expr;
1197  pexpr_ptr = w ? &scale->w_pexpr : &scale->h_pexpr;
1198 
1199  ret = scale_parse_expr(ctx, str_expr, pexpr_ptr, cmd, args);
1200  } else
1201  ret = AVERROR(ENOSYS);
1202 
1203  if (ret < 0)
1204  av_log(ctx, AV_LOG_ERROR, "Failed to process command. Continuing with existing parameters.\n");
1205 
1206  return ret;
1207 }
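/* Usage sketch (illustrative, via the generic avfilter command API): the
 * "w"/"width" and "h"/"height" commands handled above can be sent at runtime
 * from application code, e.g.
 *
 *     // hypothetical call; "Parsed_scale_0" is an assumed instance name
 *     avfilter_graph_send_command(graph, "Parsed_scale_0", "w", "iw/2",
 *                                 NULL, 0, 0);
 *
 * or through the sendcmd filter in a filtergraph.
 */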
1208 
1209 static int activate(AVFilterContext *ctx)
1210 {
1211  ScaleContext *scale = ctx->priv;
1212  return ff_framesync_activate(&scale->fs);
1213 }
1214 
1215 static const AVClass *child_class_iterate(void **iter)
1216 {
1217  switch ((uintptr_t) *iter) {
1218  case 0:
1219  *iter = (void*)(uintptr_t) 1;
1220  return sws_get_class();
1221  case 1:
1222  *iter = (void*)(uintptr_t) 2;
1223  return &ff_framesync_class;
1224  }
1225 
1226  return NULL;
1227 }
1228 
1229 static void *child_next(void *obj, void *prev)
1230 {
1231  ScaleContext *s = obj;
1232  if (!prev)
1233  return s->sws_opts;
1234  if (prev == s->sws_opts)
1235  return &s->fs;
1236  return NULL;
1237 }
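/* Note (illustrative): child_class_iterate()/child_next() expose the embedded
 * SwsContext (and the FFFrameSync) as AVOption children, which is what lets
 * libswscale options that are not duplicated in scale_options be set directly
 * on the filter, e.g. (assumed to go through the generic child-option lookup):
 *     scale=w=640:h=360:sws_dither=ed
 */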
1238 
1239 #define OFFSET(x) offsetof(ScaleContext, x)
1240 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
1241 #define TFLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
1242 
1243 static const AVOption scale_options[] = {
1244  { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1245  { "width", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1246  { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1247  { "height","Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
1248  { "flags", "Flags to pass to libswscale", OFFSET(flags_str), AV_OPT_TYPE_STRING, { .str = "" }, .flags = FLAGS },
1249  { "interl", "set interlacing", OFFSET(interlaced), AV_OPT_TYPE_BOOL, {.i64 = 0 }, -1, 1, FLAGS },
1250  { "size", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, .flags = FLAGS },
1251  { "s", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, .flags = FLAGS },
1252  { "in_color_matrix", "set input YCbCr type", OFFSET(in_color_matrix), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AVCOL_SPC_NB-1, .flags = FLAGS, .unit = "color" },
1253  { "out_color_matrix", "set output YCbCr type", OFFSET(out_color_matrix), AV_OPT_TYPE_INT, { .i64 = AVCOL_SPC_UNSPECIFIED }, 0, AVCOL_SPC_NB-1, .flags = FLAGS, .unit = "color"},
1254  { "auto", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = -1 }, 0, 0, FLAGS, .unit = "color" },
1255  { "bt601", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1256  { "bt470", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1257  { "smpte170m", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1258  { "bt709", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT709 }, 0, 0, FLAGS, .unit = "color" },
1259  { "fcc", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_FCC }, 0, 0, FLAGS, .unit = "color" },
1260  { "smpte240m", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_SMPTE240M }, 0, 0, FLAGS, .unit = "color" },
1261  { "bt2020", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT2020_NCL }, 0, 0, FLAGS, .unit = "color" },
1262  { "in_range", "set input color range", OFFSET( in_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, .unit = "range" },
1263  { "out_range", "set output color range", OFFSET(out_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, .unit = "range" },
1264  { "auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, .unit = "range" },
1265  { "unknown", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, .unit = "range" },
1266  { "full", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1267  { "limited",NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1268  { "jpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1269  { "mpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1270  { "tv", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1271  { "pc", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1272  { "in_chroma_loc", "set input chroma sample location", OFFSET(in_chroma_loc), AV_OPT_TYPE_INT, { .i64 = AVCHROMA_LOC_UNSPECIFIED }, 0, AVCHROMA_LOC_NB-1, .flags = FLAGS, .unit = "chroma_loc" },
1273  { "out_chroma_loc", "set output chroma sample location", OFFSET(out_chroma_loc), AV_OPT_TYPE_INT, { .i64 = AVCHROMA_LOC_UNSPECIFIED }, 0, AVCHROMA_LOC_NB-1, .flags = FLAGS, .unit = "chroma_loc" },
1274  {"auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_UNSPECIFIED}, 0, 0, FLAGS, .unit = "chroma_loc"},
1275  {"unknown", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_UNSPECIFIED}, 0, 0, FLAGS, .unit = "chroma_loc"},
1276  {"left", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_LEFT}, 0, 0, FLAGS, .unit = "chroma_loc"},
1277  {"center", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_CENTER}, 0, 0, FLAGS, .unit = "chroma_loc"},
1278  {"topleft", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_TOPLEFT}, 0, 0, FLAGS, .unit = "chroma_loc"},
1279  {"top", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_TOP}, 0, 0, FLAGS, .unit = "chroma_loc"},
1280  {"bottomleft", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_BOTTOMLEFT}, 0, 0, FLAGS, .unit = "chroma_loc"},
1281  {"bottom", NULL, 0, AV_OPT_TYPE_CONST, {.i64=AVCHROMA_LOC_BOTTOM}, 0, 0, FLAGS, .unit = "chroma_loc"},
1282  { "in_v_chr_pos", "input vertical chroma position in luma grid/256" , OFFSET(in_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1283  { "in_h_chr_pos", "input horizontal chroma position in luma grid/256", OFFSET(in_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1284  { "out_v_chr_pos", "output vertical chroma position in luma grid/256" , OFFSET(out_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1285  { "out_h_chr_pos", "output horizontal chroma position in luma grid/256", OFFSET(out_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1286  { "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 2, FLAGS, .unit = "force_oar" },
1287  { "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, .unit = "force_oar" },
1288  { "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, .unit = "force_oar" },
1289  { "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, .unit = "force_oar" },
1290  { "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1}, 1, 256, FLAGS },
1291  { "param0", "Scaler param 0", OFFSET(param[0]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1292  { "param1", "Scaler param 1", OFFSET(param[1]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1293  { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_INIT}, 0, EVAL_MODE_NB-1, FLAGS, .unit = "eval" },
1294  { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT}, .flags = FLAGS, .unit = "eval" },
1295  { "frame", "eval expressions during initialization and per-frame", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
1296  { NULL }
1297 };
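/* Option usage examples (illustrative, assuming the standard filtergraph
 * syntax; not an exhaustive list):
 *     scale=1280:720:flags=lanczos     explicit size plus scaler flags
 *     scale=size=hd720                 same target size via an abbreviation
 *     scale=w=1920:h=1080:force_original_aspect_ratio=decrease
 *                                      fit within 1920x1080 keeping the input DAR
 *     scale=in_range=tv:out_range=pc   expand limited range to full range
 */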
1298 
1299 static const AVClass scale_class = {
1300  .class_name = "scale",
1301  .item_name = av_default_item_name,
1302  .option = scale_options,
1303  .version = LIBAVUTIL_VERSION_INT,
1304  .category = AV_CLASS_CATEGORY_FILTER,
1305  .child_class_iterate = child_class_iterate,
1306  .child_next = child_next,
1307 };
1308 
1309 static const AVFilterPad avfilter_vf_scale_inputs[] = {
1310  {
1311  .name = "default",
1312  .type = AVMEDIA_TYPE_VIDEO,
1313  },
1314 };
1315 
1316 static const AVFilterPad avfilter_vf_scale_outputs[] = {
1317  {
1318  .name = "default",
1319  .type = AVMEDIA_TYPE_VIDEO,
1320  .config_props = config_props,
1321  },
1322 };
1323 
1324 const AVFilter ff_vf_scale = {
1325  .name = "scale",
1326  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format."),
1327  .preinit = preinit,
1328  .init = init,
1329  .uninit = uninit,
1330  .priv_size = sizeof(ScaleContext),
1331  .priv_class = &scale_class,
1332  FILTER_INPUTS(avfilter_vf_scale_inputs),
1333  FILTER_OUTPUTS(avfilter_vf_scale_outputs),
1334  FILTER_QUERY_FUNC(query_formats),
1335  .activate = activate,
1336  .process_command = process_command,
1337  .flags = AVFILTER_FLAG_DYNAMIC_INPUTS,
1338 };
1339 
1340 static const AVClass *scale2ref_child_class_iterate(void **iter)
1341 {
1342  const AVClass *c = *iter ? NULL : sws_get_class();
1343  *iter = (void*)(uintptr_t)c;
1344  return c;
1345 }
1346 
1347 static void *scale2ref_child_next(void *obj, void *prev)
1348 {
1349  ScaleContext *s = obj;
1350  if (!prev)
1351  return s->sws_opts;
1352  return NULL;
1353 }
1354 
1355 static const AVClass scale2ref_class = {
1356  .class_name = "scale(2ref)",
1357  .item_name = av_default_item_name,
1358  .option = scale_options,
1359  .version = LIBAVUTIL_VERSION_INT,
1360  .category = AV_CLASS_CATEGORY_FILTER,
1361  .child_class_iterate = scale2ref_child_class_iterate,
1362  .child_next = scale2ref_child_next,
1363 };
1364 
1365 static const AVFilterPad avfilter_vf_scale2ref_inputs[] = {
1366  {
1367  .name = "default",
1368  .type = AVMEDIA_TYPE_VIDEO,
1369  .filter_frame = filter_frame,
1370  },
1371  {
1372  .name = "ref",
1373  .type = AVMEDIA_TYPE_VIDEO,
1374  .filter_frame = filter_frame_ref,
1375  },
1376 };
1377 
1378 static const AVFilterPad avfilter_vf_scale2ref_outputs[] = {
1379  {
1380  .name = "default",
1381  .type = AVMEDIA_TYPE_VIDEO,
1382  .config_props = config_props,
1383  .request_frame= request_frame,
1384  },
1385  {
1386  .name = "ref",
1387  .type = AVMEDIA_TYPE_VIDEO,
1388  .config_props = config_props_ref,
1389  .request_frame= request_frame_ref,
1390  },
1391 };
1392 
1393 const AVFilter ff_vf_scale2ref = {
1394  .name = "scale2ref",
1395  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format to the given reference."),
1396  .preinit = preinit,
1397  .init = init,
1398  .uninit = uninit,
1399  .priv_size = sizeof(ScaleContext),
1400  .priv_class = &scale2ref_class,
1401  FILTER_INPUTS(avfilter_vf_scale2ref_inputs),
1402  FILTER_OUTPUTS(avfilter_vf_scale2ref_outputs),
1403  FILTER_QUERY_FUNC(query_formats),
1404  .process_command = process_command,
1405 };
filter_frame_ref
static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
Definition: vf_scale.c:1144
ScaleContext::param
double param[2]
Definition: vf_scale.c:148
VAR_S2R_MAIN_SAR
@ VAR_S2R_MAIN_SAR
Definition: vf_scale.c:116
formats
formats
Definition: signature.h:47
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:116
ScaleContext::fs
FFFrameSync fs
Definition: vf_scale.c:138
VAR_S2R_MAIN_A
@ VAR_S2R_MAIN_A
Definition: vf_scale.c:115
VAR_HSUB
@ VAR_HSUB
Definition: vf_scale.c:94
FF_ENABLE_DEPRECATION_WARNINGS
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:73
config_props_ref
static int config_props_ref(AVFilterLink *outlink)
Definition: vf_scale.c:842
SwsContext::saturation
int saturation
Definition: swscale_internal.h:454
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:137
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVFrame::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: frame.h:653
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
TFLAGS
#define TFLAGS
Definition: vf_scale.c:1241
ScaleContext::sws_opts
struct SwsContext * sws_opts
Definition: vf_scale.c:137
check_exprs
static int check_exprs(AVFilterContext *ctx)
Definition: vf_scale.c:189
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
var_name
var_name
Definition: noise.c:47
ScaleContext::input_is_pal
int input_is_pal
set to 1 if the input format is paletted
Definition: vf_scale.c:152
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:435
VAR_REF_POS
@ VAR_REF_POS
Definition: vf_scale.c:112
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:302
out
FILE * out
Definition: movenc.c:55
sws_isSupportedOutput
#define sws_isSupportedOutput(x)
ScaleContext
Definition: vf_scale.c:132
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1023
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2965
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: vf_scale.c:474
AVCHROMA_LOC_BOTTOM
@ AVCHROMA_LOC_BOTTOM
Definition: pixfmt.h:712
ScaleContext::force_divisible_by
int force_divisible_by
Definition: vf_scale.c:179
VAR_REF_N
@ VAR_REF_N
Definition: vf_scale.c:110
ff_framesync_get_frame
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe, unsigned get)
Get the current frame in an input.
Definition: framesync.c:270
avfilter_vf_scale2ref_outputs
static const AVFilterPad avfilter_vf_scale2ref_outputs[]
Definition: vf_scale.c:1378
FLAGS
#define FLAGS
Definition: vf_scale.c:1240
int64_t
long long int64_t
Definition: coverity.c:34
ScaleContext::flags_str
char * flags_str
Definition: vf_scale.c:163
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:160
AVFrame::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:664
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: filters.h:258
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:374
pixdesc.h
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:486
AVFrame::width
int width
Definition: frame.h:446
w
uint8_t w
Definition: llviddspenc.c:38
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:686
VAR_A
@ VAR_A
Definition: vf_scale.c:91
request_frame_ref
static int request_frame_ref(AVFilterLink *outlink)
Definition: vf_scale.c:864
av_opt_set_double
int av_opt_set_double(void *obj, const char *name, double val, int search_flags)
Definition: opt.c:804
AVOption
AVOption.
Definition: opt.h:429
AVCOL_SPC_NB
@ AVCOL_SPC_NB
Not part of ABI.
Definition: pixfmt.h:629
scale_parse_expr
static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
Definition: vf_scale.c:281
scale2ref_class
static const AVClass scale2ref_class
Definition: vf_scale.c:1355
table
static const uint16_t table[]
Definition: prosumer.c:203
request_frame
static int request_frame(AVFilterLink *outlink)
Definition: vf_scale.c:859
av_pix_fmt_desc_next
const AVPixFmtDescriptor * av_pix_fmt_desc_next(const AVPixFmtDescriptor *prev)
Iterate over all pixel format descriptors known to libavutil.
Definition: pixdesc.c:2972
VAR_REF_T
@ VAR_REF_T
Definition: vf_scale.c:111
ff_request_frame
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:472
VAR_S2R_MAIN_HSUB
@ VAR_S2R_MAIN_HSUB
Definition: vf_scale.c:118
ScaleContext::var_values
double var_values[VARS_NB]
Definition: vf_scale.c:161
ScaleContext::out_range
int out_range
Definition: vf_scale.c:169
VAR_S2R_MDAR
@ VAR_S2R_MDAR
Definition: vf_scale.c:117
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVCOL_SPC_RGB
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
Definition: pixfmt.h:610
float.h
EVAL_MODE_FRAME
@ EVAL_MODE_FRAME
Definition: vf_scale.c:128
VAR_S2R_MAIN_H
@ VAR_S2R_MAIN_H
Definition: vf_scale.c:114
AVFrame::flags
int flags
Frame flags, a combination of AV_FRAME_FLAGS.
Definition: frame.h:646
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:170
FFFrameSync
Frame sync structure.
Definition: framesync.h:168
EXT_INFINITY
@ EXT_INFINITY
Extend the frame to infinity.
Definition: framesync.h:75
ScaleContext::in_h_chr_pos
int in_h_chr_pos
Definition: vf_scale.c:175
VAR_OUT_H
@ VAR_OUT_H
Definition: vf_scale.c:90
video.h
ff_make_formats_list_singleton
AVFilterFormats * ff_make_formats_list_singleton(int fmt)
Equivalent to ff_make_format_list({const int[]}{ fmt, -1 })
Definition: formats.c:529
ScaleContext::out_chroma_loc
int out_chroma_loc
Definition: vf_scale.c:172
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
av_expr_parse
int av_expr_parse(AVExpr **expr, const char *s, const char *const *const_names, const char *const *func1_names, double(*const *funcs1)(void *, double), const char *const *func2_names, double(*const *funcs2)(void *, double, double), int log_offset, void *log_ctx)
Parse an expression.
Definition: eval.c:710
VAR_S2R_MAIN_POS
@ VAR_S2R_MAIN_POS
Definition: vf_scale.c:122
AVCOL_SPC_BT470BG
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
Definition: pixfmt.h:615
EXT_STOP
@ EXT_STOP
Completely stop all streams with this one.
Definition: framesync.h:65
ff_append_inpad
int ff_append_inpad(AVFilterContext *f, AVFilterPad *p)
Append a new input/output pad to the filter's list of such pads.
Definition: avfilter.c:126
av_color_space_name
const char * av_color_space_name(enum AVColorSpace space)
Definition: pixdesc.c:3341
VAR_DAR
@ VAR_DAR
Definition: vf_scale.c:93
avfilter_vf_scale_inputs
static const AVFilterPad avfilter_vf_scale_inputs[]
Definition: vf_scale.c:1309
fail
#define fail()
Definition: checkasm.h:188
VARS_NB
@ VARS_NB
Definition: vf_scale.c:123
frame_offset
static void frame_offset(AVFrame *frame, int dir, int is_pal)
Definition: vf_scale.c:869
ScaleContext::isws
struct SwsContext * isws[2]
software scaler context for interlaced material
Definition: vf_scale.c:135
VAR_REF_A
@ VAR_REF_A
Definition: vf_scale.c:105
ScaleContext::eval_mode
int eval_mode
expression evaluation mode
Definition: vf_scale.c:181
EXT_NULL
@ EXT_NULL
Ignore this stream and continue processing the other ones.
Definition: framesync.h:70
VAR_IN_H
@ VAR_IN_H
Definition: vf_scale.c:88
EVAL_MODE_NB
@ EVAL_MODE_NB
Definition: vf_scale.c:129
ScaleContext::in_chroma_loc
int in_chroma_loc
Definition: vf_scale.c:171
sws_get_class
const AVClass * sws_get_class(void)
Get the AVClass for swsContext.
Definition: options.c:97
av_opt_set
int av_opt_set(void *obj, const char *name, const char *val, int search_flags)
Definition: opt.c:747
VAR_REF_W
@ VAR_REF_W
Definition: vf_scale.c:103
AVFILTER_FLAG_DYNAMIC_INPUTS
#define AVFILTER_FLAG_DYNAMIC_INPUTS
The number of the filter inputs is not determined just by AVFilter.inputs.
Definition: avfilter.h:106
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
av_expr_free
void av_expr_free(AVExpr *e)
Free a parsed expression previously created with av_expr_parse().
Definition: eval.c:358
AVRational::num
int num
Numerator.
Definition: rational.h:59
OFFSET
#define OFFSET(x)
Definition: vf_scale.c:1239
preinit
static av_cold int preinit(AVFilterContext *ctx)
Definition: vf_scale.c:334
AVFilterPad
A filter pad used for either input or output.
Definition: filters.h:38
activate
static int activate(AVFilterContext *ctx)
Definition: vf_scale.c:1209
AV_PIX_FMT_BGR8
@ AV_PIX_FMT_BGR8
packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
Definition: pixfmt.h:90
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_cold
#define av_cold
Definition: attributes.h:90
VAR_REF_H
@ VAR_REF_H
Definition: vf_scale.c:104
scale2ref_child_next
static void * scale2ref_child_next(void *obj, void *prev)
Definition: vf_scale.c:1347
ScaleContext::sws
struct SwsContext * sws
software scaler context
Definition: vf_scale.c:134
s
#define s(width, name)
Definition: cbs_vp9.c:198
VAR_OH
@ VAR_OH
Definition: vf_scale.c:90
AVCHROMA_LOC_TOP
@ AVCHROMA_LOC_TOP
Definition: pixfmt.h:710
av_chroma_location_enum_to_pos
int av_chroma_location_enum_to_pos(int *xpos, int *ypos, enum AVChromaLocation pos)
Converts AVChromaLocation to swscale x/y chroma position.
Definition: pixdesc.c:3383
VAR_S2R_MAIN_W
@ VAR_S2R_MAIN_W
Definition: vf_scale.c:113
SwsContext::brightness
int brightness
Definition: swscale_internal.h:454
ScaleContext::slice_y
int slice_y
top of current output slice
Definition: vf_scale.c:151
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:616
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
AV_OPT_TYPE_DOUBLE
@ AV_OPT_TYPE_DOUBLE
Underlying C type is double.
Definition: opt.h:267
av_expr_count_vars
int av_expr_count_vars(AVExpr *e, unsigned *counter, int size)
Track the presence of variables and their number of occurrences in a parsed expression.
Definition: eval.c:782
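A hedged sketch of the counters this returns: after parsing with a constant-name table, the counter array, indexed like that table, records how often each name occurs. The names and the expression are illustrative.

#include <libavutil/eval.h>

static int count_demo(void)
{
    static const char *const names[] = { "w", "h", NULL };
    unsigned counts[2] = { 0 };
    AVExpr *e = NULL;
    int ret = av_expr_parse(&e, "w*h + w", names, NULL, NULL, NULL, NULL, 0, NULL);
    if (ret < 0)
        return ret;
    av_expr_count_vars(e, counts, 2); /* counts[0] == 2 ("w"), counts[1] == 1 ("h") */
    av_expr_free(e);
    return 0;
}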
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:678
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_scale.c:367
VAR_OVSUB
@ VAR_OVSUB
Definition: vf_scale.c:97
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
filters.h
ScaleContext::uses_ref
int uses_ref
Definition: vf_scale.c:155
ctx
AVFormatContext * ctx
Definition: movenc.c:49
process_command
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args, char *res, int res_len, int flags)
Definition: vf_scale.c:1184
av_expr_eval
double av_expr_eval(AVExpr *e, const double *const_values, void *opaque)
Evaluate a previously parsed expression.
Definition: eval.c:792
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
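A small, hedged example of time-base conversion with this helper; the time bases and value are illustrative and not tied to this filter.

#include <stdint.h>
#include <libavutil/mathematics.h>
#include <libavutil/rational.h>

static int64_t ms_to_90khz(int64_t pts_ms)
{
    const AVRational ms_tb   = { 1, 1000  };
    const AVRational mpeg_tb = { 1, 90000 };
    return av_rescale_q(pts_ms, ms_tb, mpeg_tb);  /* 40 ms -> 3600 */
}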
AVExpr
Definition: eval.c:158
field
it's the only field you need to keep, assuming you have a context. There is some magic you don't need to care about around this field.
Definition: writing_filters.txt:78
AVPixFmtDescriptor::log2_chroma_w
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
Definition: pixdesc.h:80
ff_fmt_is_in
int ff_fmt_is_in(int fmt, const int *fmts)
Tell if an integer is contained in the provided -1-terminated list of integers.
Definition: formats.c:406
SwsContext::contrast
int contrast
Definition: swscale_internal.h:454
ScaleContext::w_pexpr
AVExpr * w_pexpr
Definition: vf_scale.c:159
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: filters.h:259
avpriv_set_systematic_pal2
int avpriv_set_systematic_pal2(uint32_t pal[256], enum AVPixelFormat pix_fmt)
Definition: imgutils.c:178
NAN
#define NAN
Definition: mathematics.h:115
link
Filter: the word “frame” indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. Format: for each input and each output, the list of supported formats; for video that means pixel format, for audio that means channel layout and sample format. These are references to shared objects: the negotiation mechanism computes the intersection of the formats supported at each end of a link.
Definition: filter_design.txt:23
ScaleContext::out_h_chr_pos
int out_h_chr_pos
Definition: vf_scale.c:173
av_color_range_name
const char * av_color_range_name(enum AVColorRange range)
Definition: pixdesc.c:3281
scale_field
static int scale_field(ScaleContext *scale, AVFrame *dst, AVFrame *src, int field)
Definition: vf_scale.c:878
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
VAR_REF_DAR
@ VAR_REF_DAR
Definition: vf_scale.c:107
ff_framesync_class
const AVClass ff_framesync_class
Definition: framesync.c:54
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
ScaleContext::out_v_chr_pos
int out_v_chr_pos
Definition: vf_scale.c:174
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:711
VAR_POS
@ VAR_POS
Definition: noise.c:56
VAR_T
@ VAR_T
Definition: vf_scale.c:99
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:200
AVCHROMA_LOC_LEFT
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Definition: pixfmt.h:707
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVCHROMA_LOC_TOPLEFT
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
Definition: pixfmt.h:709
isnan
#define isnan(x)
Definition: libm.h:340
scale2ref_child_class_iterate
static const AVClass * scale2ref_child_class_iterate(void **iter)
Definition: vf_scale.c:1340
ScaleContext::in_range
int in_range
Definition: vf_scale.c:168
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:415
VAR_IN_W
@ VAR_IN_W
Definition: vf_scale.c:87
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:237
ff_add_format
int ff_add_format(AVFilterFormats **avff, int64_t fmt)
Add fmt to the list of media formats contained in *avff.
Definition: formats.c:504
parseutils.h
sws_alloc_context
struct SwsContext * sws_alloc_context(void)
Allocate an empty SwsContext.
Definition: utils.c:1217
ScaleContext::h_pexpr
AVExpr * h_pexpr
Definition: vf_scale.c:160
double
double
Definition: af_crystalizer.c:132
AVCOL_SPC_YCGCO
@ AVCOL_SPC_YCGCO
used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16
Definition: pixfmt.h:618
av_opt_get_int
int av_opt_get_int(void *obj, const char *name, int search_flags, int64_t *out_val)
Definition: opt.c:1217
sws_setColorspaceDetails
int sws_setColorspaceDetails(struct SwsContext *c, const int inv_table[4], int srcRange, const int table[4], int dstRange, int brightness, int contrast, int saturation)
Definition: utils.c:1030
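A hedged sketch of forcing BT.601 coefficients with limited range on an already-initialized SwsContext; brightness, contrast and saturation are left at their fixed-point defaults (0, 1<<16, 1<<16), and SWS_CS_ITU601 is assumed from swscale.h.

#include <libswscale/swscale.h>

static int force_bt601_limited(struct SwsContext *sws)
{
    const int *coefs = sws_getCoefficients(SWS_CS_ITU601);
    /* srcRange/dstRange: 0 = limited (MPEG) range, 1 = full (JPEG) range */
    return sws_setColorspaceDetails(sws, coefs, 0, coefs, 0,
                                    0, 1 << 16, 1 << 16);
}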
AVPixFmtDescriptor::flags
uint64_t flags
Combination of AV_PIX_FMT_FLAG_...
Definition: pixdesc.h:94
ff_vf_scale2ref
const AVFilter ff_vf_scale2ref
Definition: vf_scale.c:185
AVCOL_RANGE_UNSPECIFIED
@ AVCOL_RANGE_UNSPECIFIED
Definition: pixfmt.h:652
index
int index
Definition: gxfenc.c:90
c
Undefined behavior: in C, some operations, such as signed integer overflow, dereferencing freed memory, or accessing outside allocated bounds, are undefined behavior. Undefined behavior must not occur in a C program; it is not safe even if the output of the undefined operations is unused. The unsafety may seem nit-picking, but optimizing compilers have in fact optimized code on the assumption that no undefined behavior occurs, and optimizing code based on wrong assumptions can, and in some cases has, led to effects beyond the output of computations. Signed integer overflow is a particular problem in speed-critical, highly optimized code that works with signed integers.
Definition: undefined.txt:32
ScaleContext::out_color_matrix
int out_color_matrix
Definition: vf_scale.c:166
av_opt_set_int
int av_opt_set_int(void *obj, const char *name, int64_t val, int search_flags)
Definition: opt.c:799
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:197
AV_CLASS_CATEGORY_FILTER
@ AV_CLASS_CATEGORY_FILTER
Definition: log.h:36
VAR_IW
@ VAR_IW
Definition: vf_scale.c:87
av_opt_copy
int av_opt_copy(void *dst, const void *src)
Copy options from src object into dest object.
Definition: opt.c:2114
eval.h
VAR_IH
@ VAR_IH
Definition: vf_scale.c:88
VAR_REF_SAR
@ VAR_REF_SAR
Definition: vf_scale.c:106
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
AVClass::child_next
void *(* child_next)(void *obj, void *prev)
Return next AVOptions-enabled child or NULL.
Definition: log.h:131
child_class_iterate
static const AVClass * child_class_iterate(void **iter)
Definition: vf_scale.c:1215
ScaleContext::w
int w
New dimensions.
Definition: vf_scale.c:146
AVFrame::time_base
AVRational time_base
Time base for the timestamps in this frame.
Definition: frame.h:501
scale_frame
static int scale_frame(AVFilterLink *link, AVFrame **frame_in, AVFrame **frame_out)
Definition: vf_scale.c:920
VAR_RH
@ VAR_RH
Definition: vf_scale.c:104
TS2T
#define TS2T(ts, tb)
Definition: filters.h:274
AVCHROMA_LOC_UNSPECIFIED
@ AVCHROMA_LOC_UNSPECIFIED
Definition: pixfmt.h:706
AVFrame::pkt_pos
attribute_deprecated int64_t pkt_pos
reordered pos from the last AVPacket that has been input into the decoder
Definition: frame.h:684
AVFrame::format
int format
format of the frame, -1 if unknown or unset. Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:461
scale_eval.h
VAR_RW
@ VAR_RW
Definition: vf_scale.c:103
FF_API_FRAME_PKT
#define FF_API_FRAME_PKT
Definition: version.h:109
ScaleContext::hsub
int hsub
Definition: vf_scale.c:150
VAR_OUT_W
@ VAR_OUT_W
Definition: vf_scale.c:89
imgutils_internal.h
ff_all_color_ranges
AVFilterFormats * ff_all_color_ranges(void)
Construct an AVFilterFormats representing all possible color ranges.
Definition: formats.c:646
av_pix_fmt_desc_get_id
enum AVPixelFormat av_pix_fmt_desc_get_id(const AVPixFmtDescriptor *desc)
Definition: pixdesc.c:2984
filter_frame
static int filter_frame(AVFilterLink *link, AVFrame *in)
Definition: vf_scale.c:1130
av_parse_video_size
int av_parse_video_size(int *width_ptr, int *height_ptr, const char *str)
Parse str and put in width_ptr and height_ptr the detected values.
Definition: parseutils.c:150
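An illustrative use of the size parser; it accepts both "WxH" strings and named sizes (the exact abbreviations are documented in parseutils.h), and the string here is an assumption for the example.

#include <libavutil/parseutils.h>

static int parse_size_demo(void)
{
    int w, h;
    int ret = av_parse_video_size(&w, &h, "1280x720"); /* a name such as "hd720" also works */
    if (ret < 0)
        return ret;
    /* here w == 1280 and h == 720 */
    return 0;
}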
sws_isSupportedInput
#define sws_isSupportedInput(x)
AVCOL_SPC_SMPTE240M
@ AVCOL_SPC_SMPTE240M
derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries
Definition: pixfmt.h:617
ScaleContext::vsub
int vsub
chroma subsampling
Definition: vf_scale.c:150
sws_scale_frame
int sws_scale_frame(struct SwsContext *c, AVFrame *dst, const AVFrame *src)
Scale source data from src and write the output to dst.
Definition: swscale.c:1187
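A hedged sketch, assuming sws has already been configured for exactly this conversion: the destination frame carries the target format and dimensions, and the frame-based API allocates its buffers if they are left clear. The format and size below are illustrative.

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libswscale/swscale.h>

static int scale_one(struct SwsContext *sws, const AVFrame *src, AVFrame **out)
{
    AVFrame *dst = av_frame_alloc();
    int ret;

    if (!dst)
        return AVERROR(ENOMEM);
    dst->format = AV_PIX_FMT_YUV420P;   /* must match what sws was set up for */
    dst->width  = 1280;
    dst->height = 720;

    ret = sws_scale_frame(sws, dst, src);   /* negative AVERROR on failure */
    if (ret < 0) {
        av_frame_free(&dst);
        return ret;
    }
    *out = dst;
    return 0;
}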
config_props
static int config_props(AVFilterLink *outlink)
Definition: vf_scale.c:666
interlaced
uint8_t interlaced
Definition: mxfenc.c:2265
ScaleContext::output_is_pal
int output_is_pal
set to 1 if the output format is paletted
Definition: vf_scale.c:153
VAR_SAR
@ VAR_SAR
Definition: vf_scale.c:92
VAR_RDAR
@ VAR_RDAR
Definition: vf_scale.c:107
sws_isSupportedEndiannessConversion
int sws_isSupportedEndiannessConversion(enum AVPixelFormat pix_fmt)
Definition: utils.c:370
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:620
VAR_S2R_MAIN_N
@ VAR_S2R_MAIN_N
Definition: vf_scale.c:120
internal.h
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:834
EvalMode
EvalMode
Definition: af_volume.h:39
AVFilterPad::name
const char * name
Pad name.
Definition: filters.h:44
AVCOL_SPC_UNSPECIFIED
@ AVCOL_SPC_UNSPECIFIED
Definition: pixfmt.h:612
ScaleContext::h_expr
char * h_expr
height expression string
Definition: vf_scale.c:158
AV_FRAME_FLAG_INTERLACED
#define AV_FRAME_FLAG_INTERLACED
A flag to mark frames whose content is interlaced.
Definition: frame.h:633
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:669
calc_chroma_pos
static void calc_chroma_pos(int *h_pos_out, int *v_pos_out, int chroma_loc, int h_pos_override, int v_pos_override, int h_sub, int v_sub, int index)
Definition: vf_scale.c:622
avfilter_vf_scale_outputs
static const AVFilterPad avfilter_vf_scale_outputs[]
Definition: vf_scale.c:1316
AVFilter
Filter definition.
Definition: avfilter.h:166
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:84
ret
ret
Definition: filter_design.txt:187
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:71
frame
These buffered frames must be flushed immediately if a new input produces new output; the filter must not call request_frame to get more. It must just process the frame or queue it; the task of requesting more frames is left to the filter's request_frame method or the application. If a filter has several inputs, the filter must be ready for frames arriving randomly on any input; any filter with several inputs will most likely require some kind of queuing mechanism. It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced. request_frame: for filters that do not use the activate callback, this method is called when a frame is wanted on an output. For a source, it should directly call filter_frame on the corresponding output. For a filter, if there are queued frames already, one of these frames should be pushed. If the filter should request a frame on one of its inputs, it should do so repeatedly until at least one frame has been pushed, or at least make progress towards producing a frame.
Definition: filter_design.txt:264
ScaleContext::in_color_matrix
int in_color_matrix
Definition: vf_scale.c:165
VAR_REF_HSUB
@ VAR_REF_HSUB
Definition: vf_scale.c:108
child_next
static void * child_next(void *obj, void *prev)
Definition: vf_scale.c:1229
ff_framesync_init
int ff_framesync_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in)
Initialize a frame sync structure.
Definition: framesync.c:86
AVFrame::sample_aspect_ratio
AVRational sample_aspect_ratio
Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
Definition: frame.h:481
sws_getColorspaceDetails
int sws_getColorspaceDetails(struct SwsContext *c, int **inv_table, int *srcRange, int **table, int *dstRange, int *brightness, int *contrast, int *saturation)
Definition: utils.c:1193
ff_scale_adjust_dimensions
int ff_scale_adjust_dimensions(AVFilterLink *inlink, int *ret_w, int *ret_h, int force_original_aspect_ratio, int force_divisible_by)
Transform evaluated width and height obtained from ff_scale_eval_dimensions into actual target width ...
Definition: scale_eval.c:113
sws_init_context
av_warn_unused_result int sws_init_context(struct SwsContext *sws_context, SwsFilter *srcFilter, SwsFilter *dstFilter)
Initialize the swscaler context sws_context.
Definition: utils.c:2068
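A condensed, hedged sketch of the allocate / configure / initialize / free lifecycle these entries describe; the option names ("srcw", "srch", "src_format", ...) are assumed from libswscale's option table and error handling is kept minimal.

#include <libavutil/opt.h>
#include <libswscale/swscale.h>

static struct SwsContext *make_scaler(int in_w, int in_h, enum AVPixelFormat in_fmt,
                                      int out_w, int out_h, enum AVPixelFormat out_fmt)
{
    struct SwsContext *sws = sws_alloc_context();
    if (!sws)
        return NULL;

    av_opt_set_int(sws, "srcw",       in_w,    0);
    av_opt_set_int(sws, "srch",       in_h,    0);
    av_opt_set_int(sws, "src_format", in_fmt,  0);
    av_opt_set_int(sws, "dstw",       out_w,   0);
    av_opt_set_int(sws, "dsth",       out_h,   0);
    av_opt_set_int(sws, "dst_format", out_fmt, 0);

    if (sws_init_context(sws, NULL, NULL) < 0) {   /* NULL: no src/dst filters */
        sws_freeContext(sws);
        return NULL;
    }
    return sws;
}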
VAR_S2R_MAIN_T
@ VAR_S2R_MAIN_T
Definition: vf_scale.c:121
scale_eval_dimensions
static int scale_eval_dimensions(AVFilterContext *ctx)
Definition: vf_scale.c:533
var_names
static const char *const var_names[]
Definition: vf_scale.c:45
AVFrame::height
int height
Definition: frame.h:446
VAR_S2R_MAIN_DAR
@ VAR_S2R_MAIN_DAR
Definition: vf_scale.c:117
scale_options
static const AVOption scale_options[]
Definition: vf_scale.c:1243
framesync.h
sws_freeContext
void sws_freeContext(struct SwsContext *swsContext)
Free the swscaler context swsContext.
Definition: utils.c:2432
do_scale
static int do_scale(FFFrameSync *fs)
Definition: vf_scale.c:1062
AVCHROMA_LOC_CENTER
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
Definition: pixfmt.h:708
AVRational::den
int den
Denominator.
Definition: rational.h:60
AVCOL_SPC_FCC
@ AVCOL_SPC_FCC
FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:614
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
avfilter.h
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_scale.c:460
ScaleContext::force_original_aspect_ratio
int force_original_aspect_ratio
Definition: vf_scale.c:178
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:112
avfilter_vf_scale2ref_inputs
static const AVFilterPad avfilter_vf_scale2ref_inputs[]
Definition: vf_scale.c:1365
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
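For illustration (the numbers are assumptions): multiplying the storage aspect, width over height, by the sample aspect ratio gives the display aspect ratio, already reduced by av_mul_q().

#include <libavutil/rational.h>

static AVRational display_aspect_ratio(int w, int h, AVRational sar)
{
    /* e.g. 1440x1080 with SAR 4:3 -> DAR 16:9 */
    return av_mul_q((AVRational){ w, h }, sar);
}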
AVFilterContext
An instance of a filter.
Definition: avfilter.h:407
VAR_OW
@ VAR_OW
Definition: vf_scale.c:89
FF_DISABLE_DEPRECATION_WARNINGS
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:72
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:272
desc
const char * desc
Definition: libsvtav1.c:79
VAR_VSUB
@ VAR_VSUB
Definition: vf_scale.c:95
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
mem.h
sws_getCoefficients
const int * sws_getCoefficients(int colorspace)
Return a pointer to yuv<->rgb coefficients for the given colorspace suitable for sws_setColorspaceDet...
Definition: yuv2rgb.c:61
sws_colorspaces
static const int sws_colorspaces[]
Definition: vf_scale.c:353
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
ScaleContext::interlaced
int interlaced
Definition: vf_scale.c:154
av_free
#define av_free(p)
Definition: tableprint_vlc.h:33
scale
static void scale(int *out, const int *in, const int w, const int h, const int shift)
Definition: intra.c:291
VAR_N
@ VAR_N
Definition: vf_scale.c:98
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Underlying C type is int.
Definition: opt.h:327
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
scale_class
static const AVClass scale_class
Definition: vf_scale.c:1299
ScaleContext::w_expr
char * w_expr
width expression string
Definition: vf_scale.c:157
EVAL_MODE_INIT
@ EVAL_MODE_INIT
Definition: vf_scale.c:127
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:482
AVCHROMA_LOC_NB
@ AVCHROMA_LOC_NB
Not part of ABI.
Definition: pixfmt.h:713
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, typically indicating the size in bytes of each pict...
Definition: frame.h:419
av_opt_get
int av_opt_get(void *obj, const char *name, int search_flags, uint8_t **out_val)
Definition: opt.c:1159
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
VAR_REF_VSUB
@ VAR_REF_VSUB
Definition: vf_scale.c:109
h
h
Definition: vp9dsp_template.c:2070
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:353
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Underlying C type is a uint8_t* that is either NULL or points to a C string allocated with the av_mal...
Definition: opt.h:276
VAR_OHSUB
@ VAR_OHSUB
Definition: vf_scale.c:96
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:611
int
int
Definition: ffmpeg_filter.c:424
SwsContext
Definition: swscale_internal.h:299
AV_PIX_FMT_FLAG_PAL
#define AV_PIX_FMT_FLAG_PAL
Pixel format has a palette in data[1], values are indexes in this palette.
Definition: pixdesc.h:120
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
ff_vf_scale
const AVFilter ff_vf_scale
Definition: vf_scale.c:1324
snprintf
#define snprintf
Definition: snprintf.h:34
ScaleContext::size_str
char * size_str
Definition: vf_scale.c:147
FILTER_QUERY_FUNC
#define FILTER_QUERY_FUNC(func)
Definition: filters.h:235
VAR_S2R_MAIN_VSUB
@ VAR_S2R_MAIN_VSUB
Definition: vf_scale.c:119
AVCHROMA_LOC_BOTTOMLEFT
@ AVCHROMA_LOC_BOTTOMLEFT
Definition: pixfmt.h:711
AVPixFmtDescriptor::log2_chroma_h
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:89
ff_framesync_preinit
void ff_framesync_preinit(FFFrameSync *fs)
Pre-initialize a frame sync structure.
Definition: framesync.c:78
swscale.h
ScaleContext::h
int h
Definition: vf_scale.c:146
av_x_if_null
static void * av_x_if_null(const void *p, const void *x)
Return x default pointer in case p is NULL.
Definition: avutil.h:312
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2885
ScaleContext::in_v_chr_pos
int in_v_chr_pos
Definition: vf_scale.c:176
SwsContext::param
double param[2]
Input parameters for scaling algorithms that need them.
Definition: swscale_internal.h:342