FFmpeg
vf_scale.c
1 /*
2  * Copyright (c) 2007 Bobby Bingham
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * scale video filter
24  */
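/* [Illustrative usage note -- not part of the upstream source]
 * The filters defined in this file are normally driven from a filtergraph
 * string; the option names are declared in scale_options[] near the end of
 * the file. A few representative invocations:
 *
 *   scale=w=1280:h=720                  scale to a fixed 1280x720
 *   scale=iw/2:-2                       halve the width, keep the aspect ratio,
 *                                       round the height to a multiple of 2
 *   [main][ref]scale2ref[out][ref_out]  scale "main" to the dimensions of "ref"
 */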
25 
26 #include <float.h>
27 #include <stdio.h>
28 #include <string.h>
29 
30 #include "avfilter.h"
31 #include "formats.h"
32 #include "internal.h"
33 #include "scale_eval.h"
34 #include "video.h"
35 #include "libavutil/avstring.h"
36 #include "libavutil/eval.h"
37 #include "libavutil/internal.h"
38 #include "libavutil/mathematics.h"
39 #include "libavutil/opt.h"
40 #include "libavutil/parseutils.h"
41 #include "libavutil/pixdesc.h"
42 #include "libavutil/imgutils.h"
43 #include "libswscale/swscale.h"
44 
45 static const char *const var_names[] = {
46  "in_w", "iw",
47  "in_h", "ih",
48  "out_w", "ow",
49  "out_h", "oh",
50  "a",
51  "sar",
52  "dar",
53  "hsub",
54  "vsub",
55  "ohsub",
56  "ovsub",
57  "n",
58  "t",
59  "pos",
60  "main_w",
61  "main_h",
62  "main_a",
63  "main_sar",
64  "main_dar", "mdar",
65  "main_hsub",
66  "main_vsub",
67  "main_n",
68  "main_t",
69  "main_pos",
70  NULL
71 };
72 
73 enum var_name {
74  VAR_IN_W, VAR_IW,
75  VAR_IN_H, VAR_IH,
76  VAR_OUT_W, VAR_OW,
77  VAR_OUT_H, VAR_OH,
78  VAR_A,
79  VAR_SAR,
80  VAR_DAR,
81  VAR_HSUB,
82  VAR_VSUB,
83  VAR_OHSUB,
84  VAR_OVSUB,
85  VAR_N,
86  VAR_T,
87  VAR_POS,
88  VAR_S2R_MAIN_W,
89  VAR_S2R_MAIN_H,
90  VAR_S2R_MAIN_A,
91  VAR_S2R_MAIN_SAR,
92  VAR_S2R_MAIN_DAR, VAR_S2R_MDAR,
93  VAR_S2R_MAIN_HSUB,
94  VAR_S2R_MAIN_VSUB,
95  VAR_S2R_MAIN_N,
96  VAR_S2R_MAIN_T,
97  VAR_S2R_MAIN_POS,
98  VARS_NB
99 };
100 
101 enum EvalMode {
102  EVAL_MODE_INIT,
103  EVAL_MODE_FRAME,
104  EVAL_MODE_NB
105 };
106 
107 typedef struct ScaleContext {
108  const AVClass *class;
109  struct SwsContext *sws; ///< software scaler context
110  struct SwsContext *isws[2]; ///< software scaler context for interlaced material
111  // context used for forwarding options to sws
112  struct SwsContext *sws_opts;
113 
114  /**
115  * New dimensions. Special values are:
116  * 0 = original width/height
117  * -1 = keep original aspect
118  * -N = try to keep aspect but make sure it is divisible by N
119  */
120  int w, h;
121  char *size_str;
122  double param[2]; // sws params
123 
124  int hsub, vsub; ///< chroma subsampling
125  int slice_y; ///< top of current output slice
126  int input_is_pal; ///< set to 1 if the input format is paletted
127  int output_is_pal; ///< set to 1 if the output format is paletted
128  int interlaced;
129 
130  char *w_expr; ///< width expression string
131  char *h_expr; ///< height expression string
132  AVExpr *w_pexpr;
133  AVExpr *h_pexpr;
134  double var_values[VARS_NB];
135 
136  char *flags_str;
137 
138  char *in_color_matrix;
139  char *out_color_matrix;
140 
141  int in_range;
142  int in_frame_range;
143  int out_range;
144 
145  int out_h_chr_pos;
146  int out_v_chr_pos;
147  int in_h_chr_pos;
148  int in_v_chr_pos;
149 
150  int force_original_aspect_ratio;
151  int force_divisible_by;
152 
153  int eval_mode; ///< expression evaluation mode
154 
155 } ScaleContext;
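/* [Illustrative note -- not part of the upstream source]
 * The special values documented above for the w/h fields, expressed as filter
 * arguments:
 *   scale=w=0:h=0      keep the input width and height
 *   scale=w=1280:h=-1  width 1280, height chosen to preserve the input aspect
 *   scale=w=1280:h=-2  as above, but the height is also made divisible by 2
 * The negative values are resolved by ff_scale_adjust_dimensions() (scale_eval.c),
 * which config_props() below calls after evaluating the expressions.
 */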
156 
157 const AVFilter ff_vf_scale2ref;
158 
159 static int config_props(AVFilterLink *outlink);
160 
161 static int check_exprs(AVFilterContext *ctx)
162 {
163  ScaleContext *scale = ctx->priv;
164  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
165 
166  if (!scale->w_pexpr && !scale->h_pexpr)
167  return AVERROR(EINVAL);
168 
169  if (scale->w_pexpr)
170  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
171  if (scale->h_pexpr)
172  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
173 
174  if (vars_w[VAR_OUT_W] || vars_w[VAR_OW]) {
175  av_log(ctx, AV_LOG_ERROR, "Width expression cannot be self-referencing: '%s'.\n", scale->w_expr);
176  return AVERROR(EINVAL);
177  }
178 
179  if (vars_h[VAR_OUT_H] || vars_h[VAR_OH]) {
180  av_log(ctx, AV_LOG_ERROR, "Height expression cannot be self-referencing: '%s'.\n", scale->h_expr);
181  return AVERROR(EINVAL);
182  }
183 
184  if ((vars_w[VAR_OUT_H] || vars_w[VAR_OH]) &&
185  (vars_h[VAR_OUT_W] || vars_h[VAR_OW])) {
186  av_log(ctx, AV_LOG_WARNING, "Circular references detected for width '%s' and height '%s' - possibly invalid.\n", scale->w_expr, scale->h_expr);
187  }
188 
189  if (ctx->filter != &ff_vf_scale2ref &&
190  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
191  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
192  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
193  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
194  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
195  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
196  vars_w[VAR_S2R_MAIN_HSUB] || vars_h[VAR_S2R_MAIN_HSUB] ||
197  vars_w[VAR_S2R_MAIN_VSUB] || vars_h[VAR_S2R_MAIN_VSUB] ||
198  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
199  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
200  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
201  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref variables are not valid in scale filter.\n");
202  return AVERROR(EINVAL);
203  }
204 
205  if (scale->eval_mode == EVAL_MODE_INIT &&
206  (vars_w[VAR_N] || vars_h[VAR_N] ||
207  vars_w[VAR_T] || vars_h[VAR_T] ||
208  vars_w[VAR_POS] || vars_h[VAR_POS] ||
209  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
210  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
211  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
212  av_log(ctx, AV_LOG_ERROR, "Expressions with frame variables 'n', 't', 'pos' are not valid in init eval_mode.\n");
213  return AVERROR(EINVAL);
214  }
215 
216  return 0;
217 }
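/* [Illustrative note -- not part of the upstream source]
 * Examples of what check_exprs() does for the plain "scale" filter with the
 * default eval=init mode:
 *   w=iw/2:h=ih/2    accepted
 *   w=ow/2:h=ih      rejected: the width expression references ow (self-reference)
 *   w=oh*a:h=ow/dar  the expressions reference each other: only a warning is logged
 *   w=iw*n:h=ih      rejected: 'n' is a per-frame variable, not allowed with eval=init
 */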
218 
219 static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
220 {
221  ScaleContext *scale = ctx->priv;
222  int ret, is_inited = 0;
223  char *old_str_expr = NULL;
224  AVExpr *old_pexpr = NULL;
225 
226  if (str_expr) {
227  old_str_expr = av_strdup(str_expr);
228  if (!old_str_expr)
229  return AVERROR(ENOMEM);
230  av_opt_set(scale, var, args, 0);
231  }
232 
233  if (*pexpr_ptr) {
234  old_pexpr = *pexpr_ptr;
235  *pexpr_ptr = NULL;
236  is_inited = 1;
237  }
238 
239  ret = av_expr_parse(pexpr_ptr, args, var_names,
240  NULL, NULL, NULL, NULL, 0, ctx);
241  if (ret < 0) {
242  av_log(ctx, AV_LOG_ERROR, "Cannot parse expression for %s: '%s'\n", var, args);
243  goto revert;
244  }
245 
246  ret = check_exprs(ctx);
247  if (ret < 0)
248  goto revert;
249 
250  if (is_inited && (ret = config_props(ctx->outputs[0])) < 0)
251  goto revert;
252 
253  av_expr_free(old_pexpr);
254  old_pexpr = NULL;
255  av_freep(&old_str_expr);
256 
257  return 0;
258 
259 revert:
260  av_expr_free(*pexpr_ptr);
261  *pexpr_ptr = NULL;
262  if (old_str_expr) {
263  av_opt_set(scale, var, old_str_expr, 0);
264  av_free(old_str_expr);
265  }
266  if (old_pexpr)
267  *pexpr_ptr = old_pexpr;
268 
269  return ret;
270 }
271 
272 static av_cold int preinit(AVFilterContext *ctx)
273 {
274  ScaleContext *scale = ctx->priv;
275  int ret;
276 
277  scale->sws_opts = sws_alloc_context();
278  if (!scale->sws_opts)
279  return AVERROR(ENOMEM);
280 
281  // set threads=0, so we can later check whether the user modified it
282  ret = av_opt_set_int(scale->sws_opts, "threads", 0, 0);
283  if (ret < 0)
284  return ret;
285 
286  return 0;
287 }
288 
289 static av_cold int init(AVFilterContext *ctx)
290 {
291  ScaleContext *scale = ctx->priv;
292  int64_t threads;
293  int ret;
294 
295  if (scale->size_str && (scale->w_expr || scale->h_expr)) {
296  av_log(ctx, AV_LOG_ERROR,
297  "Size and width/height expressions cannot be set at the same time.\n");
298  return AVERROR(EINVAL);
299  }
300 
301  if (scale->w_expr && !scale->h_expr)
302  FFSWAP(char *, scale->w_expr, scale->size_str);
303 
304  if (scale->size_str) {
305  char buf[32];
306  if ((ret = av_parse_video_size(&scale->w, &scale->h, scale->size_str)) < 0) {
307  av_log(ctx, AV_LOG_ERROR,
308  "Invalid size '%s'\n", scale->size_str);
309  return ret;
310  }
311  snprintf(buf, sizeof(buf)-1, "%d", scale->w);
312  av_opt_set(scale, "w", buf, 0);
313  snprintf(buf, sizeof(buf)-1, "%d", scale->h);
314  av_opt_set(scale, "h", buf, 0);
315  }
316  if (!scale->w_expr)
317  av_opt_set(scale, "w", "iw", 0);
318  if (!scale->h_expr)
319  av_opt_set(scale, "h", "ih", 0);
320 
321  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
322  if (ret < 0)
323  return ret;
324 
325  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
326  if (ret < 0)
327  return ret;
328 
329  av_log(ctx, AV_LOG_VERBOSE, "w:%s h:%s flags:'%s' interl:%d\n",
330  scale->w_expr, scale->h_expr, (char *)av_x_if_null(scale->flags_str, ""), scale->interlaced);
331 
332  if (scale->flags_str && *scale->flags_str) {
333  ret = av_opt_set(scale->sws_opts, "sws_flags", scale->flags_str, 0);
334  if (ret < 0)
335  return ret;
336  }
337 
338  for (int i = 0; i < FF_ARRAY_ELEMS(scale->param); i++)
339  if (scale->param[i] != DBL_MAX) {
340  ret = av_opt_set_double(scale->sws_opts, i ? "param1" : "param0",
341  scale->param[i], 0);
342  if (ret < 0)
343  return ret;
344  }
345 
346  // use generic thread-count if the user did not set it explicitly
347  ret = av_opt_get_int(scale->sws_opts, "threads", 0, &threads);
348  if (ret < 0)
349  return ret;
350  if (!threads)
351  av_opt_set_int(scale->sws_opts, "threads", ff_filter_get_nb_threads(ctx), 0);
352 
353  scale->in_frame_range = AVCOL_RANGE_UNSPECIFIED;
354 
355  return 0;
356 }
357 
358 static av_cold void uninit(AVFilterContext *ctx)
359 {
360  ScaleContext *scale = ctx->priv;
361  av_expr_free(scale->w_pexpr);
362  av_expr_free(scale->h_pexpr);
363  scale->w_pexpr = scale->h_pexpr = NULL;
364  sws_freeContext(scale->sws_opts);
365  sws_freeContext(scale->sws);
366  sws_freeContext(scale->isws[0]);
367  sws_freeContext(scale->isws[1]);
368  scale->sws = NULL;
369 }
370 
371 static int query_formats(AVFilterContext *ctx)
372 {
373  AVFilterFormats *formats;
374  const AVPixFmtDescriptor *desc;
375  enum AVPixelFormat pix_fmt;
376  int ret;
377 
378  desc = NULL;
379  formats = NULL;
380  while ((desc = av_pix_fmt_desc_next(desc))) {
381  pix_fmt = av_pix_fmt_desc_get_id(desc);
382  if ((sws_isSupportedInput(pix_fmt) ||
383  sws_isSupportedEndiannessConversion(pix_fmt))
384  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
385  return ret;
386  }
387  }
388  if ((ret = ff_formats_ref(formats, &ctx->inputs[0]->outcfg.formats)) < 0)
389  return ret;
390 
391  desc = NULL;
392  formats = NULL;
393  while ((desc = av_pix_fmt_desc_next(desc))) {
394  pix_fmt = av_pix_fmt_desc_get_id(desc);
395  if ((sws_isSupportedOutput(pix_fmt) ||
396  sws_isSupportedEndiannessConversion(pix_fmt))
397  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
398  return ret;
399  }
400  }
401  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.formats)) < 0)
402  return ret;
403 
404  return 0;
405 }
406 
407 static const int *parse_yuv_type(const char *s, enum AVColorSpace colorspace)
408 {
409  if (!s)
410  s = "bt601";
411 
412  if (s && strstr(s, "bt709")) {
413  colorspace = AVCOL_SPC_BT709;
414  } else if (s && strstr(s, "fcc")) {
415  colorspace = AVCOL_SPC_FCC;
416  } else if (s && strstr(s, "smpte240m")) {
417  colorspace = AVCOL_SPC_SMPTE240M;
418  } else if (s && (strstr(s, "bt601") || strstr(s, "bt470") || strstr(s, "smpte170m"))) {
419  colorspace = AVCOL_SPC_BT470BG;
420  } else if (s && strstr(s, "bt2020")) {
421  colorspace = AVCOL_SPC_BT2020_NCL;
422  }
423 
424  if (colorspace < 1 || colorspace > 10 || colorspace == 8) {
425  colorspace = AVCOL_SPC_BT470BG;
426  }
427 
428  return sws_getCoefficients(colorspace);
429 }
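/* [Illustrative note -- not part of the upstream source]
 * parse_yuv_type() maps the in_color_matrix/out_color_matrix option strings to
 * swscale coefficient tables, e.g. "bt709" becomes
 * sws_getCoefficients(AVCOL_SPC_BT709); unrecognized strings (and RGB/identity
 * values) fall back to the BT.601 table via AVCOL_SPC_BT470BG.
 */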
430 
431 static int scale_eval_dimensions(AVFilterContext *ctx)
432 {
433  ScaleContext *scale = ctx->priv;
434  const char scale2ref = ctx->filter == &ff_vf_scale2ref;
435  const AVFilterLink *inlink = scale2ref ? ctx->inputs[1] : ctx->inputs[0];
436  const AVFilterLink *outlink = ctx->outputs[0];
437  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
438  const AVPixFmtDescriptor *out_desc = av_pix_fmt_desc_get(outlink->format);
439  char *expr;
440  int eval_w, eval_h;
441  int ret;
442  double res;
443  const AVPixFmtDescriptor *main_desc;
444  const AVFilterLink *main_link;
445 
446  if (scale2ref) {
447  main_link = ctx->inputs[0];
448  main_desc = av_pix_fmt_desc_get(main_link->format);
449  }
450 
451  scale->var_values[VAR_IN_W] = scale->var_values[VAR_IW] = inlink->w;
452  scale->var_values[VAR_IN_H] = scale->var_values[VAR_IH] = inlink->h;
453  scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = NAN;
454  scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = NAN;
455  scale->var_values[VAR_A] = (double) inlink->w / inlink->h;
456  scale->var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
457  (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
458  scale->var_values[VAR_DAR] = scale->var_values[VAR_A] * scale->var_values[VAR_SAR];
459  scale->var_values[VAR_HSUB] = 1 << desc->log2_chroma_w;
460  scale->var_values[VAR_VSUB] = 1 << desc->log2_chroma_h;
461  scale->var_values[VAR_OHSUB] = 1 << out_desc->log2_chroma_w;
462  scale->var_values[VAR_OVSUB] = 1 << out_desc->log2_chroma_h;
463 
464  if (scale2ref) {
465  scale->var_values[VAR_S2R_MAIN_W] = main_link->w;
466  scale->var_values[VAR_S2R_MAIN_H] = main_link->h;
467  scale->var_values[VAR_S2R_MAIN_A] = (double) main_link->w / main_link->h;
468  scale->var_values[VAR_S2R_MAIN_SAR] = main_link->sample_aspect_ratio.num ?
469  (double) main_link->sample_aspect_ratio.num / main_link->sample_aspect_ratio.den : 1;
470  scale->var_values[VAR_S2R_MAIN_DAR] = scale->var_values[VAR_S2R_MDAR] =
471  scale->var_values[VAR_S2R_MAIN_A] * scale->var_values[VAR_S2R_MAIN_SAR];
472  scale->var_values[VAR_S2R_MAIN_HSUB] = 1 << main_desc->log2_chroma_w;
473  scale->var_values[VAR_S2R_MAIN_VSUB] = 1 << main_desc->log2_chroma_h;
474  }
475 
476  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
477  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
478 
479  res = av_expr_eval(scale->h_pexpr, scale->var_values, NULL);
480  if (isnan(res)) {
481  expr = scale->h_expr;
482  ret = AVERROR(EINVAL);
483  goto fail;
484  }
485  eval_h = scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = (int) res == 0 ? inlink->h : (int) res;
486 
487  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
488  if (isnan(res)) {
489  expr = scale->w_expr;
490  ret = AVERROR(EINVAL);
491  goto fail;
492  }
493  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
494 
495  scale->w = eval_w;
496  scale->h = eval_h;
497 
498  return 0;
499 
500 fail:
501  av_log(ctx, AV_LOG_ERROR,
502  "Error when evaluating the expression '%s'.\n", expr);
503  return ret;
504 }
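/* [Illustrative note -- not part of the upstream source]
 * Worked example for scale_eval_dimensions(): with a 1920x1080 square-pixel
 * input and w=iw/2:h=-1,
 *   in_w/iw = 1920, in_h/ih = 1080, a = 16/9, sar = 1, dar = 16/9,
 * the width expression evaluates to 960 and the height expression to -1, so
 * scale->w = 960 and scale->h = -1. The -1 is only turned into a concrete
 * height (540 here) by ff_scale_adjust_dimensions(), called from config_props().
 */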
505 
506 static int config_props(AVFilterLink *outlink)
507 {
508  AVFilterContext *ctx = outlink->src;
509  AVFilterLink *inlink0 = outlink->src->inputs[0];
510  AVFilterLink *inlink = ctx->filter == &ff_vf_scale2ref ?
511  outlink->src->inputs[1] :
512  outlink->src->inputs[0];
513  enum AVPixelFormat outfmt = outlink->format;
514  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
515  ScaleContext *scale = ctx->priv;
516  uint8_t *flags_val = NULL;
517  int ret;
518 
519  if ((ret = scale_eval_dimensions(ctx)) < 0)
520  goto fail;
521 
522  outlink->w = scale->w;
523  outlink->h = scale->h;
524 
525  ff_scale_adjust_dimensions(inlink, &outlink->w, &outlink->h,
526  scale->force_original_aspect_ratio,
527  scale->force_divisible_by);
528 
529  if (outlink->w > INT_MAX ||
530  outlink->h > INT_MAX ||
531  (outlink->h * inlink->w) > INT_MAX ||
532  (outlink->w * inlink->h) > INT_MAX)
533  av_log(ctx, AV_LOG_ERROR, "Rescaled value for width or height is too big.\n");
534 
535  /* TODO: make algorithm configurable */
536 
537  scale->input_is_pal = desc->flags & AV_PIX_FMT_FLAG_PAL;
538  if (outfmt == AV_PIX_FMT_PAL8) outfmt = AV_PIX_FMT_BGR8;
539  scale->output_is_pal = av_pix_fmt_desc_get(outfmt)->flags & AV_PIX_FMT_FLAG_PAL;
540 
541  if (scale->sws)
542  sws_freeContext(scale->sws);
543  if (scale->isws[0])
544  sws_freeContext(scale->isws[0]);
545  if (scale->isws[1])
546  sws_freeContext(scale->isws[1]);
547  scale->isws[0] = scale->isws[1] = scale->sws = NULL;
548  if (inlink0->w == outlink->w &&
549  inlink0->h == outlink->h &&
550  !scale->out_color_matrix &&
551  scale->in_range == scale->out_range &&
552  inlink0->format == outlink->format)
553  ;
554  else {
555  struct SwsContext **swscs[3] = {&scale->sws, &scale->isws[0], &scale->isws[1]};
556  int i;
557 
558  for (i = 0; i < 3; i++) {
559  int in_v_chr_pos = scale->in_v_chr_pos, out_v_chr_pos = scale->out_v_chr_pos;
560  struct SwsContext *const s = sws_alloc_context();
561  if (!s)
562  return AVERROR(ENOMEM);
563  *swscs[i] = s;
564 
565  ret = av_opt_copy(s, scale->sws_opts);
566  if (ret < 0)
567  return ret;
568 
569  av_opt_set_int(s, "srcw", inlink0 ->w, 0);
570  av_opt_set_int(s, "srch", inlink0 ->h >> !!i, 0);
571  av_opt_set_int(s, "src_format", inlink0->format, 0);
572  av_opt_set_int(s, "dstw", outlink->w, 0);
573  av_opt_set_int(s, "dsth", outlink->h >> !!i, 0);
574  av_opt_set_int(s, "dst_format", outfmt, 0);
575  if (scale->in_range != AVCOL_RANGE_UNSPECIFIED)
576  av_opt_set_int(s, "src_range",
577  scale->in_range == AVCOL_RANGE_JPEG, 0);
578  else if (scale->in_frame_range != AVCOL_RANGE_UNSPECIFIED)
579  av_opt_set_int(s, "src_range",
580  scale->in_frame_range == AVCOL_RANGE_JPEG, 0);
581  if (scale->out_range != AVCOL_RANGE_UNSPECIFIED)
582  av_opt_set_int(s, "dst_range",
583  scale->out_range == AVCOL_RANGE_JPEG, 0);
584 
585  /* Override YUV420P default settings to have the correct (MPEG-2) chroma positions
586  * MPEG-2 chroma positions are used by convention
587  * XXX: support other 4:2:0 pixel formats */
588  if (inlink0->format == AV_PIX_FMT_YUV420P && scale->in_v_chr_pos == -513) {
589  in_v_chr_pos = (i == 0) ? 128 : (i == 1) ? 64 : 192;
590  }
591 
592  if (outlink->format == AV_PIX_FMT_YUV420P && scale->out_v_chr_pos == -513) {
593  out_v_chr_pos = (i == 0) ? 128 : (i == 1) ? 64 : 192;
594  }
595 
596  av_opt_set_int(s, "src_h_chr_pos", scale->in_h_chr_pos, 0);
597  av_opt_set_int(s, "src_v_chr_pos", in_v_chr_pos, 0);
598  av_opt_set_int(s, "dst_h_chr_pos", scale->out_h_chr_pos, 0);
599  av_opt_set_int(s, "dst_v_chr_pos", out_v_chr_pos, 0);
600 
601  if ((ret = sws_init_context(s, NULL, NULL)) < 0)
602  return ret;
603  if (!scale->interlaced)
604  break;
605  }
606  }
607 
608  if (inlink0->sample_aspect_ratio.num){
609  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink0->w, outlink->w * inlink0->h}, inlink0->sample_aspect_ratio);
610  } else
611  outlink->sample_aspect_ratio = inlink0->sample_aspect_ratio;
612 
613  if (scale->sws)
614  av_opt_get(scale->sws, "sws_flags", 0, &flags_val);
615 
616  av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s sar:%d/%d -> w:%d h:%d fmt:%s sar:%d/%d flags:%s\n",
617  inlink ->w, inlink ->h, av_get_pix_fmt_name( inlink->format),
618  inlink->sample_aspect_ratio.num, inlink->sample_aspect_ratio.den,
619  outlink->w, outlink->h, av_get_pix_fmt_name(outlink->format),
620  outlink->sample_aspect_ratio.num, outlink->sample_aspect_ratio.den,
621  flags_val);
622  av_freep(&flags_val);
623 
624  return 0;
625 
626 fail:
627  return ret;
628 }
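/* [Illustrative note -- not part of the upstream source]
 * The sample_aspect_ratio update above rescales the SAR so that the display
 * aspect ratio is preserved. For example, scaling a 720x576 input with
 * SAR 16:15 (DAR 4:3) to 1024x576 gives
 *   new SAR = (576*720)/(1024*576) * 16/15 = 3/4,
 * and 1024/576 * 3/4 is again 4:3.
 */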
629 
630 static int config_props_ref(AVFilterLink *outlink)
631 {
632  AVFilterLink *inlink = outlink->src->inputs[1];
633 
634  outlink->w = inlink->w;
635  outlink->h = inlink->h;
636  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
637  outlink->time_base = inlink->time_base;
638  outlink->frame_rate = inlink->frame_rate;
639 
640  return 0;
641 }
642 
643 static int request_frame(AVFilterLink *outlink)
644 {
645  return ff_request_frame(outlink->src->inputs[0]);
646 }
647 
648 static int request_frame_ref(AVFilterLink *outlink)
649 {
650  return ff_request_frame(outlink->src->inputs[1]);
651 }
652 
653 static void frame_offset(AVFrame *frame, int dir, int is_pal)
654 {
655  for (int i = 0; i < 4 && frame->data[i]; i++) {
656  if (i == 1 && is_pal)
657  break;
658  frame->data[i] += frame->linesize[i] * dir;
659  }
660 }
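/* [Illustrative note -- not part of the upstream source]
 * frame_offset(frame, 1, 0) advances every plane pointer by one line; for a
 * yuv420p frame with linesize = {1920, 960, 960, 0} it adds 1920 to data[0]
 * and 960 to data[1]/data[2]. Combined with the doubled linesizes and halved
 * height in scale_field() below, this makes swscale see only one field.
 */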
661 
662 static int scale_field(ScaleContext *scale, AVFrame *dst, AVFrame *src,
663  int field)
664 {
665  int orig_h_src = src->height;
666  int orig_h_dst = dst->height;
667  int ret;
668 
669  // offset the data pointers for the bottom field
670  if (field) {
671  frame_offset(src, 1, scale->input_is_pal);
672  frame_offset(dst, 1, scale->output_is_pal);
673  }
674 
675  // take every second line
676  for (int i = 0; i < 4; i++) {
677  src->linesize[i] *= 2;
678  dst->linesize[i] *= 2;
679  }
680  src->height /= 2;
681  dst->height /= 2;
682 
683  ret = sws_scale_frame(scale->isws[field], dst, src);
684  if (ret < 0)
685  return ret;
686 
687  // undo the changes we made above
688  for (int i = 0; i < 4; i++) {
689  src->linesize[i] /= 2;
690  dst->linesize[i] /= 2;
691  }
692  src->height = orig_h_src;
693  dst->height = orig_h_dst;
694 
695  if (field) {
696  frame_offset(src, -1, scale->input_is_pal);
697  frame_offset(dst, -1, scale->output_is_pal);
698  }
699 
700  return 0;
701 }
702 
703 static int scale_frame(AVFilterLink *link, AVFrame *in, AVFrame **frame_out)
704 {
705  AVFilterContext *ctx = link->dst;
706  ScaleContext *scale = ctx->priv;
707  AVFilterLink *outlink = ctx->outputs[0];
708  AVFrame *out;
709  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
710  char buf[32];
711  int ret;
712  int in_range;
713  int frame_changed;
714 
715  *frame_out = NULL;
716  if (in->colorspace == AVCOL_SPC_YCGCO)
717  av_log(link->dst, AV_LOG_WARNING, "Detected unsupported YCgCo colorspace.\n");
718 
719  frame_changed = in->width != link->w ||
720  in->height != link->h ||
721  in->format != link->format ||
722  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
723  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num;
724 
725  if (in->color_range != AVCOL_RANGE_UNSPECIFIED &&
726  scale->in_range == AVCOL_RANGE_UNSPECIFIED &&
727  in->color_range != scale->in_frame_range) {
728  scale->in_frame_range = in->color_range;
729  frame_changed = 1;
730  }
731 
732  if (scale->eval_mode == EVAL_MODE_FRAME || frame_changed) {
733  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
734 
735  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
736  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
737 
738  if (scale->eval_mode == EVAL_MODE_FRAME &&
739  !frame_changed &&
740  ctx->filter != &ff_vf_scale2ref &&
741  !(vars_w[VAR_N] || vars_w[VAR_T] || vars_w[VAR_POS]) &&
742  !(vars_h[VAR_N] || vars_h[VAR_T] || vars_h[VAR_POS]) &&
743  scale->w && scale->h)
744  goto scale;
745 
746  if (scale->eval_mode == EVAL_MODE_INIT) {
747  snprintf(buf, sizeof(buf) - 1, "%d", scale->w);
748  av_opt_set(scale, "w", buf, 0);
749  snprintf(buf, sizeof(buf) - 1, "%d", scale->h);
750  av_opt_set(scale, "h", buf, 0);
751 
752  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
753  if (ret < 0)
754  return ret;
755 
756  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
757  if (ret < 0)
758  return ret;
759  }
760 
761  if (ctx->filter == &ff_vf_scale2ref) {
762  scale->var_values[VAR_S2R_MAIN_N] = link->frame_count_out;
763  scale->var_values[VAR_S2R_MAIN_T] = TS2T(in->pts, link->time_base);
764  scale->var_values[VAR_S2R_MAIN_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
765  } else {
766  scale->var_values[VAR_N] = link->frame_count_out;
767  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
768  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
769  }
770 
771  link->dst->inputs[0]->format = in->format;
772  link->dst->inputs[0]->w = in->width;
773  link->dst->inputs[0]->h = in->height;
774 
775  link->dst->inputs[0]->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
776  link->dst->inputs[0]->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
777 
778  if ((ret = config_props(outlink)) < 0)
779  return ret;
780  }
781 
782 scale:
783  if (!scale->sws) {
784  *frame_out = in;
785  return 0;
786  }
787 
788  scale->hsub = desc->log2_chroma_w;
789  scale->vsub = desc->log2_chroma_h;
790 
791  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
792  if (!out) {
793  av_frame_free(&in);
794  return AVERROR(ENOMEM);
795  }
796  *frame_out = out;
797 
799  out->width = outlink->w;
800  out->height = outlink->h;
801 
802  // Sanity checks:
803  // 1. If the output is RGB, set the matrix coefficients to RGB.
804  // 2. If the output is not RGB and we've got the RGB/XYZ (identity)
805  // matrix configured, unset the matrix.
806  // In theory these should be in swscale itself as the AVFrame
807  // based API gets in, so that not every swscale API user has
808  // to go through duplicating such sanity checks.
809  if (av_pix_fmt_desc_get(out->format)->flags & AV_PIX_FMT_FLAG_RGB)
810  out->colorspace = AVCOL_SPC_RGB;
811  else if (out->colorspace == AVCOL_SPC_RGB)
812  out->colorspace = AVCOL_SPC_UNSPECIFIED;
813 
814  if (scale->output_is_pal)
815  avpriv_set_systematic_pal2((uint32_t*)out->data[1], outlink->format == AV_PIX_FMT_PAL8 ? AV_PIX_FMT_BGR8 : outlink->format);
816 
817  in_range = in->color_range;
818 
819  if ( scale->in_color_matrix
820  || scale->out_color_matrix
821  || scale-> in_range != AVCOL_RANGE_UNSPECIFIED
822  || in_range != AVCOL_RANGE_UNSPECIFIED
823  || scale->out_range != AVCOL_RANGE_UNSPECIFIED) {
824  int in_full, out_full, brightness, contrast, saturation;
825  const int *inv_table, *table;
826 
827  sws_getColorspaceDetails(scale->sws, (int **)&inv_table, &in_full,
828  (int **)&table, &out_full,
829  &brightness, &contrast, &saturation);
830 
831  if (scale->in_color_matrix)
832  inv_table = parse_yuv_type(scale->in_color_matrix, in->colorspace);
833  if (scale->out_color_matrix)
834  table = parse_yuv_type(scale->out_color_matrix, AVCOL_SPC_UNSPECIFIED);
835  else if (scale->in_color_matrix)
836  table = inv_table;
837 
838  if (scale-> in_range != AVCOL_RANGE_UNSPECIFIED)
839  in_full = (scale-> in_range == AVCOL_RANGE_JPEG);
840  else if (in_range != AVCOL_RANGE_UNSPECIFIED)
841  in_full = (in_range == AVCOL_RANGE_JPEG);
842  if (scale->out_range != AVCOL_RANGE_UNSPECIFIED)
843  out_full = (scale->out_range == AVCOL_RANGE_JPEG);
844 
845  sws_setColorspaceDetails(scale->sws, inv_table, in_full,
846  table, out_full,
847  brightness, contrast, saturation);
848  if (scale->isws[0])
849  sws_setColorspaceDetails(scale->isws[0], inv_table, in_full,
850  table, out_full,
851  brightness, contrast, saturation);
852  if (scale->isws[1])
853  sws_setColorspaceDetails(scale->isws[1], inv_table, in_full,
854  table, out_full,
855  brightness, contrast, saturation);
856 
857  out->color_range = out_full ? AVCOL_RANGE_JPEG : AVCOL_RANGE_MPEG;
858  }
859 
860  av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
861  (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
862  (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
863  INT_MAX);
864 
865  if (scale->interlaced>0 || (scale->interlaced<0 && in->interlaced_frame)) {
866  ret = scale_field(scale, out, in, 0);
867  if (ret >= 0)
868  ret = scale_field(scale, out, in, 1);
869  } else {
870  ret = sws_scale_frame(scale->sws, out, in);
871  }
872 
873  av_frame_free(&in);
874  if (ret < 0)
875  av_frame_free(frame_out);
876  return ret;
877 }
878 
879 static int filter_frame(AVFilterLink *link, AVFrame *in)
880 {
881  AVFilterContext *ctx = link->dst;
882  AVFilterLink *outlink = ctx->outputs[0];
883  AVFrame *out;
884  int ret;
885 
886  ret = scale_frame(link, in, &out);
887  if (out)
888  return ff_filter_frame(outlink, out);
889 
890  return ret;
891 }
892 
893 static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
894 {
895  ScaleContext *scale = link->dst->priv;
896  AVFilterLink *outlink = link->dst->outputs[1];
897  int frame_changed;
898 
899  frame_changed = in->width != link->w ||
900  in->height != link->h ||
901  in->format != link->format ||
902  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
903  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num;
904 
905  if (frame_changed) {
906  link->format = in->format;
907  link->w = in->width;
908  link->h = in->height;
909  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
910  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
911 
912  config_props_ref(outlink);
913  }
914 
915  if (scale->eval_mode == EVAL_MODE_FRAME) {
916  scale->var_values[VAR_N] = link->frame_count_out;
917  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
918  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
919  }
920 
921  return ff_filter_frame(outlink, in);
922 }
923 
924 static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
925  char *res, int res_len, int flags)
926 {
927  ScaleContext *scale = ctx->priv;
928  char *str_expr;
929  AVExpr **pexpr_ptr;
930  int ret, w, h;
931 
932  w = !strcmp(cmd, "width") || !strcmp(cmd, "w");
933  h = !strcmp(cmd, "height") || !strcmp(cmd, "h");
934 
935  if (w || h) {
936  str_expr = w ? scale->w_expr : scale->h_expr;
937  pexpr_ptr = w ? &scale->w_pexpr : &scale->h_pexpr;
938 
939  ret = scale_parse_expr(ctx, str_expr, pexpr_ptr, cmd, args);
940  } else
941  ret = AVERROR(ENOSYS);
942 
943  if (ret < 0)
944  av_log(ctx, AV_LOG_ERROR, "Failed to process command. Continuing with existing parameters.\n");
945 
946  return ret;
947 }
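/* [Illustrative note -- not part of the upstream source]
 * The "w"/"width" and "h"/"height" commands handled above can be sent at
 * runtime, for instance via the sendcmd filter (syntax shown only as an
 * example, see the sendcmd documentation):
 *   sendcmd=c='5.0 scale w iw/2',scale=w=iw:h=ih
 * which would halve the output width five seconds into the stream.
 */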
948 
949 static const AVClass *child_class_iterate(void **iter)
950 {
951  const AVClass *c = *iter ? NULL : sws_get_class();
952  *iter = (void*)(uintptr_t)c;
953  return c;
954 }
955 
956 static void *child_next(void *obj, void *prev)
957 {
958  ScaleContext *s = obj;
959  if (!prev)
960  return s->sws_opts;
961  return NULL;
962 }
963 
964 #define OFFSET(x) offsetof(ScaleContext, x)
965 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
966 #define TFLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
967 
968 static const AVOption scale_options[] = {
969  { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
970  { "width", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
971  { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
972  { "height","Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
973  { "flags", "Flags to pass to libswscale", OFFSET(flags_str), AV_OPT_TYPE_STRING, { .str = "" }, .flags = FLAGS },
974  { "interl", "set interlacing", OFFSET(interlaced), AV_OPT_TYPE_BOOL, {.i64 = 0 }, -1, 1, FLAGS },
975  { "size", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, FLAGS },
976  { "s", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, FLAGS },
977  { "in_color_matrix", "set input YCbCr type", OFFSET(in_color_matrix), AV_OPT_TYPE_STRING, { .str = "auto" }, .flags = FLAGS, "color" },
978  { "out_color_matrix", "set output YCbCr type", OFFSET(out_color_matrix), AV_OPT_TYPE_STRING, { .str = NULL }, .flags = FLAGS, "color"},
979  { "auto", NULL, 0, AV_OPT_TYPE_CONST, { .str = "auto" }, 0, 0, FLAGS, "color" },
980  { "bt601", NULL, 0, AV_OPT_TYPE_CONST, { .str = "bt601" }, 0, 0, FLAGS, "color" },
981  { "bt470", NULL, 0, AV_OPT_TYPE_CONST, { .str = "bt470" }, 0, 0, FLAGS, "color" },
982  { "smpte170m", NULL, 0, AV_OPT_TYPE_CONST, { .str = "smpte170m" }, 0, 0, FLAGS, "color" },
983  { "bt709", NULL, 0, AV_OPT_TYPE_CONST, { .str = "bt709" }, 0, 0, FLAGS, "color" },
984  { "fcc", NULL, 0, AV_OPT_TYPE_CONST, { .str = "fcc" }, 0, 0, FLAGS, "color" },
985  { "smpte240m", NULL, 0, AV_OPT_TYPE_CONST, { .str = "smpte240m" }, 0, 0, FLAGS, "color" },
986  { "bt2020", NULL, 0, AV_OPT_TYPE_CONST, { .str = "bt2020" }, 0, 0, FLAGS, "color" },
987  { "in_range", "set input color range", OFFSET( in_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, "range" },
988  { "out_range", "set output color range", OFFSET(out_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, "range" },
989  { "auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, "range" },
990  { "unknown", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, "range" },
991  { "full", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, "range" },
992  { "limited",NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, "range" },
993  { "jpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, "range" },
994  { "mpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, "range" },
995  { "tv", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, "range" },
996  { "pc", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, "range" },
997  { "in_v_chr_pos", "input vertical chroma position in luma grid/256" , OFFSET(in_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
998  { "in_h_chr_pos", "input horizontal chroma position in luma grid/256", OFFSET(in_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
999  { "out_v_chr_pos", "output vertical chroma position in luma grid/256" , OFFSET(out_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1000  { "out_h_chr_pos", "output horizontal chroma position in luma grid/256", OFFSET(out_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1001  { "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 2, FLAGS, "force_oar" },
1002  { "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, "force_oar" },
1003  { "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, "force_oar" },
1004  { "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, "force_oar" },
1005  { "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1}, 1, 256, FLAGS },
1006  { "param0", "Scaler param 0", OFFSET(param[0]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1007  { "param1", "Scaler param 1", OFFSET(param[1]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1008  { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_INIT}, 0, EVAL_MODE_NB-1, FLAGS, "eval" },
1009  { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT}, .flags = FLAGS, .unit = "eval" },
1010  { "frame", "eval expressions during initialization and per-frame", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
1011  { NULL }
1012 };
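/* [Illustrative note -- not part of the upstream source]
 * Example combinations of the options declared above:
 *   scale=size=hd720                      same as w=1280:h=720
 *   scale=w=1280:h=720:force_original_aspect_ratio=decrease
 *                                         fit inside 1280x720, keeping the aspect
 *   scale=w=iw:h=ih:in_range=tv:out_range=pc
 *                                         convert limited range to full range
 *   scale=w=iw/2:h=ih/2:flags=lanczos     pick the scaling algorithm via sws flags
 */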
1013 
1014 static const AVClass scale_class = {
1015  .class_name = "scale(2ref)",
1016  .item_name = av_default_item_name,
1017  .option = scale_options,
1018  .version = LIBAVUTIL_VERSION_INT,
1019  .category = AV_CLASS_CATEGORY_FILTER,
1020  .child_class_iterate = child_class_iterate,
1021  .child_next = child_next,
1022 };
1023 
1024 static const AVFilterPad avfilter_vf_scale_inputs[] = {
1025  {
1026  .name = "default",
1027  .type = AVMEDIA_TYPE_VIDEO,
1028  .filter_frame = filter_frame,
1029  },
1030 };
1031 
1032 static const AVFilterPad avfilter_vf_scale_outputs[] = {
1033  {
1034  .name = "default",
1035  .type = AVMEDIA_TYPE_VIDEO,
1036  .config_props = config_props,
1037  },
1038 };
1039 
1040 const AVFilter ff_vf_scale = {
1041  .name = "scale",
1042  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format."),
1043  .preinit = preinit,
1044  .init = init,
1045  .uninit = uninit,
1046  .priv_size = sizeof(ScaleContext),
1047  .priv_class = &scale_class,
1048  FILTER_INPUTS(avfilter_vf_scale_inputs),
1049  FILTER_OUTPUTS(avfilter_vf_scale_outputs),
1050  FILTER_QUERY_FUNC(query_formats),
1051  .process_command = process_command,
1052 };
1053 
1054 static const AVFilterPad avfilter_vf_scale2ref_inputs[] = {
1055  {
1056  .name = "default",
1057  .type = AVMEDIA_TYPE_VIDEO,
1058  .filter_frame = filter_frame,
1059  },
1060  {
1061  .name = "ref",
1062  .type = AVMEDIA_TYPE_VIDEO,
1063  .filter_frame = filter_frame_ref,
1064  },
1065 };
1066 
1067 static const AVFilterPad avfilter_vf_scale2ref_outputs[] = {
1068  {
1069  .name = "default",
1070  .type = AVMEDIA_TYPE_VIDEO,
1071  .config_props = config_props,
1072  .request_frame= request_frame,
1073  },
1074  {
1075  .name = "ref",
1076  .type = AVMEDIA_TYPE_VIDEO,
1077  .config_props = config_props_ref,
1078  .request_frame= request_frame_ref,
1079  },
1080 };
1081 
1082 const AVFilter ff_vf_scale2ref = {
1083  .name = "scale2ref",
1084  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format to the given reference."),
1085  .preinit = preinit,
1086  .init = init,
1087  .uninit = uninit,
1088  .priv_size = sizeof(ScaleContext),
1089  .priv_class = &scale_class,
1090  FILTER_INPUTS(avfilter_vf_scale2ref_inputs),
1091  FILTER_OUTPUTS(avfilter_vf_scale2ref_outputs),
1092  FILTER_QUERY_FUNC(query_formats),
1093  .process_command = process_command,
1094 };