cmdutils.c
1 /*
2  * Various utilities for command line tools
3  * Copyright (c) 2000-2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include <string.h>
23 #include <stdint.h>
24 #include <stdlib.h>
25 #include <errno.h>
26 #include <math.h>
27 
28 /* Include only the enabled headers since some compilers (namely, Sun
29  Studio) will not omit unused inline functions and create undefined
30  references to libraries that are not being built. */
31 
32 #include "config.h"
33 #include "compat/va_copy.h"
34 #include "libavformat/avformat.h"
35 #include "libavfilter/avfilter.h"
36 #include "libavdevice/avdevice.h"
37 #include "libswscale/swscale.h"
38 #include "libswresample/swresample.h"
39 #include "libpostproc/postprocess.h"
40 #include "libavutil/attributes.h"
41 #include "libavutil/avassert.h"
42 #include "libavutil/avstring.h"
43 #include "libavutil/bprint.h"
44 #include "libavutil/display.h"
45 #include "libavutil/mathematics.h"
46 #include "libavutil/imgutils.h"
47 #include "libavutil/libm.h"
48 #include "libavutil/parseutils.h"
49 #include "libavutil/pixdesc.h"
50 #include "libavutil/eval.h"
51 #include "libavutil/dict.h"
52 #include "libavutil/opt.h"
53 #include "libavutil/cpu.h"
54 #include "libavutil/ffversion.h"
55 #include "libavutil/version.h"
56 #include "cmdutils.h"
57 #if HAVE_SYS_RESOURCE_H
58 #include <sys/time.h>
59 #include <sys/resource.h>
60 #endif
61 #ifdef _WIN32
62 #include <windows.h>
63 #endif
64 
65 static int init_report(const char *env);
66 
67 AVDictionary *sws_dict;
68 AVDictionary *swr_opts;
69 AVDictionary *format_opts, *codec_opts, *resample_opts;
70 
71 static FILE *report_file;
72 static int report_file_level = AV_LOG_DEBUG;
73 int hide_banner = 0;
74 
75 enum show_muxdemuxers {
76  SHOW_DEFAULT,
77  SHOW_DEMUXERS,
78  SHOW_MUXERS,
79 };
80 
81 void init_opts(void)
82 {
83  av_dict_set(&sws_dict, "flags", "bicubic", 0);
84 }
85 
86 void uninit_opts(void)
87 {
88  av_dict_free(&swr_opts);
89  av_dict_free(&sws_dict);
90  av_dict_free(&format_opts);
91  av_dict_free(&codec_opts);
92  av_dict_free(&resample_opts);
93 }
94 
95 void log_callback_help(void *ptr, int level, const char *fmt, va_list vl)
96 {
97  vfprintf(stdout, fmt, vl);
98 }
99 
100 static void log_callback_report(void *ptr, int level, const char *fmt, va_list vl)
101 {
102  va_list vl2;
103  char line[1024];
104  static int print_prefix = 1;
105 
106  va_copy(vl2, vl);
107  av_log_default_callback(ptr, level, fmt, vl);
108  av_log_format_line(ptr, level, fmt, vl2, line, sizeof(line), &print_prefix);
109  va_end(vl2);
110  if (report_file_level >= level) {
111  fputs(line, report_file);
112  fflush(report_file);
113  }
114 }
115 
116 void init_dynload(void)
117 {
118 #if HAVE_SETDLLDIRECTORY && defined(_WIN32)
119  /* Calling SetDllDirectory with the empty string (but not NULL) removes the
120  * current working directory from the DLL search path as a security precaution. */
121  SetDllDirectory("");
122 #endif
123 }
124 
125 static void (*program_exit)(int ret);
126 
127 void register_exit(void (*cb)(int ret))
128 {
129  program_exit = cb;
130 }
131 
132 void exit_program(int ret)
133 {
134  if (program_exit)
135  program_exit(ret);
136 
137  exit(ret);
138 }
139 
140 double parse_number_or_die(const char *context, const char *numstr, int type,
141  double min, double max)
142 {
143  char *tail;
144  const char *error;
145  double d = av_strtod(numstr, &tail);
146  if (*tail)
147  error = "Expected number for %s but found: %s\n";
148  else if (d < min || d > max)
149  error = "The value for %s was %s which is not within %f - %f\n";
150  else if (type == OPT_INT64 && (int64_t)d != d)
151  error = "Expected int64 for %s but found %s\n";
152  else if (type == OPT_INT && (int)d != d)
153  error = "Expected int for %s but found %s\n";
154  else
155  return d;
156  av_log(NULL, AV_LOG_FATAL, error, context, numstr, min, max);
157  exit_program(1);
158  return 0;
159 }
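/* Usage sketch (hypothetical caller): parsing a "-rate 30" argument as an int
 * in the range [1, 240] would look like
 *     int rate = parse_number_or_die("rate", "30", OPT_INT, 1, 240);
 * On a malformed or out-of-range value the function logs a fatal message and
 * calls exit_program(1), so it never returns an invalid number. */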
160 
161 int64_t parse_time_or_die(const char *context, const char *timestr,
162  int is_duration)
163 {
164  int64_t us;
165  if (av_parse_time(&us, timestr, is_duration) < 0) {
166  av_log(NULL, AV_LOG_FATAL, "Invalid %s specification for %s: %s\n",
167  is_duration ? "duration" : "date", context, timestr);
168  exit_program(1);
169  }
170  return us;
171 }
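/* Usage sketch: av_parse_time() accepts either "[-][HH:]MM:SS[.m...]" or a
 * plain number of seconds when is_duration is set, and returns microseconds,
 * so a hypothetical call
 *     int64_t t = parse_time_or_die("t", "00:01:30", 1);
 * yields 90000000 (90 seconds in AV_TIME_BASE units). */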
172 
173 void show_help_options(const OptionDef *options, const char *msg, int req_flags,
174  int rej_flags, int alt_flags)
175 {
176  const OptionDef *po;
177  int first;
178 
179  first = 1;
180  for (po = options; po->name; po++) {
181  char buf[128];
182 
183  if (((po->flags & req_flags) != req_flags) ||
184  (alt_flags && !(po->flags & alt_flags)) ||
185  (po->flags & rej_flags))
186  continue;
187 
188  if (first) {
189  printf("%s\n", msg);
190  first = 0;
191  }
192  av_strlcpy(buf, po->name, sizeof(buf));
193  if (po->argname) {
194  av_strlcat(buf, " ", sizeof(buf));
195  av_strlcat(buf, po->argname, sizeof(buf));
196  }
197  printf("-%-17s %s\n", buf, po->help);
198  }
199  printf("\n");
200 }
201 
202 void show_help_children(const AVClass *class, int flags)
203 {
204  void *iter = NULL;
205  const AVClass *child;
206  if (class->option) {
207  av_opt_show2(&class, NULL, flags, 0);
208  printf("\n");
209  }
210 
211  while (child = av_opt_child_class_iterate(class, &iter))
212  show_help_children(child, flags);
213 }
214 
215 static const OptionDef *find_option(const OptionDef *po, const char *name)
216 {
217  while (po->name) {
218  const char *end;
219  if (av_strstart(name, po->name, &end) && (!*end || *end == ':'))
220  break;
221  po++;
222  }
223  return po;
224 }
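/* Note: the ':' check above is what makes per-stream specifiers work, e.g.
 * "codec:v" or "b:a" match the OptionDef entries named "codec" and "b";
 * everything after the colon is left for the option handler to interpret. */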
225 
226 /* _WIN32 means using the windows libc - cygwin doesn't define that
227  * by default. HAVE_COMMANDLINETOARGVW is true on cygwin, even though cygwin
228  * does not provide the actual command line via GetCommandLineW(). */
229 #if HAVE_COMMANDLINETOARGVW && defined(_WIN32)
230 #include <shellapi.h>
231 /* Will be leaked on exit */
232 static char** win32_argv_utf8 = NULL;
233 static int win32_argc = 0;
234 
235 /**
236  * Prepare command line arguments for executable.
237  * For Windows - perform wide-char to UTF-8 conversion.
238  * Input arguments should be main() function arguments.
239  * @param argc_ptr Arguments number (including executable)
240  * @param argv_ptr Arguments list.
241  */
242 static void prepare_app_arguments(int *argc_ptr, char ***argv_ptr)
243 {
244  char *argstr_flat;
245  wchar_t **argv_w;
246  int i, buffsize = 0, offset = 0;
247 
248  if (win32_argv_utf8) {
249  *argc_ptr = win32_argc;
250  *argv_ptr = win32_argv_utf8;
251  return;
252  }
253 
254  win32_argc = 0;
255  argv_w = CommandLineToArgvW(GetCommandLineW(), &win32_argc);
256  if (win32_argc <= 0 || !argv_w)
257  return;
258 
259  /* determine the UTF-8 buffer size (including NULL-termination symbols) */
260  for (i = 0; i < win32_argc; i++)
261  buffsize += WideCharToMultiByte(CP_UTF8, 0, argv_w[i], -1,
262  NULL, 0, NULL, NULL);
263 
264  win32_argv_utf8 = av_mallocz(sizeof(char *) * (win32_argc + 1) + buffsize);
265  argstr_flat = (char *)win32_argv_utf8 + sizeof(char *) * (win32_argc + 1);
266  if (!win32_argv_utf8) {
267  LocalFree(argv_w);
268  return;
269  }
270 
271  for (i = 0; i < win32_argc; i++) {
272  win32_argv_utf8[i] = &argstr_flat[offset];
273  offset += WideCharToMultiByte(CP_UTF8, 0, argv_w[i], -1,
274  &argstr_flat[offset],
275  buffsize - offset, NULL, NULL);
276  }
277  win32_argv_utf8[i] = NULL;
278  LocalFree(argv_w);
279 
280  *argc_ptr = win32_argc;
281  *argv_ptr = win32_argv_utf8;
282 }
283 #else
284 static inline void prepare_app_arguments(int *argc_ptr, char ***argv_ptr)
285 {
286  /* nothing to do */
287 }
288 #endif /* HAVE_COMMANDLINETOARGVW */
289 
290 static int write_option(void *optctx, const OptionDef *po, const char *opt,
291  const char *arg)
292 {
293  /* new-style options contain an offset into optctx, old-style address of
294  * a global var*/
295  void *dst = po->flags & (OPT_OFFSET | OPT_SPEC) ?
296  (uint8_t *)optctx + po->u.off : po->u.dst_ptr;
297  int *dstcount;
298 
299  if (po->flags & OPT_SPEC) {
300  SpecifierOpt **so = dst;
301  char *p = strchr(opt, ':');
302  char *str;
303 
304  dstcount = (int *)(so + 1);
305  *so = grow_array(*so, sizeof(**so), dstcount, *dstcount + 1);
306  str = av_strdup(p ? p + 1 : "");
307  if (!str)
308  return AVERROR(ENOMEM);
309  (*so)[*dstcount - 1].specifier = str;
310  dst = &(*so)[*dstcount - 1].u;
311  }
312 
313  if (po->flags & OPT_STRING) {
314  char *str;
315  str = av_strdup(arg);
316  av_freep(dst);
317  if (!str)
318  return AVERROR(ENOMEM);
319  *(char **)dst = str;
320  } else if (po->flags & OPT_BOOL || po->flags & OPT_INT) {
321  *(int *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
322  } else if (po->flags & OPT_INT64) {
323  *(int64_t *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT64_MIN, INT64_MAX);
324  } else if (po->flags & OPT_TIME) {
325  *(int64_t *)dst = parse_time_or_die(opt, arg, 1);
326  } else if (po->flags & OPT_FLOAT) {
327  *(float *)dst = parse_number_or_die(opt, arg, OPT_FLOAT, -INFINITY, INFINITY);
328  } else if (po->flags & OPT_DOUBLE) {
329  *(double *)dst = parse_number_or_die(opt, arg, OPT_DOUBLE, -INFINITY, INFINITY);
330  } else if (po->u.func_arg) {
331  int ret = po->u.func_arg(optctx, opt, arg);
332  if (ret < 0) {
333  av_log(NULL, AV_LOG_ERROR,
334  "Failed to set value '%s' for option '%s': %s\n",
335  arg, opt, av_err2str(ret));
336  return ret;
337  }
338  }
339  if (po->flags & OPT_EXIT)
340  exit_program(0);
341 
342  return 0;
343 }
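/* Note: for OPT_SPEC options the SpecifierOpt array and its element count are
 * assumed to be laid out consecutively in the option context (dst points at
 * the array, dstcount right behind it), which is why the (int *)(so + 1) cast
 * above works; e.g. in ffmpeg a second "-metadata:s:a" occurrence grows the
 * same array, storing the specifier "s:a" alongside the value. */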
344 
345 int parse_option(void *optctx, const char *opt, const char *arg,
346  const OptionDef *options)
347 {
348  const OptionDef *po;
349  int ret;
350 
351  po = find_option(options, opt);
352  if (!po->name && opt[0] == 'n' && opt[1] == 'o') {
353  /* handle 'no' bool option */
354  po = find_option(options, opt + 2);
355  if ((po->name && (po->flags & OPT_BOOL)))
356  arg = "0";
357  } else if (po->flags & OPT_BOOL)
358  arg = "1";
359 
360  if (!po->name)
361  po = find_option(options, "default");
362  if (!po->name) {
363  av_log(NULL, AV_LOG_ERROR, "Unrecognized option '%s'\n", opt);
364  return AVERROR(EINVAL);
365  }
366  if (po->flags & HAS_ARG && !arg) {
367  av_log(NULL, AV_LOG_ERROR, "Missing argument for option '%s'\n", opt);
368  return AVERROR(EINVAL);
369  }
370 
371  ret = write_option(optctx, po, opt, arg);
372  if (ret < 0)
373  return ret;
374 
375  return !!(po->flags & HAS_ARG);
376 }
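/* Usage sketch: the return value is the number of extra argv slots consumed,
 * so a hypothetical caller advances its index accordingly:
 *     int used = parse_option(ctx, "b:v", "1M", options);   // used == 1
 *     if (used >= 0) optindex += used;
 * (parse_options() below does exactly this.) */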
377 
378 void parse_options(void *optctx, int argc, char **argv, const OptionDef *options,
379  void (*parse_arg_function)(void *, const char*))
380 {
381  const char *opt;
382  int optindex, handleoptions = 1, ret;
383 
384  /* perform system-dependent conversions for arguments list */
385  prepare_app_arguments(&argc, &argv);
386 
387  /* parse options */
388  optindex = 1;
389  while (optindex < argc) {
390  opt = argv[optindex++];
391 
392  if (handleoptions && opt[0] == '-' && opt[1] != '\0') {
393  if (opt[1] == '-' && opt[2] == '\0') {
394  handleoptions = 0;
395  continue;
396  }
397  opt++;
398 
399  if ((ret = parse_option(optctx, opt, argv[optindex], options)) < 0)
400  exit_program(1);
401  optindex += ret;
402  } else {
403  if (parse_arg_function)
404  parse_arg_function(optctx, opt);
405  }
406  }
407 }
408 
409 int parse_optgroup(void *optctx, OptionGroup *g)
410 {
411  int i, ret;
412 
413  av_log(NULL, AV_LOG_DEBUG, "Parsing a group of options: %s %s.\n",
414  g->group_def->name, g->arg);
415 
416  for (i = 0; i < g->nb_opts; i++) {
417  Option *o = &g->opts[i];
418 
419  if (g->group_def->flags &&
420  !(g->group_def->flags & o->opt->flags)) {
421  av_log(NULL, AV_LOG_ERROR, "Option %s (%s) cannot be applied to "
422  "%s %s -- you are trying to apply an input option to an "
423  "output file or vice versa. Move this option before the "
424  "file it belongs to.\n", o->key, o->opt->help,
425  g->group_def->name, g->arg);
426  return AVERROR(EINVAL);
427  }
428 
429  av_log(NULL, AV_LOG_DEBUG, "Applying option %s (%s) with argument %s.\n",
430  o->key, o->opt->help, o->val);
431 
432  ret = write_option(optctx, o->opt, o->key, o->val);
433  if (ret < 0)
434  return ret;
435  }
436 
437  av_log(NULL, AV_LOG_DEBUG, "Successfully parsed a group of options.\n");
438 
439  return 0;
440 }
441 
442 int locate_option(int argc, char **argv, const OptionDef *options,
443  const char *optname)
444 {
445  const OptionDef *po;
446  int i;
447 
448  for (i = 1; i < argc; i++) {
449  const char *cur_opt = argv[i];
450 
451  if (*cur_opt++ != '-')
452  continue;
453 
454  po = find_option(options, cur_opt);
455  if (!po->name && cur_opt[0] == 'n' && cur_opt[1] == 'o')
456  po = find_option(options, cur_opt + 2);
457 
458  if ((!po->name && !strcmp(cur_opt, optname)) ||
459  (po->name && !strcmp(optname, po->name)))
460  return i;
461 
462  if (!po->name || po->flags & HAS_ARG)
463  i++;
464  }
465  return 0;
466 }
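/* Usage sketch: locate_option() only scans argv for a literal "-name" token,
 * without writing any option values, which is why it is safe to call before
 * the real parsing pass, e.g.
 *     if (locate_option(argc, argv, options, "version")) ...
 * as parse_loglevel() and show_banner() below do for "loglevel", "report",
 * "hide_banner" and "version". */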
467 
468 static void dump_argument(const char *a)
469 {
470  const unsigned char *p;
471 
472  for (p = a; *p; p++)
473  if (!((*p >= '+' && *p <= ':') || (*p >= '@' && *p <= 'Z') ||
474  *p == '_' || (*p >= 'a' && *p <= 'z')))
475  break;
476  if (!*p) {
477  fputs(a, report_file);
478  return;
479  }
480  fputc('"', report_file);
481  for (p = a; *p; p++) {
482  if (*p == '\\' || *p == '"' || *p == '$' || *p == '`')
483  fprintf(report_file, "\\%c", *p);
484  else if (*p < ' ' || *p > '~')
485  fprintf(report_file, "\\x%02x", *p);
486  else
487  fputc(*p, report_file);
488  }
489  fputc('"', report_file);
490 }
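/* Example of the quoting above: an argument such as  drawtext=text='a b'
 * contains characters outside the safe set, so it is written to the report as
 *     "drawtext=text='a b'"
 * with '\', '"', '$' and '`' backslash-escaped and non-printable bytes
 * emitted as \xNN. */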
491 
492 static void check_options(const OptionDef *po)
493 {
494  while (po->name) {
495  if (po->flags & OPT_PERFILE)
496  av_assert0(po->flags & (OPT_INPUT | OPT_OUTPUT));
497  po++;
498  }
499 }
500 
501 void parse_loglevel(int argc, char **argv, const OptionDef *options)
502 {
503  int idx = locate_option(argc, argv, options, "loglevel");
504  const char *env;
505 
506  check_options(options);
507 
508  if (!idx)
509  idx = locate_option(argc, argv, options, "v");
510  if (idx && argv[idx + 1])
511  opt_loglevel(NULL, "loglevel", argv[idx + 1]);
512  idx = locate_option(argc, argv, options, "report");
513  if ((env = getenv("FFREPORT")) || idx) {
514  init_report(env);
515  if (report_file) {
516  int i;
517  fprintf(report_file, "Command line:\n");
518  for (i = 0; i < argc; i++) {
519  dump_argument(argv[i]);
520  fputc(i < argc - 1 ? ' ' : '\n', report_file);
521  }
522  fflush(report_file);
523  }
524  }
525  idx = locate_option(argc, argv, options, "hide_banner");
526  if (idx)
527  hide_banner = 1;
528 }
529 
530 static const AVOption *opt_find(void *obj, const char *name, const char *unit,
531  int opt_flags, int search_flags)
532 {
533  const AVOption *o = av_opt_find(obj, name, unit, opt_flags, search_flags);
534  if(o && !o->flags)
535  return NULL;
536  return o;
537 }
538 
539 #define FLAGS (o->type == AV_OPT_TYPE_FLAGS && (arg[0]=='-' || arg[0]=='+')) ? AV_DICT_APPEND : 0
540 int opt_default(void *optctx, const char *opt, const char *arg)
541 {
542  const AVOption *o;
543  int consumed = 0;
544  char opt_stripped[128];
545  const char *p;
546  const AVClass *cc = avcodec_get_class(), *fc = avformat_get_class();
547 #if CONFIG_SWSCALE
548  const AVClass *sc = sws_get_class();
549 #endif
550 #if CONFIG_SWRESAMPLE
551  const AVClass *swr_class = swr_get_class();
552 #endif
553 
554  if (!strcmp(opt, "debug") || !strcmp(opt, "fdebug"))
555  av_log_set_level(AV_LOG_DEBUG);
556 
557  if (!(p = strchr(opt, ':')))
558  p = opt + strlen(opt);
559  av_strlcpy(opt_stripped, opt, FFMIN(sizeof(opt_stripped), p - opt + 1));
560 
561  if ((o = opt_find(&cc, opt_stripped, NULL, 0,
562  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ)) ||
563  ((opt[0] == 'v' || opt[0] == 'a' || opt[0] == 's') &&
564  (o = opt_find(&cc, opt + 1, NULL, 0, AV_OPT_SEARCH_FAKE_OBJ)))) {
565  av_dict_set(&codec_opts, opt, arg, FLAGS);
566  consumed = 1;
567  }
568  if ((o = opt_find(&fc, opt, NULL, 0,
569  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
570  av_dict_set(&format_opts, opt, arg, FLAGS);
571  if (consumed)
572  av_log(NULL, AV_LOG_VERBOSE, "Routing option %s to both codec and muxer layer\n", opt);
573  consumed = 1;
574  }
575 #if CONFIG_SWSCALE
576  if (!consumed && (o = opt_find(&sc, opt, NULL, 0,
577  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
578  struct SwsContext *sws = sws_alloc_context();
579  int ret = av_opt_set(sws, opt, arg, 0);
580  sws_freeContext(sws);
581  if (!strcmp(opt, "srcw") || !strcmp(opt, "srch") ||
582  !strcmp(opt, "dstw") || !strcmp(opt, "dsth") ||
583  !strcmp(opt, "src_format") || !strcmp(opt, "dst_format")) {
584  av_log(NULL, AV_LOG_ERROR, "Directly using swscale dimensions/format options is not supported, please use the -s or -pix_fmt options\n");
585  return AVERROR(EINVAL);
586  }
587  if (ret < 0) {
588  av_log(NULL, AV_LOG_ERROR, "Error setting option %s.\n", opt);
589  return ret;
590  }
591 
592  av_dict_set(&sws_dict, opt, arg, FLAGS);
593 
594  consumed = 1;
595  }
596 #else
597  if (!consumed && !strcmp(opt, "sws_flags")) {
598  av_log(NULL, AV_LOG_WARNING, "Ignoring %s %s, due to disabled swscale\n", opt, arg);
599  consumed = 1;
600  }
601 #endif
602 #if CONFIG_SWRESAMPLE
603  if (!consumed && (o=opt_find(&swr_class, opt, NULL, 0,
604  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
605  struct SwrContext *swr = swr_alloc();
606  int ret = av_opt_set(swr, opt, arg, 0);
607  swr_free(&swr);
608  if (ret < 0) {
609  av_log(NULL, AV_LOG_ERROR, "Error setting option %s.\n", opt);
610  return ret;
611  }
612  av_dict_set(&swr_opts, opt, arg, FLAGS);
613  consumed = 1;
614  }
615 #endif
616 
617  if (consumed)
618  return 0;
619  return AVERROR_OPTION_NOT_FOUND;
620 }
621 
622 /*
623  * Check whether given option is a group separator.
624  *
625  * @return index of the group definition that matched or -1 if none
626  */
627 static int match_group_separator(const OptionGroupDef *groups, int nb_groups,
628  const char *opt)
629 {
630  int i;
631 
632  for (i = 0; i < nb_groups; i++) {
633  const OptionGroupDef *p = &groups[i];
634  if (p->sep && !strcmp(p->sep, opt))
635  return i;
636  }
637 
638  return -1;
639 }
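/* Example: with group definitions like the ones ffmpeg uses, where the entry
 * for input files has .sep = "i", a command-line token "i" (i.e. "-i" with the
 * leading dash already stripped) matches that group, and finish_group() is
 * then called for the following filename. */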
640 
641 /*
642  * Finish parsing an option group.
643  *
644  * @param group_idx which group definition should this group belong to
645  * @param arg argument of the group delimiting option
646  */
647 static void finish_group(OptionParseContext *octx, int group_idx,
648  const char *arg)
649 {
650  OptionGroupList *l = &octx->groups[group_idx];
651  OptionGroup *g;
652 
653  GROW_ARRAY(l->groups, l->nb_groups);
654  g = &l->groups[l->nb_groups - 1];
655 
656  *g = octx->cur_group;
657  g->arg = arg;
658  g->group_def = l->group_def;
659  g->sws_dict = sws_dict;
660  g->swr_opts = swr_opts;
661  g->codec_opts = codec_opts;
662  g->format_opts = format_opts;
663  g->resample_opts = resample_opts;
664 
665  codec_opts = NULL;
666  format_opts = NULL;
667  resample_opts = NULL;
668  sws_dict = NULL;
669  swr_opts = NULL;
670  init_opts();
671 
672  memset(&octx->cur_group, 0, sizeof(octx->cur_group));
673 }
674 
675 /*
676  * Add an option instance to currently parsed group.
677  */
678 static void add_opt(OptionParseContext *octx, const OptionDef *opt,
679  const char *key, const char *val)
680 {
681  int global = !(opt->flags & (OPT_PERFILE | OPT_SPEC | OPT_OFFSET));
682  OptionGroup *g = global ? &octx->global_opts : &octx->cur_group;
683 
684  GROW_ARRAY(g->opts, g->nb_opts);
685  g->opts[g->nb_opts - 1].opt = opt;
686  g->opts[g->nb_opts - 1].key = key;
687  g->opts[g->nb_opts - 1].val = val;
688 }
689 
690 static void init_parse_context(OptionParseContext *octx,
691  const OptionGroupDef *groups, int nb_groups)
692 {
693  static const OptionGroupDef global_group = { "global" };
694  int i;
695 
696  memset(octx, 0, sizeof(*octx));
697 
698  octx->nb_groups = nb_groups;
699  octx->groups = av_mallocz_array(octx->nb_groups, sizeof(*octx->groups));
700  if (!octx->groups)
701  exit_program(1);
702 
703  for (i = 0; i < octx->nb_groups; i++)
704  octx->groups[i].group_def = &groups[i];
705 
706  octx->global_opts.group_def = &global_group;
707  octx->global_opts.arg = "";
708 
709  init_opts();
710 }
711 
712 void uninit_parse_context(OptionParseContext *octx)
713 {
714  int i, j;
715 
716  for (i = 0; i < octx->nb_groups; i++) {
717  OptionGroupList *l = &octx->groups[i];
718 
719  for (j = 0; j < l->nb_groups; j++) {
720  av_freep(&l->groups[j].opts);
721  av_dict_free(&l->groups[j].codec_opts);
722  av_dict_free(&l->groups[j].format_opts);
723  av_dict_free(&l->groups[j].resample_opts);
724 
725  av_dict_free(&l->groups[j].sws_dict);
726  av_dict_free(&l->groups[j].swr_opts);
727  }
728  av_freep(&l->groups);
729  }
730  av_freep(&octx->groups);
731 
732  av_freep(&octx->cur_group.opts);
733  av_freep(&octx->global_opts.opts);
734 
735  uninit_opts();
736 }
737 
738 int split_commandline(OptionParseContext *octx, int argc, char *argv[],
739  const OptionDef *options,
740  const OptionGroupDef *groups, int nb_groups)
741 {
742  int optindex = 1;
743  int dashdash = -2;
744 
745  /* perform system-dependent conversions for arguments list */
746  prepare_app_arguments(&argc, &argv);
747 
748  init_parse_context(octx, groups, nb_groups);
749  av_log(NULL, AV_LOG_DEBUG, "Splitting the commandline.\n");
750 
751  while (optindex < argc) {
752  const char *opt = argv[optindex++], *arg;
753  const OptionDef *po;
754  int ret;
755 
756  av_log(NULL, AV_LOG_DEBUG, "Reading option '%s' ...", opt);
757 
758  if (opt[0] == '-' && opt[1] == '-' && !opt[2]) {
759  dashdash = optindex;
760  continue;
761  }
762  /* unnamed group separators, e.g. output filename */
763  if (opt[0] != '-' || !opt[1] || dashdash+1 == optindex) {
764  finish_group(octx, 0, opt);
765  av_log(NULL, AV_LOG_DEBUG, " matched as %s.\n", groups[0].name);
766  continue;
767  }
768  opt++;
769 
770 #define GET_ARG(arg) \
771 do { \
772  arg = argv[optindex++]; \
773  if (!arg) { \
774  av_log(NULL, AV_LOG_ERROR, "Missing argument for option '%s'.\n", opt);\
775  return AVERROR(EINVAL); \
776  } \
777 } while (0)
778 
779  /* named group separators, e.g. -i */
780  if ((ret = match_group_separator(groups, nb_groups, opt)) >= 0) {
781  GET_ARG(arg);
782  finish_group(octx, ret, arg);
783  av_log(NULL, AV_LOG_DEBUG, " matched as %s with argument '%s'.\n",
784  groups[ret].name, arg);
785  continue;
786  }
787 
788  /* normal options */
789  po = find_option(options, opt);
790  if (po->name) {
791  if (po->flags & OPT_EXIT) {
792  /* optional argument, e.g. -h */
793  arg = argv[optindex++];
794  } else if (po->flags & HAS_ARG) {
795  GET_ARG(arg);
796  } else {
797  arg = "1";
798  }
799 
800  add_opt(octx, po, opt, arg);
801  av_log(NULL, AV_LOG_DEBUG, " matched as option '%s' (%s) with "
802  "argument '%s'.\n", po->name, po->help, arg);
803  continue;
804  }
805 
806  /* AVOptions */
807  if (argv[optindex]) {
808  ret = opt_default(NULL, opt, argv[optindex]);
809  if (ret >= 0) {
810  av_log(NULL, AV_LOG_DEBUG, " matched as AVOption '%s' with "
811  "argument '%s'.\n", opt, argv[optindex]);
812  optindex++;
813  continue;
814  } else if (ret != AVERROR_OPTION_NOT_FOUND) {
815  av_log(NULL, AV_LOG_ERROR, "Error parsing option '%s' "
816  "with argument '%s'.\n", opt, argv[optindex]);
817  return ret;
818  }
819  }
820 
821  /* boolean -nofoo options */
822  if (opt[0] == 'n' && opt[1] == 'o' &&
823  (po = find_option(options, opt + 2)) &&
824  po->name && po->flags & OPT_BOOL) {
825  add_opt(octx, po, opt, "0");
826  av_log(NULL, AV_LOG_DEBUG, " matched as option '%s' (%s) with "
827  "argument 0.\n", po->name, po->help);
828  continue;
829  }
830 
831  av_log(NULL, AV_LOG_ERROR, "Unrecognized option '%s'.\n", opt);
832  return AVERROR_OPTION_NOT_FOUND;
833  }
834 
835  if (octx->cur_group.nb_opts || codec_opts || format_opts || resample_opts)
836  av_log(NULL, AV_LOG_WARNING, "Trailing option(s) found in the "
837  "command: may be ignored.\n");
838 
839  av_log(NULL, AV_LOG_DEBUG, "Finished splitting the commandline.\n");
840 
841  return 0;
842 }
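/* Worked example (assuming ffmpeg-style group definitions, where groups[0] is
 * the output-file group and "-i" separates input files): the command line
 *     ffmpeg -v verbose -i in.mkv -c:v libx264 out.mp4
 * is split into a global group holding "v", an input group for "in.mkv", and
 * an output group for "out.mp4" that carries the per-file option "c:v". */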
843 
844 int opt_cpuflags(void *optctx, const char *opt, const char *arg)
845 {
846  int ret;
847  unsigned flags = av_get_cpu_flags();
848 
849  if ((ret = av_parse_cpu_caps(&flags, arg)) < 0)
850  return ret;
851 
852  av_force_cpu_flags(flags);
853  return 0;
854 }
855 
856 int opt_loglevel(void *optctx, const char *opt, const char *arg)
857 {
858  const struct { const char *name; int level; } log_levels[] = {
859  { "quiet" , AV_LOG_QUIET },
860  { "panic" , AV_LOG_PANIC },
861  { "fatal" , AV_LOG_FATAL },
862  { "error" , AV_LOG_ERROR },
863  { "warning", AV_LOG_WARNING },
864  { "info" , AV_LOG_INFO },
865  { "verbose", AV_LOG_VERBOSE },
866  { "debug" , AV_LOG_DEBUG },
867  { "trace" , AV_LOG_TRACE },
868  };
869  const char *token;
870  char *tail;
871  int flags = av_log_get_flags();
872  int level = av_log_get_level();
873  int cmd, i = 0;
874 
875  av_assert0(arg);
876  while (*arg) {
877  token = arg;
878  if (*token == '+' || *token == '-') {
879  cmd = *token++;
880  } else {
881  cmd = 0;
882  }
883  if (!i && !cmd) {
884  flags = 0; /* missing relative prefix, build absolute value */
885  }
886  if (!strncmp(token, "repeat", 6)) {
887  if (cmd == '-') {
888  flags |= AV_LOG_SKIP_REPEATED;
889  } else {
890  flags &= ~AV_LOG_SKIP_REPEATED;
891  }
892  arg = token + 6;
893  } else if (!strncmp(token, "level", 5)) {
894  if (cmd == '-') {
895  flags &= ~AV_LOG_PRINT_LEVEL;
896  } else {
897  flags |= AV_LOG_PRINT_LEVEL;
898  }
899  arg = token + 5;
900  } else {
901  break;
902  }
903  i++;
904  }
905  if (!*arg) {
906  goto end;
907  } else if (*arg == '+') {
908  arg++;
909  } else if (!i) {
910  flags = av_log_get_flags(); /* level value without prefix, reset flags */
911  }
912 
913  for (i = 0; i < FF_ARRAY_ELEMS(log_levels); i++) {
914  if (!strcmp(log_levels[i].name, arg)) {
915  level = log_levels[i].level;
916  goto end;
917  }
918  }
919 
920  level = strtol(arg, &tail, 10);
921  if (*tail) {
922  av_log(NULL, AV_LOG_FATAL, "Invalid loglevel \"%s\". "
923  "Possible levels are numbers or:\n", arg);
924  for (i = 0; i < FF_ARRAY_ELEMS(log_levels); i++)
925  av_log(NULL, AV_LOG_FATAL, "\"%s\"\n", log_levels[i].name);
926  exit_program(1);
927  }
928 
929 end:
930  av_log_set_flags(flags);
931  av_log_set_level(level);
932  return 0;
933 }
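/* Example: "-loglevel repeat+level+verbose" clears AV_LOG_SKIP_REPEATED, sets
 * AV_LOG_PRINT_LEVEL and then selects AV_LOG_VERBOSE, while a bare
 * "-loglevel 32" picks AV_LOG_INFO numerically. */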
934 
935 static void expand_filename_template(AVBPrint *bp, const char *template,
936  struct tm *tm)
937 {
938  int c;
939 
940  while ((c = *(template++))) {
941  if (c == '%') {
942  if (!(c = *(template++)))
943  break;
944  switch (c) {
945  case 'p':
946  av_bprintf(bp, "%s", program_name);
947  break;
948  case 't':
949  av_bprintf(bp, "%04d%02d%02d-%02d%02d%02d",
950  tm->tm_year + 1900, tm->tm_mon + 1, tm->tm_mday,
951  tm->tm_hour, tm->tm_min, tm->tm_sec);
952  break;
953  case '%':
954  av_bprint_chars(bp, c, 1);
955  break;
956  }
957  } else {
958  av_bprint_chars(bp, c, 1);
959  }
960  }
961 }
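/* Example: with the default FFREPORT template "%p-%t.log" and a program name
 * of "ffmpeg", a report started at 2024-05-01 09:30:00 would be named
 * "ffmpeg-20240501-093000.log"; a literal percent sign is written as "%%". */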
962 
963 static int init_report(const char *env)
964 {
965  char *filename_template = NULL;
966  char *key, *val;
967  int ret, count = 0;
968  int prog_loglevel, envlevel = 0;
969  time_t now;
970  struct tm *tm;
971  AVBPrint filename;
972 
973  if (report_file) /* already opened */
974  return 0;
975  time(&now);
976  tm = localtime(&now);
977 
978  while (env && *env) {
979  if ((ret = av_opt_get_key_value(&env, "=", ":", 0, &key, &val)) < 0) {
980  if (count)
981  av_log(NULL, AV_LOG_ERROR,
982  "Failed to parse FFREPORT environment variable: %s\n",
983  av_err2str(ret));
984  break;
985  }
986  if (*env)
987  env++;
988  count++;
989  if (!strcmp(key, "file")) {
990  av_free(filename_template);
991  filename_template = val;
992  val = NULL;
993  } else if (!strcmp(key, "level")) {
994  char *tail;
995  report_file_level = strtol(val, &tail, 10);
996  if (*tail) {
997  av_log(NULL, AV_LOG_FATAL, "Invalid report file level\n");
998  exit_program(1);
999  }
1000  envlevel = 1;
1001  } else {
1002  av_log(NULL, AV_LOG_ERROR, "Unknown key '%s' in FFREPORT\n", key);
1003  }
1004  av_free(val);
1005  av_free(key);
1006  }
1007 
1008  av_bprint_init(&filename, 0, AV_BPRINT_SIZE_AUTOMATIC);
1009  expand_filename_template(&filename,
1010  av_x_if_null(filename_template, "%p-%t.log"), tm);
1011  av_free(filename_template);
1012  if (!av_bprint_is_complete(&filename)) {
1013  av_log(NULL, AV_LOG_ERROR, "Out of memory building report file name\n");
1014  return AVERROR(ENOMEM);
1015  }
1016 
1017  prog_loglevel = av_log_get_level();
1018  if (!envlevel)
1019  report_file_level = FFMAX(report_file_level, prog_loglevel);
1020 
1021  report_file = fopen(filename.str, "w");
1022  if (!report_file) {
1023  int ret = AVERROR(errno);
1024  av_log(NULL, AV_LOG_ERROR, "Failed to open report \"%s\": %s\n",
1025  filename.str, strerror(errno));
1026  return ret;
1027  }
1028  av_log_set_callback(log_callback_report);
1029  av_log(NULL, AV_LOG_INFO,
1030  "%s started on %04d-%02d-%02d at %02d:%02d:%02d\n"
1031  "Report written to \"%s\"\n"
1032  "Log level: %d\n",
1033  program_name,
1034  tm->tm_year + 1900, tm->tm_mon + 1, tm->tm_mday,
1035  tm->tm_hour, tm->tm_min, tm->tm_sec,
1036  filename.str, report_file_level);
1037  av_bprint_finalize(&filename, NULL);
1038  return 0;
1039 }
1040 
1041 int opt_report(void *optctx, const char *opt, const char *arg)
1042 {
1043  return init_report(NULL);
1044 }
1045 
1046 int opt_max_alloc(void *optctx, const char *opt, const char *arg)
1047 {
1048  char *tail;
1049  size_t max;
1050 
1051  max = strtol(arg, &tail, 10);
1052  if (*tail) {
1053  av_log(NULL, AV_LOG_FATAL, "Invalid max_alloc \"%s\".\n", arg);
1054  exit_program(1);
1055  }
1056  av_max_alloc(max);
1057  return 0;
1058 }
1059 
1060 int opt_timelimit(void *optctx, const char *opt, const char *arg)
1061 {
1062 #if HAVE_SETRLIMIT
1063  int lim = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
1064  struct rlimit rl = { lim, lim + 1 };
1065  if (setrlimit(RLIMIT_CPU, &rl))
1066  perror("setrlimit");
1067 #else
1068  av_log(NULL, AV_LOG_WARNING, "-%s not implemented on this OS\n", opt);
1069 #endif
1070  return 0;
1071 }
1072 
1073 void print_error(const char *filename, int err)
1074 {
1075  char errbuf[128];
1076  const char *errbuf_ptr = errbuf;
1077 
1078  if (av_strerror(err, errbuf, sizeof(errbuf)) < 0)
1079  errbuf_ptr = strerror(AVUNERROR(err));
1080  av_log(NULL, AV_LOG_ERROR, "%s: %s\n", filename, errbuf_ptr);
1081 }
1082 
1083 static int warned_cfg = 0;
1084 
1085 #define INDENT 1
1086 #define SHOW_VERSION 2
1087 #define SHOW_CONFIG 4
1088 #define SHOW_COPYRIGHT 8
1089 
1090 #define PRINT_LIB_INFO(libname, LIBNAME, flags, level) \
1091  if (CONFIG_##LIBNAME) { \
1092  const char *indent = flags & INDENT? " " : ""; \
1093  if (flags & SHOW_VERSION) { \
1094  unsigned int version = libname##_version(); \
1095  av_log(NULL, level, \
1096  "%slib%-11s %2d.%3d.%3d / %2d.%3d.%3d\n", \
1097  indent, #libname, \
1098  LIB##LIBNAME##_VERSION_MAJOR, \
1099  LIB##LIBNAME##_VERSION_MINOR, \
1100  LIB##LIBNAME##_VERSION_MICRO, \
1101  AV_VERSION_MAJOR(version), AV_VERSION_MINOR(version),\
1102  AV_VERSION_MICRO(version)); \
1103  } \
1104  if (flags & SHOW_CONFIG) { \
1105  const char *cfg = libname##_configuration(); \
1106  if (strcmp(FFMPEG_CONFIGURATION, cfg)) { \
1107  if (!warned_cfg) { \
1108  av_log(NULL, level, \
1109  "%sWARNING: library configuration mismatch\n", \
1110  indent); \
1111  warned_cfg = 1; \
1112  } \
1113  av_log(NULL, level, "%s%-11s configuration: %s\n", \
1114  indent, #libname, cfg); \
1115  } \
1116  } \
1117  } \
1118 
1119 static void print_all_libs_info(int flags, int level)
1120 {
1121  PRINT_LIB_INFO(avutil, AVUTIL, flags, level);
1122  PRINT_LIB_INFO(avcodec, AVCODEC, flags, level);
1123  PRINT_LIB_INFO(avformat, AVFORMAT, flags, level);
1124  PRINT_LIB_INFO(avdevice, AVDEVICE, flags, level);
1125  PRINT_LIB_INFO(avfilter, AVFILTER, flags, level);
1126  PRINT_LIB_INFO(swscale, SWSCALE, flags, level);
1127  PRINT_LIB_INFO(swresample, SWRESAMPLE, flags, level);
1128  PRINT_LIB_INFO(postproc, POSTPROC, flags, level);
1129 }
1130 
1131 static void print_program_info(int flags, int level)
1132 {
1133  const char *indent = flags & INDENT? " " : "";
1134 
1135  av_log(NULL, level, "%s version " FFMPEG_VERSION, program_name);
1136  if (flags & SHOW_COPYRIGHT)
1137  av_log(NULL, level, " Copyright (c) %d-%d the FFmpeg developers",
1138  program_birth_year, CONFIG_THIS_YEAR);
1139  av_log(NULL, level, "\n");
1140  av_log(NULL, level, "%sbuilt with %s\n", indent, CC_IDENT);
1141 
1142  av_log(NULL, level, "%sconfiguration: " FFMPEG_CONFIGURATION "\n", indent);
1143 }
1144 
1145 static void print_buildconf(int flags, int level)
1146 {
1147  const char *indent = flags & INDENT ? " " : "";
1148  char str[] = { FFMPEG_CONFIGURATION };
1149  char *conflist, *remove_tilde, *splitconf;
1150 
1151  // Change all the ' --' strings to '~--' so that
1152  // they can be identified as tokens.
1153  while ((conflist = strstr(str, " --")) != NULL) {
1154  conflist[0] = '~';
1155  }
1156 
1157  // Compensate for the weirdness this would cause
1158  // when passing 'pkg-config --static'.
1159  while ((remove_tilde = strstr(str, "pkg-config~")) != NULL) {
1160  remove_tilde[sizeof("pkg-config~") - 2] = ' ';
1161  }
1162 
1163  splitconf = strtok(str, "~");
1164  av_log(NULL, level, "\n%sconfiguration:\n", indent);
1165  while (splitconf != NULL) {
1166  av_log(NULL, level, "%s%s%s\n", indent, indent, splitconf);
1167  splitconf = strtok(NULL, "~");
1168  }
1169 }
1170 
1171 void show_banner(int argc, char **argv, const OptionDef *options)
1172 {
1173  int idx = locate_option(argc, argv, options, "version");
1174  if (hide_banner || idx)
1175  return;
1176 
1177  print_program_info (INDENT|SHOW_COPYRIGHT, AV_LOG_INFO);
1178  print_all_libs_info(INDENT|SHOW_CONFIG,  AV_LOG_INFO);
1179  print_all_libs_info(INDENT|SHOW_VERSION, AV_LOG_INFO);
1180 }
1181 
1182 int show_version(void *optctx, const char *opt, const char *arg)
1183 {
1184  av_log_set_callback(log_callback_help);
1185  print_program_info (SHOW_COPYRIGHT, AV_LOG_INFO);
1186  print_all_libs_info(SHOW_VERSION, AV_LOG_INFO);
1187 
1188  return 0;
1189 }
1190 
1191 int show_buildconf(void *optctx, const char *opt, const char *arg)
1192 {
1193  av_log_set_callback(log_callback_help);
1194  print_buildconf(INDENT, AV_LOG_INFO);
1195 
1196  return 0;
1197 }
1198 
1199 int show_license(void *optctx, const char *opt, const char *arg)
1200 {
1201 #if CONFIG_NONFREE
1202  printf(
1203  "This version of %s has nonfree parts compiled in.\n"
1204  "Therefore it is not legally redistributable.\n",
1205  program_name );
1206 #elif CONFIG_GPLV3
1207  printf(
1208  "%s is free software; you can redistribute it and/or modify\n"
1209  "it under the terms of the GNU General Public License as published by\n"
1210  "the Free Software Foundation; either version 3 of the License, or\n"
1211  "(at your option) any later version.\n"
1212  "\n"
1213  "%s is distributed in the hope that it will be useful,\n"
1214  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1215  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
1216  "GNU General Public License for more details.\n"
1217  "\n"
1218  "You should have received a copy of the GNU General Public License\n"
1219  "along with %s. If not, see <http://www.gnu.org/licenses/>.\n",
1220  program_name, program_name, program_name);
1221 #elif CONFIG_GPL
1222  printf(
1223  "%s is free software; you can redistribute it and/or modify\n"
1224  "it under the terms of the GNU General Public License as published by\n"
1225  "the Free Software Foundation; either version 2 of the License, or\n"
1226  "(at your option) any later version.\n"
1227  "\n"
1228  "%s is distributed in the hope that it will be useful,\n"
1229  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1230  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
1231  "GNU General Public License for more details.\n"
1232  "\n"
1233  "You should have received a copy of the GNU General Public License\n"
1234  "along with %s; if not, write to the Free Software\n"
1235  "Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n",
1236  program_name, program_name, program_name);
1237 #elif CONFIG_LGPLV3
1238  printf(
1239  "%s is free software; you can redistribute it and/or modify\n"
1240  "it under the terms of the GNU Lesser General Public License as published by\n"
1241  "the Free Software Foundation; either version 3 of the License, or\n"
1242  "(at your option) any later version.\n"
1243  "\n"
1244  "%s is distributed in the hope that it will be useful,\n"
1245  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1246  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
1247  "GNU Lesser General Public License for more details.\n"
1248  "\n"
1249  "You should have received a copy of the GNU Lesser General Public License\n"
1250  "along with %s. If not, see <http://www.gnu.org/licenses/>.\n",
1251  program_name, program_name, program_name);
1252 #else
1253  printf(
1254  "%s is free software; you can redistribute it and/or\n"
1255  "modify it under the terms of the GNU Lesser General Public\n"
1256  "License as published by the Free Software Foundation; either\n"
1257  "version 2.1 of the License, or (at your option) any later version.\n"
1258  "\n"
1259  "%s is distributed in the hope that it will be useful,\n"
1260  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1261  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n"
1262  "Lesser General Public License for more details.\n"
1263  "\n"
1264  "You should have received a copy of the GNU Lesser General Public\n"
1265  "License along with %s; if not, write to the Free Software\n"
1266  "Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n",
1267  program_name, program_name, program_name);
1268 #endif
1269 
1270  return 0;
1271 }
1272 
1273 static int is_device(const AVClass *avclass)
1274 {
1275  if (!avclass)
1276  return 0;
1277  return AV_IS_INPUT_DEVICE(avclass->category) || AV_IS_OUTPUT_DEVICE(avclass->category);
1278 }
1279 
1280 static int show_formats_devices(void *optctx, const char *opt, const char *arg, int device_only, int muxdemuxers)
1281 {
1282  void *ifmt_opaque = NULL;
1283  const AVInputFormat *ifmt = NULL;
1284  void *ofmt_opaque = NULL;
1285  const AVOutputFormat *ofmt = NULL;
1286  const char *last_name;
1287  int is_dev;
1288 
1289  printf("%s\n"
1290  " D. = Demuxing supported\n"
1291  " .E = Muxing supported\n"
1292  " --\n", device_only ? "Devices:" : "File formats:");
1293  last_name = "000";
1294  for (;;) {
1295  int decode = 0;
1296  int encode = 0;
1297  const char *name = NULL;
1298  const char *long_name = NULL;
1299 
1300  if (muxdemuxers !=SHOW_DEMUXERS) {
1301  ofmt_opaque = NULL;
1302  while ((ofmt = av_muxer_iterate(&ofmt_opaque))) {
1303  is_dev = is_device(ofmt->priv_class);
1304  if (!is_dev && device_only)
1305  continue;
1306  if ((!name || strcmp(ofmt->name, name) < 0) &&
1307  strcmp(ofmt->name, last_name) > 0) {
1308  name = ofmt->name;
1309  long_name = ofmt->long_name;
1310  encode = 1;
1311  }
1312  }
1313  }
1314  if (muxdemuxers != SHOW_MUXERS) {
1315  ifmt_opaque = NULL;
1316  while ((ifmt = av_demuxer_iterate(&ifmt_opaque))) {
1317  is_dev = is_device(ifmt->priv_class);
1318  if (!is_dev && device_only)
1319  continue;
1320  if ((!name || strcmp(ifmt->name, name) < 0) &&
1321  strcmp(ifmt->name, last_name) > 0) {
1322  name = ifmt->name;
1323  long_name = ifmt->long_name;
1324  encode = 0;
1325  }
1326  if (name && strcmp(ifmt->name, name) == 0)
1327  decode = 1;
1328  }
1329  }
1330  if (!name)
1331  break;
1332  last_name = name;
1333 
1334  printf(" %s%s %-15s %s\n",
1335  decode ? "D" : " ",
1336  encode ? "E" : " ",
1337  name,
1338  long_name ? long_name:" ");
1339  }
1340  return 0;
1341 }
1342 
1343 int show_formats(void *optctx, const char *opt, const char *arg)
1344 {
1345  return show_formats_devices(optctx, opt, arg, 0, SHOW_DEFAULT);
1346 }
1347 
1348 int show_muxers(void *optctx, const char *opt, const char *arg)
1349 {
1350  return show_formats_devices(optctx, opt, arg, 0, SHOW_MUXERS);
1351 }
1352 
1353 int show_demuxers(void *optctx, const char *opt, const char *arg)
1354 {
1355  return show_formats_devices(optctx, opt, arg, 0, SHOW_DEMUXERS);
1356 }
1357 
1358 int show_devices(void *optctx, const char *opt, const char *arg)
1359 {
1360  return show_formats_devices(optctx, opt, arg, 1, SHOW_DEFAULT);
1361 }
1362 
1363 #define PRINT_CODEC_SUPPORTED(codec, field, type, list_name, term, get_name) \
1364  if (codec->field) { \
1365  const type *p = codec->field; \
1366  \
1367  printf(" Supported " list_name ":"); \
1368  while (*p != term) { \
1369  get_name(*p); \
1370  printf(" %s", name); \
1371  p++; \
1372  } \
1373  printf("\n"); \
1374  } \
1375 
1376 static void print_codec(const AVCodec *c)
1377 {
1378  int encoder = av_codec_is_encoder(c);
1379 
1380  printf("%s %s [%s]:\n", encoder ? "Encoder" : "Decoder", c->name,
1381  c->long_name ? c->long_name : "");
1382 
1383  printf(" General capabilities: ");
1384  if (c->capabilities & AV_CODEC_CAP_DRAW_HORIZ_BAND)
1385  printf("horizband ");
1386  if (c->capabilities & AV_CODEC_CAP_DR1)
1387  printf("dr1 ");
1388  if (c->capabilities & AV_CODEC_CAP_TRUNCATED)
1389  printf("trunc ");
1390  if (c->capabilities & AV_CODEC_CAP_DELAY)
1391  printf("delay ");
1392  if (c->capabilities & AV_CODEC_CAP_SMALL_LAST_FRAME)
1393  printf("small ");
1394  if (c->capabilities & AV_CODEC_CAP_SUBFRAMES)
1395  printf("subframes ");
1396  if (c->capabilities & AV_CODEC_CAP_EXPERIMENTAL)
1397  printf("exp ");
1398  if (c->capabilities & AV_CODEC_CAP_CHANNEL_CONF)
1399  printf("chconf ");
1400  if (c->capabilities & AV_CODEC_CAP_PARAM_CHANGE)
1401  printf("paramchange ");
1402  if (c->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
1403  printf("variable ");
1404  if (c->capabilities & (AV_CODEC_CAP_FRAME_THREADS |
1405  AV_CODEC_CAP_SLICE_THREADS |
1406  AV_CODEC_CAP_OTHER_THREADS))
1407  printf("threads ");
1408  if (c->capabilities & AV_CODEC_CAP_AVOID_PROBING)
1409  printf("avoidprobe ");
1410  if (c->capabilities & AV_CODEC_CAP_HARDWARE)
1411  printf("hardware ");
1412  if (c->capabilities & AV_CODEC_CAP_HYBRID)
1413  printf("hybrid ");
1414  if (!c->capabilities)
1415  printf("none");
1416  printf("\n");
1417 
1418  if (c->type == AVMEDIA_TYPE_VIDEO ||
1419  c->type == AVMEDIA_TYPE_AUDIO) {
1420  printf(" Threading capabilities: ");
1421  switch (c->capabilities & (AV_CODEC_CAP_FRAME_THREADS |
1422  AV_CODEC_CAP_SLICE_THREADS |
1423  AV_CODEC_CAP_OTHER_THREADS)) {
1424  case AV_CODEC_CAP_FRAME_THREADS |
1425  AV_CODEC_CAP_SLICE_THREADS: printf("frame and slice"); break;
1426  case AV_CODEC_CAP_FRAME_THREADS: printf("frame"); break;
1427  case AV_CODEC_CAP_SLICE_THREADS: printf("slice"); break;
1428  case AV_CODEC_CAP_OTHER_THREADS: printf("other"); break;
1429  default: printf("none"); break;
1430  }
1431  printf("\n");
1432  }
1433 
1434  if (avcodec_get_hw_config(c, 0)) {
1435  printf(" Supported hardware devices: ");
1436  for (int i = 0;; i++) {
1437  const AVCodecHWConfig *config = avcodec_get_hw_config(c, i);
1438  if (!config)
1439  break;
1440  printf("%s ", av_hwdevice_get_type_name(config->device_type));
1441  }
1442  printf("\n");
1443  }
1444 
1445  if (c->supported_framerates) {
1446  const AVRational *fps = c->supported_framerates;
1447 
1448  printf(" Supported framerates:");
1449  while (fps->num) {
1450  printf(" %d/%d", fps->num, fps->den);
1451  fps++;
1452  }
1453  printf("\n");
1454  }
1455  PRINT_CODEC_SUPPORTED(c, pix_fmts, enum AVPixelFormat, "pixel formats",
1456  AV_PIX_FMT_NONE, GET_PIX_FMT_NAME);
1457  PRINT_CODEC_SUPPORTED(c, supported_samplerates, int, "sample rates", 0,
1458  GET_SAMPLE_RATE_NAME);
1459  PRINT_CODEC_SUPPORTED(c, sample_fmts, enum AVSampleFormat, "sample formats",
1460  AV_SAMPLE_FMT_NONE, GET_SAMPLE_FMT_NAME);
1461  PRINT_CODEC_SUPPORTED(c, channel_layouts, uint64_t, "channel layouts",
1462  0, GET_CH_LAYOUT_DESC);
1463 
1464  if (c->priv_class) {
1465  show_help_children(c->priv_class,
1466  AV_OPT_FLAG_ENCODING_PARAM |
1467  AV_OPT_FLAG_DECODING_PARAM);
1468  }
1469 }
1470 
1471 static char get_media_type_char(enum AVMediaType type)
1472 {
1473  switch (type) {
1474  case AVMEDIA_TYPE_VIDEO: return 'V';
1475  case AVMEDIA_TYPE_AUDIO: return 'A';
1476  case AVMEDIA_TYPE_DATA: return 'D';
1477  case AVMEDIA_TYPE_SUBTITLE: return 'S';
1478  case AVMEDIA_TYPE_ATTACHMENT:return 'T';
1479  default: return '?';
1480  }
1481 }
1482 
1483 static const AVCodec *next_codec_for_id(enum AVCodecID id, void **iter,
1484  int encoder)
1485 {
1486  const AVCodec *c;
1487  while ((c = av_codec_iterate(iter))) {
1488  if (c->id == id &&
1489  (encoder ? av_codec_is_encoder(c) : av_codec_is_decoder(c)))
1490  return c;
1491  }
1492  return NULL;
1493 }
1494 
1495 static int compare_codec_desc(const void *a, const void *b)
1496 {
1497  const AVCodecDescriptor * const *da = a;
1498  const AVCodecDescriptor * const *db = b;
1499 
1500  return (*da)->type != (*db)->type ? FFDIFFSIGN((*da)->type, (*db)->type) :
1501  strcmp((*da)->name, (*db)->name);
1502 }
1503 
1504 static unsigned get_codecs_sorted(const AVCodecDescriptor ***rcodecs)
1505 {
1506  const AVCodecDescriptor *desc = NULL;
1507  const AVCodecDescriptor **codecs;
1508  unsigned nb_codecs = 0, i = 0;
1509 
1510  while ((desc = avcodec_descriptor_next(desc)))
1511  nb_codecs++;
1512  if (!(codecs = av_calloc(nb_codecs, sizeof(*codecs)))) {
1513  av_log(NULL, AV_LOG_ERROR, "Out of memory\n");
1514  exit_program(1);
1515  }
1516  desc = NULL;
1517  while ((desc = avcodec_descriptor_next(desc)))
1518  codecs[i++] = desc;
1519  av_assert0(i == nb_codecs);
1520  qsort(codecs, nb_codecs, sizeof(*codecs), compare_codec_desc);
1521  *rcodecs = codecs;
1522  return nb_codecs;
1523 }
1524 
1525 static void print_codecs_for_id(enum AVCodecID id, int encoder)
1526 {
1527  void *iter = NULL;
1528  const AVCodec *codec;
1529 
1530  printf(" (%s: ", encoder ? "encoders" : "decoders");
1531 
1532  while ((codec = next_codec_for_id(id, &iter, encoder)))
1533  printf("%s ", codec->name);
1534 
1535  printf(")");
1536 }
1537 
1538 int show_codecs(void *optctx, const char *opt, const char *arg)
1539 {
1540  const AVCodecDescriptor **codecs;
1541  unsigned i, nb_codecs = get_codecs_sorted(&codecs);
1542 
1543  printf("Codecs:\n"
1544  " D..... = Decoding supported\n"
1545  " .E.... = Encoding supported\n"
1546  " ..V... = Video codec\n"
1547  " ..A... = Audio codec\n"
1548  " ..S... = Subtitle codec\n"
1549  " ...I.. = Intra frame-only codec\n"
1550  " ....L. = Lossy compression\n"
1551  " .....S = Lossless compression\n"
1552  " -------\n");
1553  for (i = 0; i < nb_codecs; i++) {
1554  const AVCodecDescriptor *desc = codecs[i];
1555  const AVCodec *codec;
1556  void *iter = NULL;
1557 
1558  if (strstr(desc->name, "_deprecated"))
1559  continue;
1560 
1561  printf(" ");
1562  printf(avcodec_find_decoder(desc->id) ? "D" : ".");
1563  printf(avcodec_find_encoder(desc->id) ? "E" : ".");
1564 
1565  printf("%c", get_media_type_char(desc->type));
1566  printf((desc->props & AV_CODEC_PROP_INTRA_ONLY) ? "I" : ".");
1567  printf((desc->props & AV_CODEC_PROP_LOSSY) ? "L" : ".");
1568  printf((desc->props & AV_CODEC_PROP_LOSSLESS) ? "S" : ".");
1569 
1570  printf(" %-20s %s", desc->name, desc->long_name ? desc->long_name : "");
1571 
1572  /* print decoders/encoders when there's more than one or their
1573  * names are different from codec name */
1574  while ((codec = next_codec_for_id(desc->id, &iter, 0))) {
1575  if (strcmp(codec->name, desc->name)) {
1576  print_codecs_for_id(desc->id, 0);
1577  break;
1578  }
1579  }
1580  iter = NULL;
1581  while ((codec = next_codec_for_id(desc->id, &iter, 1))) {
1582  if (strcmp(codec->name, desc->name)) {
1583  print_codecs_for_id(desc->id, 1);
1584  break;
1585  }
1586  }
1587 
1588  printf("\n");
1589  }
1590  av_free(codecs);
1591  return 0;
1592 }
1593 
1594 static void print_codecs(int encoder)
1595 {
1596  const AVCodecDescriptor **codecs;
1597  unsigned i, nb_codecs = get_codecs_sorted(&codecs);
1598 
1599  printf("%s:\n"
1600  " V..... = Video\n"
1601  " A..... = Audio\n"
1602  " S..... = Subtitle\n"
1603  " .F.... = Frame-level multithreading\n"
1604  " ..S... = Slice-level multithreading\n"
1605  " ...X.. = Codec is experimental\n"
1606  " ....B. = Supports draw_horiz_band\n"
1607  " .....D = Supports direct rendering method 1\n"
1608  " ------\n",
1609  encoder ? "Encoders" : "Decoders");
1610  for (i = 0; i < nb_codecs; i++) {
1611  const AVCodecDescriptor *desc = codecs[i];
1612  const AVCodec *codec;
1613  void *iter = NULL;
1614 
1615  while ((codec = next_codec_for_id(desc->id, &iter, encoder))) {
1616  printf(" %c", get_media_type_char(desc->type));
1617  printf((codec->capabilities & AV_CODEC_CAP_FRAME_THREADS) ? "F" : ".");
1618  printf((codec->capabilities & AV_CODEC_CAP_SLICE_THREADS) ? "S" : ".");
1619  printf((codec->capabilities & AV_CODEC_CAP_EXPERIMENTAL) ? "X" : ".");
1620  printf((codec->capabilities & AV_CODEC_CAP_DRAW_HORIZ_BAND)?"B" : ".");
1621  printf((codec->capabilities & AV_CODEC_CAP_DR1) ? "D" : ".");
1622 
1623  printf(" %-20s %s", codec->name, codec->long_name ? codec->long_name : "");
1624  if (strcmp(codec->name, desc->name))
1625  printf(" (codec %s)", desc->name);
1626 
1627  printf("\n");
1628  }
1629  }
1630  av_free(codecs);
1631 }
1632 
1633 int show_decoders(void *optctx, const char *opt, const char *arg)
1634 {
1635  print_codecs(0);
1636  return 0;
1637 }
1638 
1639 int show_encoders(void *optctx, const char *opt, const char *arg)
1640 {
1641  print_codecs(1);
1642  return 0;
1643 }
1644 
1645 int show_bsfs(void *optctx, const char *opt, const char *arg)
1646 {
1647  const AVBitStreamFilter *bsf = NULL;
1648  void *opaque = NULL;
1649 
1650  printf("Bitstream filters:\n");
1651  while ((bsf = av_bsf_iterate(&opaque)))
1652  printf("%s\n", bsf->name);
1653  printf("\n");
1654  return 0;
1655 }
1656 
1657 int show_protocols(void *optctx, const char *opt, const char *arg)
1658 {
1659  void *opaque = NULL;
1660  const char *name;
1661 
1662  printf("Supported file protocols:\n"
1663  "Input:\n");
1664  while ((name = avio_enum_protocols(&opaque, 0)))
1665  printf(" %s\n", name);
1666  printf("Output:\n");
1667  while ((name = avio_enum_protocols(&opaque, 1)))
1668  printf(" %s\n", name);
1669  return 0;
1670 }
1671 
1672 int show_filters(void *optctx, const char *opt, const char *arg)
1673 {
1674 #if CONFIG_AVFILTER
1675  const AVFilter *filter = NULL;
1676  char descr[64], *descr_cur;
1677  void *opaque = NULL;
1678  int i, j;
1679  const AVFilterPad *pad;
1680 
1681  printf("Filters:\n"
1682  " T.. = Timeline support\n"
1683  " .S. = Slice threading\n"
1684  " ..C = Command support\n"
1685  " A = Audio input/output\n"
1686  " V = Video input/output\n"
1687  " N = Dynamic number and/or type of input/output\n"
1688  " | = Source or sink filter\n");
1689  while ((filter = av_filter_iterate(&opaque))) {
1690  descr_cur = descr;
1691  for (i = 0; i < 2; i++) {
1692  if (i) {
1693  *(descr_cur++) = '-';
1694  *(descr_cur++) = '>';
1695  }
1696  pad = i ? filter->outputs : filter->inputs;
1697  for (j = 0; pad && avfilter_pad_get_name(pad, j); j++) {
1698  if (descr_cur >= descr + sizeof(descr) - 4)
1699  break;
1700  *(descr_cur++) = get_media_type_char(avfilter_pad_get_type(pad, j));
1701  }
1702  if (!j)
1703  *(descr_cur++) = ((!i && (filter->flags & AVFILTER_FLAG_DYNAMIC_INPUTS)) ||
1704  ( i && (filter->flags & AVFILTER_FLAG_DYNAMIC_OUTPUTS))) ? 'N' : '|';
1705  }
1706  *descr_cur = 0;
1707  printf(" %c%c%c %-17s %-10s %s\n",
1708  filter->flags & AVFILTER_FLAG_SUPPORT_TIMELINE ? 'T' : '.',
1709  filter->flags & AVFILTER_FLAG_SLICE_THREADS ? 'S' : '.',
1710  filter->process_command ? 'C' : '.',
1711  filter->name, descr, filter->description);
1712  }
1713 #else
1714  printf("No filters available: libavfilter disabled\n");
1715 #endif
1716  return 0;
1717 }
1718 
1719 int show_colors(void *optctx, const char *opt, const char *arg)
1720 {
1721  const char *name;
1722  const uint8_t *rgb;
1723  int i;
1724 
1725  printf("%-32s #RRGGBB\n", "name");
1726 
1727  for (i = 0; name = av_get_known_color_name(i, &rgb); i++)
1728  printf("%-32s #%02x%02x%02x\n", name, rgb[0], rgb[1], rgb[2]);
1729 
1730  return 0;
1731 }
1732 
1733 int show_pix_fmts(void *optctx, const char *opt, const char *arg)
1734 {
1735  const AVPixFmtDescriptor *pix_desc = NULL;
1736 
1737  printf("Pixel formats:\n"
1738  "I.... = Supported Input format for conversion\n"
1739  ".O... = Supported Output format for conversion\n"
1740  "..H.. = Hardware accelerated format\n"
1741  "...P. = Paletted format\n"
1742  "....B = Bitstream format\n"
1743  "FLAGS NAME NB_COMPONENTS BITS_PER_PIXEL\n"
1744  "-----\n");
1745 
1746 #if !CONFIG_SWSCALE
1747 # define sws_isSupportedInput(x) 0
1748 # define sws_isSupportedOutput(x) 0
1749 #endif
1750 
1751  while ((pix_desc = av_pix_fmt_desc_next(pix_desc))) {
1752  enum AVPixelFormat pix_fmt = av_pix_fmt_desc_get_id(pix_desc);
1753  printf("%c%c%c%c%c %-16s %d %2d\n",
1754  sws_isSupportedInput (pix_fmt) ? 'I' : '.',
1755  sws_isSupportedOutput(pix_fmt) ? 'O' : '.',
1756  pix_desc->flags & AV_PIX_FMT_FLAG_HWACCEL ? 'H' : '.',
1757  pix_desc->flags & AV_PIX_FMT_FLAG_PAL ? 'P' : '.',
1758  pix_desc->flags & AV_PIX_FMT_FLAG_BITSTREAM ? 'B' : '.',
1759  pix_desc->name,
1760  pix_desc->nb_components,
1761  av_get_bits_per_pixel(pix_desc));
1762  }
1763  return 0;
1764 }
1765 
1766 int show_layouts(void *optctx, const char *opt, const char *arg)
1767 {
1768  int i = 0;
1769  uint64_t layout, j;
1770  const char *name, *descr;
1771 
1772  printf("Individual channels:\n"
1773  "NAME DESCRIPTION\n");
1774  for (i = 0; i < 63; i++) {
1775  name = av_get_channel_name((uint64_t)1 << i);
1776  if (!name)
1777  continue;
1778  descr = av_get_channel_description((uint64_t)1 << i);
1779  printf("%-14s %s\n", name, descr);
1780  }
1781  printf("\nStandard channel layouts:\n"
1782  "NAME DECOMPOSITION\n");
1783  for (i = 0; !av_get_standard_channel_layout(i, &layout, &name); i++) {
1784  if (name) {
1785  printf("%-14s ", name);
1786  for (j = 1; j; j <<= 1)
1787  if ((layout & j))
1788  printf("%s%s", (layout & (j - 1)) ? "+" : "", av_get_channel_name(j));
1789  printf("\n");
1790  }
1791  }
1792  return 0;
1793 }
1794 
1795 int show_sample_fmts(void *optctx, const char *opt, const char *arg)
1796 {
1797  int i;
1798  char fmt_str[128];
1799  for (i = -1; i < AV_SAMPLE_FMT_NB; i++)
1800  printf("%s\n", av_get_sample_fmt_string(fmt_str, sizeof(fmt_str), i));
1801  return 0;
1802 }
1803 
1804 static void show_help_codec(const char *name, int encoder)
1805 {
1806  const AVCodecDescriptor *desc;
1807  const AVCodec *codec;
1808 
1809  if (!name) {
1810  av_log(NULL, AV_LOG_ERROR, "No codec name specified.\n");
1811  return;
1812  }
1813 
1814  codec = encoder ? avcodec_find_encoder_by_name(name) :
1815  avcodec_find_decoder_by_name(name);
1816 
1817  if (codec)
1818  print_codec(codec);
1819  else if ((desc = avcodec_descriptor_get_by_name(name))) {
1820  void *iter = NULL;
1821  int printed = 0;
1822 
1823  while ((codec = next_codec_for_id(desc->id, &iter, encoder))) {
1824  printed = 1;
1825  print_codec(codec);
1826  }
1827 
1828  if (!printed) {
1829  av_log(NULL, AV_LOG_ERROR, "Codec '%s' is known to FFmpeg, "
1830  "but no %s for it are available. FFmpeg might need to be "
1831  "recompiled with additional external libraries.\n",
1832  name, encoder ? "encoders" : "decoders");
1833  }
1834  } else {
1835  av_log(NULL, AV_LOG_ERROR, "Codec '%s' is not recognized by FFmpeg.\n",
1836  name);
1837  }
1838 }
1839 
1840 static void show_help_demuxer(const char *name)
1841 {
1842  const AVInputFormat *fmt = av_find_input_format(name);
1843 
1844  if (!fmt) {
1845  av_log(NULL, AV_LOG_ERROR, "Unknown format '%s'.\n", name);
1846  return;
1847  }
1848 
1849  printf("Demuxer %s [%s]:\n", fmt->name, fmt->long_name);
1850 
1851  if (fmt->extensions)
1852  printf(" Common extensions: %s.\n", fmt->extensions);
1853 
1854  if (fmt->priv_class)
1855  show_help_children(fmt->priv_class, AV_OPT_FLAG_DECODING_PARAM);
1856 }
1857 
1858 static void show_help_protocol(const char *name)
1859 {
1860  const AVClass *proto_class;
1861 
1862  if (!name) {
1863  av_log(NULL, AV_LOG_ERROR, "No protocol name specified.\n");
1864  return;
1865  }
1866 
1867  proto_class = avio_protocol_get_class(name);
1868  if (!proto_class) {
1869  av_log(NULL, AV_LOG_ERROR, "Unknown protocol '%s'.\n", name);
1870  return;
1871  }
1872 
1873  show_help_children(proto_class, AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_ENCODING_PARAM);
1874 }
1875 
1876 static void show_help_muxer(const char *name)
1877 {
1878  const AVCodecDescriptor *desc;
1879  const AVOutputFormat *fmt = av_guess_format(name, NULL, NULL);
1880 
1881  if (!fmt) {
1882  av_log(NULL, AV_LOG_ERROR, "Unknown format '%s'.\n", name);
1883  return;
1884  }
1885 
1886  printf("Muxer %s [%s]:\n", fmt->name, fmt->long_name);
1887 
1888  if (fmt->extensions)
1889  printf(" Common extensions: %s.\n", fmt->extensions);
1890  if (fmt->mime_type)
1891  printf(" Mime type: %s.\n", fmt->mime_type);
1892  if (fmt->video_codec != AV_CODEC_ID_NONE &&
1893  (desc = avcodec_descriptor_get(fmt->video_codec))) {
1894  printf(" Default video codec: %s.\n", desc->name);
1895  }
1896  if (fmt->audio_codec != AV_CODEC_ID_NONE &&
1897  (desc = avcodec_descriptor_get(fmt->audio_codec))) {
1898  printf(" Default audio codec: %s.\n", desc->name);
1899  }
1900  if (fmt->subtitle_codec != AV_CODEC_ID_NONE &&
1901  (desc = avcodec_descriptor_get(fmt->subtitle_codec))) {
1902  printf(" Default subtitle codec: %s.\n", desc->name);
1903  }
1904 
1905  if (fmt->priv_class)
1906  show_help_children(fmt->priv_class, AV_OPT_FLAG_ENCODING_PARAM);
1907 }
1908 
1909 #if CONFIG_AVFILTER
1910 static void show_help_filter(const char *name)
1911 {
1912 #if CONFIG_AVFILTER
1913  const AVFilter *f = avfilter_get_by_name(name);
1914  int i, count;
1915 
1916  if (!name) {
1917  av_log(NULL, AV_LOG_ERROR, "No filter name specified.\n");
1918  return;
1919  } else if (!f) {
1920  av_log(NULL, AV_LOG_ERROR, "Unknown filter '%s'.\n", name);
1921  return;
1922  }
1923 
1924  printf("Filter %s\n", f->name);
1925  if (f->description)
1926  printf(" %s\n", f->description);
1927 
1928  if (f->flags & AVFILTER_FLAG_SLICE_THREADS)
1929  printf(" slice threading supported\n");
1930 
1931  printf(" Inputs:\n");
1932  count = avfilter_pad_count(f->inputs);
1933  for (i = 0; i < count; i++) {
1934  printf(" #%d: %s (%s)\n", i, avfilter_pad_get_name(f->inputs, i),
1935  media_type_string(avfilter_pad_get_type(f->inputs, i)));
1936  }
1937  if (f->flags & AVFILTER_FLAG_DYNAMIC_INPUTS)
1938  printf(" dynamic (depending on the options)\n");
1939  else if (!count)
1940  printf(" none (source filter)\n");
1941 
1942  printf(" Outputs:\n");
1943  count = avfilter_pad_count(f->outputs);
1944  for (i = 0; i < count; i++) {
1945  printf(" #%d: %s (%s)\n", i, avfilter_pad_get_name(f->outputs, i),
1946  media_type_string(avfilter_pad_get_type(f->outputs, i)));
1947  }
1948  if (f->flags & AVFILTER_FLAG_DYNAMIC_OUTPUTS)
1949  printf(" dynamic (depending on the options)\n");
1950  else if (!count)
1951  printf(" none (sink filter)\n");
1952 
1953  if (f->priv_class)
1954  show_help_children(f->priv_class, AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM |
1955  AV_OPT_FLAG_AUDIO_PARAM);
1956  if (f->flags & AVFILTER_FLAG_SUPPORT_TIMELINE)
1957  printf("This filter has support for timeline through the 'enable' option.\n");
1958 #else
1959  av_log(NULL, AV_LOG_ERROR, "Build without libavfilter; "
1960  "cannot satisfy request\n");
1961 #endif
1962 }
1963 #endif
1964 
1965 static void show_help_bsf(const char *name)
1966 {
1967  const AVBitStreamFilter *bsf = av_bsf_get_by_name(name);
1968 
1969  if (!name) {
1970  av_log(NULL, AV_LOG_ERROR, "No bitstream filter name specified.\n");
1971  return;
1972  } else if (!bsf) {
1973  av_log(NULL, AV_LOG_ERROR, "Unknown bit stream filter '%s'.\n", name);
1974  return;
1975  }
1976 
1977  printf("Bit stream filter %s\n", bsf->name);
1978  PRINT_CODEC_SUPPORTED(bsf, codec_ids, enum AVCodecID, "codecs",
1979  AV_CODEC_ID_NONE, GET_CODEC_NAME);
1980  if (bsf->priv_class)
1981  show_help_children(bsf->priv_class, AV_OPT_FLAG_BSF_PARAM);
1982 }
1983 
1984 int show_help(void *optctx, const char *opt, const char *arg)
1985 {
1986  char *topic, *par;
1987  av_log_set_callback(log_callback_help);
1988 
1989  topic = av_strdup(arg ? arg : "");
1990  if (!topic)
1991  return AVERROR(ENOMEM);
1992  par = strchr(topic, '=');
1993  if (par)
1994  *par++ = 0;
1995 
1996  if (!*topic) {
1997  show_help_default(topic, par);
1998  } else if (!strcmp(topic, "decoder")) {
1999  show_help_codec(par, 0);
2000  } else if (!strcmp(topic, "encoder")) {
2001  show_help_codec(par, 1);
2002  } else if (!strcmp(topic, "demuxer")) {
2003  show_help_demuxer(par);
2004  } else if (!strcmp(topic, "muxer")) {
2005  show_help_muxer(par);
2006  } else if (!strcmp(topic, "protocol")) {
2007  show_help_protocol(par);
2008 #if CONFIG_AVFILTER
2009  } else if (!strcmp(topic, "filter")) {
2010  show_help_filter(par);
2011 #endif
2012  } else if (!strcmp(topic, "bsf")) {
2013  show_help_bsf(par);
2014  } else {
2015  show_help_default(topic, par);
2016  }
2017 
2018  av_freep(&topic);
2019  return 0;
2020 }
2021 
2022 int read_yesno(void)
2023 {
2024  int c = getchar();
2025  int yesno = (av_toupper(c) == 'Y');
2026 
2027  while (c != '\n' && c != EOF)
2028  c = getchar();
2029 
2030  return yesno;
2031 }
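
Usage sketch (not part of cmdutils.c; the function and variable names below are illustrative only): a tool built on this helper can prompt on stderr and branch on the answer.

#include <stdio.h>
#include "cmdutils.h"

/* Ask whether an existing output file may be overwritten; read_yesno()
 * returns 1 only if the user typed 'y' or 'Y'. */
static int confirm_overwrite(const char *out_name)
{
    fprintf(stderr, "File '%s' already exists. Overwrite? [y/N] ", out_name);
    fflush(stderr);
    return read_yesno();
}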
2032 
2033 FILE *get_preset_file(char *filename, size_t filename_size,
2034  const char *preset_name, int is_path,
2035  const char *codec_name)
2036 {
2037  FILE *f = NULL;
2038  int i;
2039  const char *base[3] = { getenv("FFMPEG_DATADIR"),
2040  getenv("HOME"),
2041  FFMPEG_DATADIR, };
2042 
2043  if (is_path) {
2044  av_strlcpy(filename, preset_name, filename_size);
2045  f = fopen(filename, "r");
2046  } else {
2047 #if HAVE_GETMODULEHANDLE && defined(_WIN32)
2048  char datadir[MAX_PATH], *ls;
2049  base[2] = NULL;
2050 
2051  if (GetModuleFileNameA(GetModuleHandleA(NULL), datadir, sizeof(datadir) - 1))
2052  {
2053  for (ls = datadir; ls < datadir + strlen(datadir); ls++)
2054  if (*ls == '\\') *ls = '/';
2055 
2056  if (ls = strrchr(datadir, '/'))
2057  {
2058  *ls = 0;
2059  strncat(datadir, "/ffpresets", sizeof(datadir) - 1 - strlen(datadir));
2060  base[2] = datadir;
2061  }
2062  }
2063 #endif
2064  for (i = 0; i < 3 && !f; i++) {
2065  if (!base[i])
2066  continue;
2067  snprintf(filename, filename_size, "%s%s/%s.ffpreset", base[i],
2068  i != 1 ? "" : "/.ffmpeg", preset_name);
2069  f = fopen(filename, "r");
2070  if (!f && codec_name) {
2071  snprintf(filename, filename_size,
2072  "%s%s/%s-%s.ffpreset",
2073  base[i], i != 1 ? "" : "/.ffmpeg", codec_name,
2074  preset_name);
2075  f = fopen(filename, "r");
2076  }
2077  }
2078  }
2079 
2080  return f;
2081 }
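
Usage sketch (illustrative only, not part of this file; the preset and codec names are made up): resolving a preset by name through the search paths above.

#include <stdio.h>
#include "cmdutils.h"

/* Try to open "fast.ffpreset" (or, failing that, "libx264-fast.ffpreset")
 * from the usual preset directories; on success `path` names the file. */
static FILE *open_fast_preset(char *path, size_t path_size)
{
    FILE *f = get_preset_file(path, path_size, "fast", 0, "libx264");
    if (!f)
        fprintf(stderr, "Preset 'fast' not found\n");
    return f;
}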
2082 
2083 int check_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec)
2084 {
2085  int ret = avformat_match_stream_specifier(s, st, spec);
2086  if (ret < 0)
2087  av_log(s, AV_LOG_ERROR, "Invalid stream specifier: %s.\n", spec);
2088  return ret;
2089 }
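
Usage sketch (illustrative, not part of cmdutils.c): matching the streams of an opened input against a user-supplied specifier.

#include <stdio.h>
#include "libavformat/avformat.h"
#include "cmdutils.h"

/* Print the indices of all streams in `ic` matching a specifier such as
 * "v" (any video stream) or "a:0" (the first audio stream). */
static void print_matching_streams(AVFormatContext *ic, const char *spec)
{
    for (unsigned i = 0; i < ic->nb_streams; i++) {
        int ret = check_stream_specifier(ic, ic->streams[i], spec);
        if (ret < 0)
            return;              /* invalid specifier; already logged above */
        if (ret > 0)
            printf("stream #%u matches '%s'\n", i, spec);
    }
}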
2090 
2091 AVDictionary *filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id,
2092  AVFormatContext *s, AVStream *st, const AVCodec *codec)
2093 {
2094  AVDictionary *ret = NULL;
2095  AVDictionaryEntry *t = NULL;
2096  int flags = s->oformat ? AV_OPT_FLAG_ENCODING_PARAM
2097  : AV_OPT_FLAG_DECODING_PARAM;
2098  char prefix = 0;
2099  const AVClass *cc = avcodec_get_class();
2100 
2101  if (!codec)
2102  codec = s->oformat ? avcodec_find_encoder(codec_id)
2103  : avcodec_find_decoder(codec_id);
2104 
2105  switch (st->codecpar->codec_type) {
2106  case AVMEDIA_TYPE_VIDEO:
2107  prefix = 'v';
2108  flags |= AV_OPT_FLAG_VIDEO_PARAM;
2109  break;
2110  case AVMEDIA_TYPE_AUDIO:
2111  prefix = 'a';
2112  flags |= AV_OPT_FLAG_AUDIO_PARAM;
2113  break;
2114  case AVMEDIA_TYPE_SUBTITLE:
2115  prefix = 's';
2116  flags |= AV_OPT_FLAG_SUBTITLE_PARAM;
2117  break;
2118  }
2119 
2120  while (t = av_dict_get(opts, "", t, AV_DICT_IGNORE_SUFFIX)) {
2121  const AVClass *priv_class;
2122  char *p = strchr(t->key, ':');
2123 
2124  /* check stream specification in opt name */
2125  if (p)
2126  switch (check_stream_specifier(s, st, p + 1)) {
2127  case 1: *p = 0; break;
2128  case 0: continue;
2129  default: exit_program(1);
2130  }
2131 
2132  if (av_opt_find(&cc, t->key, NULL, flags, AV_OPT_SEARCH_FAKE_OBJ) ||
2133  !codec ||
2134  ((priv_class = codec->priv_class) &&
2135  av_opt_find(&priv_class, t->key, NULL, flags,
2136  AV_OPT_SEARCH_FAKE_OBJ)))
2137  av_dict_set(&ret, t->key, t->value, 0);
2138  else if (t->key[0] == prefix &&
2139  av_opt_find(&cc, t->key + 1, NULL, flags,
2140  AV_OPT_SEARCH_FAKE_OBJ))
2141  av_dict_set(&ret, t->key + 1, t->value, 0);
2142 
2143  if (p)
2144  *p = ':';
2145  }
2146  return ret;
2147 }
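
Usage sketch (illustrative; the option names and values here are made up): reducing one global option dictionary to the entries that apply to a single output stream.

#include "libavformat/avformat.h"
#include "libavutil/dict.h"
#include "cmdutils.h"

/* "b:v" survives only for video streams (stored as "b"), while a generic
 * option such as "threads" applies to every codec. */
static AVDictionary *options_for_stream(AVFormatContext *oc, AVStream *st)
{
    AVDictionary *all = NULL, *per_stream;

    av_dict_set(&all, "b:v", "2M", 0);
    av_dict_set(&all, "threads", "4", 0);

    per_stream = filter_codec_opts(all, st->codecpar->codec_id, oc, st, NULL);
    av_dict_free(&all);
    return per_stream;           /* caller frees with av_dict_free() */
}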
2148 
2149 AVDictionary **setup_find_stream_info_opts(AVFormatContext *s,
2150  AVDictionary *codec_opts)
2151 {
2152  int i;
2153  AVDictionary **opts;
2154 
2155  if (!s->nb_streams)
2156  return NULL;
2157  opts = av_mallocz_array(s->nb_streams, sizeof(*opts));
2158  if (!opts) {
2160  "Could not alloc memory for stream options.\n");
2161  return NULL;
2162  }
2163  for (i = 0; i < s->nb_streams; i++)
2164  opts[i] = filter_codec_opts(codec_opts, s->streams[i]->codecpar->codec_id,
2165  s, s->streams[i], NULL);
2166  return opts;
2167 }
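
Usage sketch (illustrative, in the spirit of how the fftools call this helper; probe_input is not a function in this file): forwarding per-stream codec options to avformat_find_stream_info().

#include <errno.h>
#include "libavformat/avformat.h"
#include "libavutil/dict.h"
#include "libavutil/mem.h"
#include "cmdutils.h"

static int probe_input(AVFormatContext *ic, AVDictionary *codec_opts)
{
    AVDictionary **opts = setup_find_stream_info_opts(ic, codec_opts);
    int ret;

    if (!opts && ic->nb_streams)                 /* allocation failed */
        return AVERROR(ENOMEM);

    ret = avformat_find_stream_info(ic, opts);

    for (unsigned i = 0; i < ic->nb_streams; i++)
        av_dict_free(&opts[i]);
    av_freep(&opts);
    return ret;
}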
2168 
2169 void *grow_array(void *array, int elem_size, int *size, int new_size)
2170 {
2171  if (new_size >= INT_MAX / elem_size) {
2172  av_log(NULL, AV_LOG_ERROR, "Array too big.\n");
2173  exit_program(1);
2174  }
2175  if (*size < new_size) {
2176  uint8_t *tmp = av_realloc_array(array, new_size, elem_size);
2177  if (!tmp) {
2178  av_log(NULL, AV_LOG_ERROR, "Could not alloc buffer.\n");
2179  exit_program(1);
2180  }
2181  memset(tmp + *size*elem_size, 0, (new_size-*size) * elem_size);
2182  *size = new_size;
2183  return tmp;
2184  }
2185  return array;
2186 }
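
Usage sketch (illustrative only): appending to a dynamically grown array; cmdutils.h also offers a GROW_ARRAY() convenience macro that expands to essentially this call.

#include "cmdutils.h"

/* grow_array() zero-fills the new slots and calls exit_program() itself on
 * allocation failure, so the caller needs no error handling. */
static int *append_int(int *values, int *nb_values, int v)
{
    values = grow_array(values, sizeof(*values), nb_values, *nb_values + 1);
    values[*nb_values - 1] = v;
    return values;
}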
2187 
2188 double get_rotation(AVStream *st)
2189 {
2190  uint8_t* displaymatrix = av_stream_get_side_data(st,
2191  AV_PKT_DATA_DISPLAYMATRIX, NULL);
2192  double theta = 0;
2193  if (displaymatrix)
2194  theta = -av_display_rotation_get((int32_t*) displaymatrix);
2195 
2196  theta -= 360*floor(theta/360 + 0.9/360);
2197 
2198  if (fabs(theta - 90*round(theta/90)) > 2)
2199  av_log(NULL, AV_LOG_WARNING, "Odd rotation angle.\n"
2200  "If you want to help, upload a sample "
2201  "of this file to https://streams.videolan.org/upload/ "
2202  "and contact the ffmpeg-devel mailing list. (ffmpeg-devel@ffmpeg.org)");
2203 
2204  return theta;
2205 }
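
Usage sketch (illustrative; inspired by, not copied from, the tools' auto-rotation handling): mapping the reported angle to a compensating libavfilter description.

#include <math.h>
#include "libavformat/avformat.h"
#include "cmdutils.h"

static const char *rotation_filter(AVStream *st)
{
    double theta = get_rotation(st);   /* normalized to [0, 360) above */

    if (fabs(theta - 90)  < 1.0) return "transpose=clock";
    if (fabs(theta - 180) < 1.0) return "hflip,vflip";
    if (fabs(theta - 270) < 1.0) return "transpose=cclock";
    return NULL;                       /* no significant rotation */
}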
2206 
2207 #if CONFIG_AVDEVICE
2208 static int print_device_sources(const AVInputFormat *fmt, AVDictionary *opts)
2209 {
2210  int ret, i;
2211  AVDeviceInfoList *device_list = NULL;
2212 
2213  if (!fmt || !fmt->priv_class || !AV_IS_INPUT_DEVICE(fmt->priv_class->category))
2214  return AVERROR(EINVAL);
2215 
2216  printf("Auto-detected sources for %s:\n", fmt->name);
2217  if (!fmt->get_device_list) {
2218  ret = AVERROR(ENOSYS);
2219  printf("Cannot list sources. Not implemented.\n");
2220  goto fail;
2221  }
2222 
2223  if ((ret = avdevice_list_input_sources(fmt, NULL, opts, &device_list)) < 0) {
2224  printf("Cannot list sources.\n");
2225  goto fail;
2226  }
2227 
2228  for (i = 0; i < device_list->nb_devices; i++) {
2229  printf("%s %s [%s]\n", device_list->default_device == i ? "*" : " ",
2230  device_list->devices[i]->device_name, device_list->devices[i]->device_description);
2231  }
2232 
2233  fail:
2234  avdevice_free_list_devices(&device_list);
2235  return ret;
2236 }
2237 
2238 static int print_device_sinks(const AVOutputFormat *fmt, AVDictionary *opts)
2239 {
2240  int ret, i;
2241  AVDeviceInfoList *device_list = NULL;
2242 
2243  if (!fmt || !fmt->priv_class || !AV_IS_OUTPUT_DEVICE(fmt->priv_class->category))
2244  return AVERROR(EINVAL);
2245 
2246  printf("Auto-detected sinks for %s:\n", fmt->name);
2247  if (!fmt->get_device_list) {
2248  ret = AVERROR(ENOSYS);
2249  printf("Cannot list sinks. Not implemented.\n");
2250  goto fail;
2251  }
2252 
2253  if ((ret = avdevice_list_output_sinks(fmt, NULL, opts, &device_list)) < 0) {
2254  printf("Cannot list sinks.\n");
2255  goto fail;
2256  }
2257 
2258  for (i = 0; i < device_list->nb_devices; i++) {
2259  printf("%s %s [%s]\n", device_list->default_device == i ? "*" : " ",
2260  device_list->devices[i]->device_name, device_list->devices[i]->device_description);
2261  }
2262 
2263  fail:
2264  avdevice_free_list_devices(&device_list);
2265  return ret;
2266 }
2267 
2268 static int show_sinks_sources_parse_arg(const char *arg, char **dev, AVDictionary **opts)
2269 {
2270  int ret;
2271  if (arg) {
2272  char *opts_str = NULL;
2273  av_assert0(dev && opts);
2274  *dev = av_strdup(arg);
2275  if (!*dev)
2276  return AVERROR(ENOMEM);
2277  if ((opts_str = strchr(*dev, ','))) {
2278  *(opts_str++) = '\0';
2279  if (opts_str[0] && ((ret = av_dict_parse_string(opts, opts_str, "=", ":", 0)) < 0)) {
2280  av_freep(dev);
2281  return ret;
2282  }
2283  }
2284  } else
2285  printf("\nDevice name is not provided.\n"
2286  "You can pass devicename[,opt1=val1[,opt2=val2...]] as an argument.\n\n");
2287  return 0;
2288 }
2289 
2290 int show_sources(void *optctx, const char *opt, const char *arg)
2291 {
2292  const AVInputFormat *fmt = NULL;
2293  char *dev = NULL;
2294  AVDictionary *opts = NULL;
2295  int ret = 0;
2296  int error_level = av_log_get_level();
2297 
2298  av_log_set_level(AV_LOG_WARNING);
2299 
2300  if ((ret = show_sinks_sources_parse_arg(arg, &dev, &opts)) < 0)
2301  goto fail;
2302 
2303  do {
2304  fmt = av_input_audio_device_next(fmt);
2305  if (fmt) {
2306  if (!strcmp(fmt->name, "lavfi"))
2307  continue; //it's pointless to probe lavfi
2308  if (dev && !av_match_name(dev, fmt->name))
2309  continue;
2310  print_device_sources(fmt, opts);
2311  }
2312  } while (fmt);
2313  do {
2314  fmt = av_input_video_device_next(fmt);
2315  if (fmt) {
2316  if (dev && !av_match_name(dev, fmt->name))
2317  continue;
2318  print_device_sources(fmt, opts);
2319  }
2320  } while (fmt);
2321  fail:
2322  av_dict_free(&opts);
2323  av_free(dev);
2324  av_log_set_level(error_level);
2325  return ret;
2326 }
2327 
2328 int show_sinks(void *optctx, const char *opt, const char *arg)
2329 {
2330  const AVOutputFormat *fmt = NULL;
2331  char *dev = NULL;
2332  AVDictionary *opts = NULL;
2333  int ret = 0;
2334  int error_level = av_log_get_level();
2335 
2336  av_log_set_level(AV_LOG_WARNING);
2337 
2338  if ((ret = show_sinks_sources_parse_arg(arg, &dev, &opts)) < 0)
2339  goto fail;
2340 
2341  do {
2342  fmt = av_output_audio_device_next(fmt);
2343  if (fmt) {
2344  if (dev && !av_match_name(dev, fmt->name))
2345  continue;
2346  print_device_sinks(fmt, opts);
2347  }
2348  } while (fmt);
2349  do {
2350  fmt = av_output_video_device_next(fmt);
2351  if (fmt) {
2352  if (dev && !av_match_name(dev, fmt->name))
2353  continue;
2354  print_device_sinks(fmt, opts);
2355  }
2356  } while (fmt);
2357  fail:
2358  av_dict_free(&opts);
2359  av_free(dev);
2360  av_log_set_level(error_level);
2361  return ret;
2362 }
2363 
2364 #endif
error
static void error(const char *err)
Definition: target_bsf_fuzzer.c:30
OPT_FLOAT
#define OPT_FLOAT
Definition: cmdutils.h:168
add_bytes
static void add_bytes(HYuvContext *s, uint8_t *dst, uint8_t *src, int w)
Definition: huffyuvdec.c:851
formats
formats
Definition: signature.h:48
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
GET_ARG
#define GET_ARG(arg)
OPT_EXIT
#define OPT_EXIT
Definition: cmdutils.h:171
ff_get_audio_buffer
AVFrame * ff_get_audio_buffer(AVFilterLink *link, int nb_samples)
Request an audio samples buffer with a specific set of permissions.
Definition: audio.c:86
be
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it be(in the first position) for now. Options ------- Then comes the options array. This is what will define the user accessible options. For example
av_force_cpu_flags
void av_force_cpu_flags(int arg)
Disables cpu detection and forces the specified flags.
Definition: cpu.c:67
AVCodec
AVCodec.
Definition: codec.h:197
print_codecs_for_id
static void print_codecs_for_id(enum AVCodecID id, int encoder)
Definition: cmdutils.c:1525
L1
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 L1
Definition: snow.txt:554
OptionGroup::group_def
const OptionGroupDef * group_def
Definition: cmdutils.h:309
show_help_default
void show_help_default(const char *opt, const char *arg)
Per-fftool specific help handler.
Definition: ffmpeg_opt.c:3218
stride
int stride
Definition: mace.c:144
AVMEDIA_TYPE_SUBTITLE
@ AVMEDIA_TYPE_SUBTITLE
Definition: avutil.h:204
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:187
direct
static void direct(const float *in, const FFTComplex *ir, int len, float *out)
Definition: af_afir.c:60
process
static void process(NormalizeContext *s, AVFrame *in, AVFrame *out)
Definition: vf_normalize.c:156
draw_horiz_band
static void draw_horiz_band(AVCodecContext *ctx, const AVFrame *fr, int offset[4], int slice_position, int type, int height)
Definition: api-band-test.c:36
sws_isSupportedOutput
#define sws_isSupportedOutput(x)
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
status
they must not be accessed directly The fifo field contains the frames that are queued in the input for processing by the filter The status_in and status_out fields contains the queued status(EOF or error) of the link
L2
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 L2
Definition: snow.txt:554
level
uint8_t level
Definition: svq3.c:204
program
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C program
Definition: undefined.txt:6
INFINITY
#define INFINITY
Definition: mathematics.h:67
cast
The reader does not expect b to be semantically here and if the code is changed by maybe adding a cast
Definition: undefined.txt:36
sws_isSupportedInput
#define sws_isSupportedInput(x)
AVOutputFormat::extensions
const char * extensions
comma-separated filename extensions
Definition: avformat.h:499
init
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:31
mix
static int mix(int c0, int c1)
Definition: 4xm.c:716
AVOutputFormat::name
const char * name
Definition: avformat.h:491
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
nb_input_files
int nb_input_files
Definition: ffmpeg.c:151
avio_protocol_get_class
const AVClass * avio_protocol_get_class(const char *name)
Get AVClass by names of available protocols.
Definition: protocols.c:108
opt.h
AV_OPT_FLAG_VIDEO_PARAM
#define AV_OPT_FLAG_VIDEO_PARAM
Definition: opt.h:281
GET_SAMPLE_RATE_NAME
#define GET_SAMPLE_RATE_NAME(rate)
Definition: cmdutils.h:631
AVCodecParameters::codec_type
enum AVMediaType codec_type
General type of the encoded data.
Definition: codec_par.h:56
space
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated space
Definition: undefined.txt:4
AV_IS_INPUT_DEVICE
#define AV_IS_INPUT_DEVICE(category)
Definition: log.h:50
avfilter_pad_get_name
const char * avfilter_pad_get_name(const AVFilterPad *pads, int pad_idx)
Get the name of an AVFilterPad.
Definition: avfilter.c:932
OptionDef::off
size_t off
Definition: cmdutils.h:183
Then
status_out is the status that have been taken into it is final when it is not The typical task of an activate callback is to first check the backward status of output and if relevant forward it to the corresponding input Then
Definition: filter_design.txt:165
transforms
static const struct @72 transforms[18]
AVCodec::long_name
const char * long_name
Descriptive name for the codec, meant to be more human readable than name.
Definition: codec.h:209
libm.h
report_file
static FILE * report_file
Definition: cmdutils.c:71
show_formats
int show_formats(void *optctx, const char *opt, const char *arg)
Print a listing containing all the formats supported by the program (including devices).
Definition: cmdutils.c:1343
av_bprint_finalize
int av_bprint_finalize(AVBPrint *buf, char **ret_str)
Finalize a print buffer.
Definition: bprint.c:235
out
FILE * out
Definition: movenc.c:54
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:245
AV_CODEC_PROP_LOSSY
#define AV_CODEC_PROP_LOSSY
Codec supports lossy compression.
Definition: codec_desc.h:78
elements
static const ElemCat * elements[ELEMENT_COUNT]
Definition: signature.h:566
scheduling
===============The purpose of these rules is to ensure that frames flow in the filter graph without getting stuck and accumulating somewhere. Simple filters that output one frame for each input frame should not have to worry about it. There are two design for filters:one using the filter_frame() and request_frame() callbacks and the other using the activate() callback. The design using filter_frame() and request_frame() is legacy, but it is suitable for filters that have a single input and process one frame at a time. New filters with several inputs, that treat several frames at a time or that require a special treatment at EOF should probably use the design using activate(). activate -------- This method is called when something must be done in a filter scheduling
Definition: filter_design.txt:142
opt_report
int opt_report(void *optctx, const char *opt, const char *arg)
Definition: cmdutils.c:1041
MID_STATE
#define MID_STATE
Definition: snow.h:40
av_get_sample_fmt_string
char * av_get_sample_fmt_string(char *buf, int buf_size, enum AVSampleFormat sample_fmt)
Generate a string corresponding to the sample format with sample_fmt, or a header if sample_fmt is ne...
Definition: samplefmt.c:93
av_bprint_init
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max)
Definition: bprint.c:69
show_layouts
int show_layouts(void *optctx, const char *opt, const char *arg)
Print a listing containing all the standard channel layouts supported by the program.
Definition: cmdutils.c:1766
cb
static double cb(void *priv, double x, double y)
Definition: vf_geq.c:215
nothing
static void nothing(void *foo)
Definition: dshow_capture.h:53
is
The official guide to swscale for confused that is
Definition: swscale.txt:28
playlist
Definition: hls.c:93
u
#define u(width, name, range_min, range_max)
Definition: cbs_h2645.c:264
AV_LOG_QUIET
#define AV_LOG_QUIET
Print no output.
Definition: log.h:163
AVCodec::priv_class
const AVClass * priv_class
AVClass for the private context.
Definition: codec.h:223
init_parse_context
static void init_parse_context(OptionParseContext *octx, const OptionGroupDef *groups, int nb_groups)
Definition: cmdutils.c:690
developers
The official guide to swscale for confused developers
Definition: swscale.txt:2
SHOW_DEFAULT
@ SHOW_DEFAULT
Definition: cmdutils.c:76
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:978
log_callback_report
static void log_callback_report(void *ptr, int level, const char *fmt, va_list vl)
Definition: cmdutils.c:100
sample_fmts
static enum AVSampleFormat sample_fmts[]
Definition: adpcmenc.c:953
va_copy.h
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:55
AV_CODEC_CAP_HARDWARE
#define AV_CODEC_CAP_HARDWARE
Codec is backed by a hardware implementation.
Definition: codec.h:157
FFERROR_NOT_READY
return FFERROR_NOT_READY
Definition: filter_design.txt:204
AV_LOG_PANIC
#define AV_LOG_PANIC
Something went really wrong and we will crash now.
Definition: log.h:168
edgedetect
This document is a tutorial initiation for writing simple filters in libavfilter libavfilter is which means that it is highly recommended that you submit your filters to the FFmpeg development mailing list and make sure that they are applied your filters are likely to have a very short lifetime due to more or less regular internal API and a limited and testing changes the pixels in whatever fashion you and outputs the modified frame The most simple way of doing this is to take a similar filter We ll pick edgedetect
Definition: writing_filters.txt:16
AVDeviceInfo::device_name
char * device_name
device name, format depends on device
Definition: avdevice.h:458
sws_dict
AVDictionary * sws_dict
Definition: cmdutils.c:67
show_help_codec
static void show_help_codec(const char *name, int encoder)
Definition: cmdutils.c:1804
get_media_type_char
static char get_media_type_char(enum AVMediaType type)
Definition: cmdutils.c:1471
AVBitStreamFilter::name
const char * name
Definition: bsf.h:99
mv
static const int8_t mv[256][2]
Definition: 4xm.c:79
output
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce output
Definition: filter_design.txt:225
codecs
static struct codec_string codecs[]
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
show_version
int show_version(void *optctx, const char *opt, const char *arg)
Print the version of the program to stdout.
Definition: cmdutils.c:1182
basis
static int16_t basis[64][64]
Definition: mpegvideo_enc.c:4121
avformat_get_class
const AVClass * avformat_get_class(void)
Get the AVClass for AVFormatContext.
Definition: options.c:201
program_name
const char program_name[]
program name, defined by the program for show_version().
Definition: ffmpeg.c:109
design
Filter design
Definition: filter_design.txt:2
av_unused
#define av_unused
Definition: attributes.h:131
state
static struct @321 state
AVDeviceInfoList::nb_devices
int nb_devices
number of autodetected devices
Definition: avdevice.h:467
graph
fg outputs[0] graph
Definition: ffmpeg_filter.c:174
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:111
GET_PIX_FMT_NAME
#define GET_PIX_FMT_NAME(pix_fmt)
Definition: cmdutils.h:622
avcodec_find_encoder
const AVCodec * avcodec_find_encoder(enum AVCodecID id)
Find a registered encoder with a matching codec ID.
Definition: allcodecs.c:905
callbacks
static const OMX_CALLBACKTYPE callbacks
Definition: omx.c:332
AV_CODEC_CAP_TRUNCATED
#define AV_CODEC_CAP_TRUNCATED
Definition: codec.h:53
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:303
tmp
static uint8_t tmp[11]
Definition: aes_ctr.c:27
av_frame_make_writable
int av_frame_make_writable(AVFrame *frame)
Ensure that the frame data is writable, avoiding data copy if possible.
Definition: frame.c:489
pixdesc.h
step
trying all byte sequences megabyte in length and selecting the best looking sequence will yield cases to try But a word about which is also called distortion Distortion can be quantified by almost any quality measurement one chooses the sum of squared differences is used but more complex methods that consider psychovisual effects can be used as well It makes no difference in this discussion First step
Definition: rate_distortion.txt:58
index
fg index
Definition: ffmpeg_filter.c:168
print_codec
static void print_codec(const AVCodec *c)
Definition: cmdutils.c:1376
AVCodec::capabilities
int capabilities
Codec capabilities.
Definition: codec.h:216
w
uint8_t w
Definition: llviddspenc.c:39
Rate
Rate
G723.1 rate values.
Definition: g723_1.h:72
OPT_INPUT
#define OPT_INPUT
Definition: cmdutils.h:178
even
Tag MUST be even
Definition: snow.txt:206
sources
Note except for filters that can have queued frames and sources
Definition: filter_design.txt:285
AVPixFmtDescriptor::name
const char * name
Definition: pixdesc.h:71
AVOption
AVOption.
Definition: opt.h:248
HAS_ARG
#define HAS_ARG
Definition: cmdutils.h:161
OptionGroupList::groups
OptionGroup * groups
Definition: cmdutils.h:329
b
#define b
Definition: input.c:41
chroma
static av_always_inline void chroma(WaveformContext *s, AVFrame *in, AVFrame *out, int component, int intensity, int offset_y, int offset_x, int column, int mirror, int jobnr, int nb_jobs)
Definition: vf_waveform.c:1624
table
static const uint16_t table[]
Definition: prosumer.c:206
likely
#define likely(x)
Definition: asm.h:33
OptionDef::dst_ptr
void * dst_ptr
Definition: cmdutils.h:181
OptionGroupList::nb_groups
int nb_groups
Definition: cmdutils.h:330
data
const char data[16]
Definition: mxf.c:142
linear
static int linear(InterplayACMContext *s, unsigned ind, unsigned col)
Definition: interplayacm.c:127
av_pix_fmt_desc_next
const AVPixFmtDescriptor * av_pix_fmt_desc_next(const AVPixFmtDescriptor *prev)
Iterate over all pixel format descriptors known to libavutil.
Definition: pixdesc.c:2548
format_opts
AVDictionary * format_opts
Definition: cmdutils.c:69
avio_enum_protocols
const char * avio_enum_protocols(void **opaque, int output)
Iterate through names of available protocols.
Definition: protocols.c:93
half
static uint8_t half(int a, int b)
Definition: mobiclip.c:541
ff_request_frame
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:395
integer
int integer
Definition: swresample_internal.h:37
convert
Definition: convert.py:1
AV_DICT_IGNORE_SUFFIX
#define AV_DICT_IGNORE_SUFFIX
Return first entry in a dictionary whose first part corresponds to the search key,...
Definition: dict.h:70
possible
the frame and frame reference mechanism is intended to as much as possible
Definition: filter_design.txt:45
av_mallocz_array
void * av_mallocz_array(size_t nmemb, size_t size)
Definition: mem.c:196
FLAGS
#define FLAGS
Definition: cmdutils.c:539
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:197
F
#define F(x)
base
uint8_t base
Definition: vp3data.h:141
fc
#define fc(width, name, range_min, range_max)
Definition: cbs_av1.c:551
OptionGroup::swr_opts
AVDictionary * swr_opts
Definition: cmdutils.h:319
allocate
#define allocate(name, size)
Definition: cbs_h2645.c:432
show_help_children
void show_help_children(const AVClass *class, int flags)
Show help for all options with given flags in class and all its children.
Definition: cmdutils.c:202
AVOption::flags
int flags
Definition: opt.h:277
av_get_bits_per_pixel
int av_get_bits_per_pixel(const AVPixFmtDescriptor *pixdesc)
Return the number of bits per pixel used by the pixel format described by pixdesc.
Definition: pixdesc.c:2493
SHOW_COPYRIGHT
#define SHOW_COPYRIGHT
Definition: cmdutils.c:1088
max
#define max(a, b)
Definition: cuda_runtime.h:33
mathematics.h
filter
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce then the filter should push the output frames on the output link immediately As an exception to the previous rule if the input frame is enough to produce several output frames then the filter needs output only at least one per link The additional frames can be left buffered in the filter
Definition: filter_design.txt:228
av_bsf_iterate
const AVBitStreamFilter * av_bsf_iterate(void **opaque)
Iterate over all registered bitstream filters.
Definition: bitstream_filters.c:67
AVDictionary
Definition: dict.c:30
Frame
Definition: ffplay.c:155
av_get_cpu_flags
int av_get_cpu_flags(void)
Return the flags which specify extensions supported by the CPU.
Definition: cpu.c:95
subbands
subbands
Definition: aptx.h:39
processed
status_in is a status change that must be taken into account after all frames in fifo have been processed
Definition: filter_design.txt:159
hide_banner
int hide_banner
Definition: cmdutils.c:73
config_props
static int config_props(AVFilterLink *outlink)
Definition: aeval.c:223
put_pixel
static void put_pixel(uint16_t *dst, ptrdiff_t linesize, const int16_t *in, int bits_per_raw_sample)
Add bias value, clamp and output pixels of a slice.
Definition: proresdsp.c:41
though
though
Definition: snow.txt:1
AV_OPT_FLAG_FILTERING_PARAM
#define AV_OPT_FLAG_FILTERING_PARAM
a generic parameter which can be set by the user for filtering
Definition: opt.h:294
Makefile
s EdgeDetect Foobar g libavfilter vf_edgedetect c libavfilter vf_foobar c edit libavfilter Makefile
Definition: writing_filters.txt:20
FF_FILTER_FORWARD_STATUS_BACK
#define FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink)
Forward the status on an output link to an input link.
Definition: filters.h:199
H0
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 H0
Definition: snow.txt:554
tf_sess_config.config
config
Definition: tf_sess_config.py:33
ff_thread_await_progress
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before ff_thread_await_progress() has been called on them. reget_buffer() and buffer age optimizations no longer work. *The contents of buffers must not be written to after ff_thread_report_progress() has been called on them. This includes draw_edges(). Porting codecs to frame threading
quality
trying all byte sequences megabyte in length and selecting the best looking sequence will yield cases to try But a word about quality
Definition: rate_distortion.txt:12
AVOutputFormat::subtitle_codec
enum AVCodecID subtitle_codec
default subtitle codec
Definition: avformat.h:503
av_input_audio_device_next
const AVInputFormat * av_input_audio_device_next(const AVInputFormat *d)
Audio input devices iterator.
Definition: alldevices.c:123
D
D(D(float, sse)
Definition: rematrix_init.c:29
OptionDef
Definition: cmdutils.h:158
AVUNERROR
#define AVUNERROR(e)
Definition: error.h:44
av_bsf_get_by_name
const AVBitStreamFilter * av_bsf_get_by_name(const char *name)
Definition: bitstream_filters.c:78
AVInputFormat::long_name
const char * long_name
Descriptive name for the format, meant to be more human-readable than name.
Definition: avformat.h:628
bit
#define bit(string, value)
Definition: cbs_mpeg2.c:58
avdevice_list_output_sinks
int avdevice_list_output_sinks(const AVOutputFormat *device, const char *device_name, AVDictionary *device_options, AVDeviceInfoList **device_list)
Definition: avdevice.c:134
A
#define A(x)
Definition: vp56_arith.h:28
exit_program
void exit_program(int ret)
Wraps exit with a program-specific cleanup routine.
Definition: cmdutils.c:132
InputStream
Definition: ffmpeg.h:300
Filter
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 Filter
Definition: snow.txt:554
ff_inlink_consume_frame
int ff_inlink_consume_frame(AVFilterLink *link, AVFrame **rframe)
Take a frame from the link's FIFO and update the link's stats.
Definition: avfilter.c:1376
av_max_alloc
void av_max_alloc(size_t max)
Set the maximum size that may be allocated in one block.
Definition: mem.c:73
parse_number_or_die
double parse_number_or_die(const char *context, const char *numstr, int type, double min, double max)
Parse a string and return its corresponding value as a double.
Definition: cmdutils.c:140
return
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a it should return
Definition: filter_design.txt:264
rgb
Definition: rpzaenc.c:58
print_error
void print_error(const char *filename, int err)
Print an error message to stderr, indicating filename and a human readable description of the error c...
Definition: cmdutils.c:1073
some
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that some(invalid) inputs can trigger overflows(undefined behavior). In these cases
decoder
static const chunk_decoder decoder[8]
Definition: dfa.c:330
OptionGroupList
A list of option groups that all have the same group type (e.g.
Definition: cmdutils.h:326
reasons
if it could not for temporary reasons
Definition: filter_design.txt:265
fail
#define fail()
Definition: checkasm.h:134
resolution
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like for each output line the vertical scaler pulls lines from a ring buffer When the ring buffer does not contain the wanted then it is pulled from the input slice through the input converter and horizontal scaler The result is also stored in the ring buffer to serve future vertical scaler requests When no more output can be generated because lines from a future slice would be then all remaining lines in the current slice are horizontally scaled and put in the ring buffer[This is done for luma and chroma, each with possibly different numbers of lines per picture.] Input to YUV Converter When the input to the main path is not planar bits per component YUV or bit it is converted to planar bit YUV Two sets of converters exist for this the other leaves the full chroma resolution
Definition: swscale.txt:54
av_strerror
int av_strerror(int errnum, char *errbuf, size_t errbuf_size)
Put a description of the AVERROR code errnum in errbuf.
Definition: error.c:105
show_decoders
int show_decoders(void *optctx, const char *opt, const char *arg)
Print a listing containing all the decoders supported by the program.
Definition: cmdutils.c:1633
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:129
frames
if it could not because there are no more frames
Definition: filter_design.txt:266
relevant
status_out is the status that have been taken into it is final when it is not The typical task of an activate callback is to first check the backward status of output and if relevant forward it to the corresponding input if relevant
Definition: filter_design.txt:165
SHOW_CONFIG
#define SHOW_CONFIG
Definition: cmdutils.c:1087
av_filter_iterate
const AVFilter * av_filter_iterate(void **opaque)
Iterate over all registered filters.
Definition: allfilters.c:536
ff_thread_get_buffer
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before as well as code calling up to before the decode process starts Call have so the codec calls ff_thread_report set FF_CODEC_CAP_ALLOCATE_PROGRESS in AVCodec caps_internal and use ff_thread_get_buffer() to allocate frames. The frames must then be freed with ff_thread_release_buffer(). Otherwise decode directly into the user-supplied frames. Call ff_thread_report_progress() after some part of the current picture has decoded. A good place to put this is where draw_horiz_band() is called - add this if it isn 't called anywhere
IA
#define IA(x)
Definition: cast5.c:26
av_parse_cpu_caps
int av_parse_cpu_caps(unsigned *flags, const char *s)
Parse CPU caps from a string and update the given AV_CPU_* flags based on that.
Definition: cpu.c:105
tables
Writing a table generator This documentation is preliminary Parts of the API are not good and should be changed Basic concepts A table generator consists of two *_tablegen c and *_tablegen h The h file will provide the variable declarations and initialization code for the tables
Definition: tablegen.txt:10
OptionParseContext
Definition: cmdutils.h:333
future
FFmpeg s bug feature request tracker new issues and changes to existing issues can be done through a web interface Issues can be different kinds of things we want to keep track of but that do not belong into the source tree itself This includes bug feature requests and license violations We might add more items to this list in the future
Definition: issue_tracker.txt:13
AVERROR_OPTION_NOT_FOUND
#define AVERROR_OPTION_NOT_FOUND
Option not found.
Definition: error.h:61
AV_BPRINT_SIZE_AUTOMATIC
#define AV_BPRINT_SIZE_AUTOMATIC
Option
An option extracted from the commandline.
Definition: cmdutils.h:287
variant
Definition: hls.c:180
val
static double val(void *priv, double ch)
Definition: aeval.c:76
type
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf type
Definition: writing_filters.txt:86
update
static av_always_inline void update(SilenceDetectContext *s, AVFrame *insamples, int is_silence, int current_sample, int64_t nb_samples_notify, AVRational time_base)
Definition: af_silencedetect.c:78
pts
static int64_t pts
Definition: transcode_aac.c:652
account
status_out is the status that have been taken into account
Definition: filter_design.txt:160
sws_get_class
const AVClass * sws_get_class(void)
Get the AVClass for swsContext.
Definition: options.c:95
us
#define us(width, name, range_min, range_max, subs,...)
Definition: cbs_h2645.c:278
AV_PKT_DATA_DISPLAYMATRIX
@ AV_PKT_DATA_DISPLAYMATRIX
This side data contains a 3x3 transformation matrix describing an affine transformation that needs to...
Definition: packet.h:108
OptionGroup::nb_opts
int nb_opts
Definition: cmdutils.h:313
av_opt_set
int av_opt_set(void *obj, const char *name, const char *val, int search_flags)
Definition: opt.c:465
show_muxdemuxers
show_muxdemuxers
Definition: cmdutils.c:75
OPT_STRING
#define OPT_STRING
Definition: cmdutils.h:164
OptionGroupList::group_def
const OptionGroupDef * group_def
Definition: cmdutils.h:327
AVFILTER_FLAG_DYNAMIC_INPUTS
#define AVFILTER_FLAG_DYNAMIC_INPUTS
The number of the filter inputs is not determined just by AVFilter.inputs.
Definition: avfilter.h:106
fast
static int fast
Definition: ffplay.c:334
OptionDef::help
const char * help
Definition: cmdutils.h:185
AVRational::num
int num
Numerator.
Definition: rational.h:59
idct
static void idct(int16_t block[64])
Definition: 4xm.c:165
InputFile
Definition: ffmpeg.h:400
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:54
show_help_bsf
static void show_help_bsf(const char *name)
Definition: cmdutils.c:1965
OptionGroupDef
Definition: cmdutils.h:293
qlogs
spatial_decomposition_type s header_state qlog s header_state mv_scale s header_state qbias s header_state block_max_depth s header_state qlogs
Definition: snow.txt:85
LH
#define LH(psrc)
Definition: generic_macros_msa.h:89
AVDeviceInfoList::devices
AVDeviceInfo ** devices
list of autodetected devices
Definition: avdevice.h:466
aligned
static int aligned(int val)
Definition: dashdec.c:168
C
s EdgeDetect Foobar g libavfilter vf_edgedetect c libavfilter vf_foobar c edit libavfilter and add an entry for foobar following the pattern of the other filters edit libavfilter allfilters and add an entry for foobar following the pattern of the other filters configure make j< whatever > ffmpeg ffmpeg i you should get a foobar png with Lena edge detected That s your new playground is ready Some little details about what s going which in turn will define variables for the build system and the C
Definition: writing_filters.txt:58
check_stream_specifier
int check_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec)
Check if the given stream matches a stream specifier.
Definition: cmdutils.c:2083
SHOW_VERSION
#define SHOW_VERSION
Definition: cmdutils.c:1086
first
trying all byte sequences megabyte in length and selecting the best looking sequence will yield cases to try But first
Definition: rate_distortion.txt:12
avassert.h
variables
FFmpeg currently uses a custom build this text attempts to document some of its obscure features and options Makefile variables
Definition: build_system.txt:7
description
Tag description
Definition: snow.txt:206
AV_LOG_TRACE
#define AV_LOG_TRACE
Extremely verbose debugging, useful for libav* development.
Definition: log.h:207
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:181
print_buildconf
static void print_buildconf(int flags, int level)
Definition: cmdutils.c:1145
initFilter
static av_cold int initFilter(int16_t **outFilter, int32_t **filterPos, int *outFilterSize, int xInc, int srcW, int dstW, int filterAlign, int one, int flags, int cpu_flags, SwsVector *srcFilter, SwsVector *dstFilter, double param[2], int srcPos, int dstPos)
Definition: utils.c:330
AV_CODEC_CAP_EXPERIMENTAL
#define AV_CODEC_CAP_EXPERIMENTAL
Codec is experimental and is thus avoided in favor of non experimental encoders.
Definition: codec.h:100
AVInputFormat
Definition: avformat.h:616
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVInputFormat::extensions
const char * extensions
If extensions are defined, then no probe is done.
Definition: avformat.h:642
inputs
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several inputs
Definition: filter_design.txt:243
OptionGroup::codec_opts
AVDictionary * codec_opts
Definition: cmdutils.h:315
ff_set_common_formats
int ff_set_common_formats(AVFilterContext *ctx, AVFilterFormats *formats)
A helper for query_formats() which sets all links to the same list of formats.
Definition: formats.c:580
avdevice_list_input_sources
int avdevice_list_input_sources(const AVInputFormat *device, const char *device_name, AVDictionary *device_options, AVDeviceInfoList **device_list)
List devices.
Definition: avdevice.c:123
set
static void set(uint8_t *a[], int ch, int index, int ch_count, enum AVSampleFormat f, double v)
Definition: swresample.c:59
expand_filename_template
static void expand_filename_template(AVBPrint *bp, const char *template, struct tm *tm)
Definition: cmdutils.c:935
check_options
static void check_options(const OptionDef *po)
Definition: cmdutils.c:492
media_type_string
#define media_type_string
Definition: cmdutils.h:617
await_progress
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before as well as code calling up to before the decode process starts Call have so the codec calls ff_thread_report await_progress()
ff_thread_report_progress
void ff_thread_report_progress(ThreadFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
Definition: pthread_frame.c:587
check
#define check(x, y, S, v)
Definition: motion_est_template.c:405
YUV
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like YUV
Definition: swscale.txt:38
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:40
postprocess.h
class
#define class
Definition: math.h:25
av_log_format_line
void av_log_format_line(void *ptr, int level, const char *fmt, va_list vl, char *line, int line_size, int *print_prefix)
Format a line of log the same way as the default callback.
Definition: log.c:328
decode
static void decode(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame, FILE *outfile)
Definition: decode_audio.c:71
ff_outlink_set_status
static void ff_outlink_set_status(AVFilterLink *link, int status, int64_t pts)
Set the status field of a link from the source filter.
Definition: filters.h:189
OPT_INT
#define OPT_INT
Definition: cmdutils.h:167
ff_inlink_request_frame
void ff_inlink_request_frame(AVFilterLink *link)
Mark that a frame is wanted on the link.
Definition: avfilter.c:1502
input_streams
InputStream ** input_streams
Definition: ffmpeg.c:148
width
#define width
MC
#define MC(PEL, DIR, WIDTH)
Definition: hevcdsp_mips.h:26
AVCodecDescriptor
This struct describes the properties of a single codec described by an AVCodecID.
Definition: codec_desc.h:38
matter
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not matter(as it is from invalid input). In some cases the input can be checked easily in others checking the input is computationally too intensive. In these remaining cases a unsigned type can be used instead of a signed type. unsigned overflows are defined in C. SUINT ----- As we have above established there is a need to use "unsigned" sometimes in computations which work with signed integers(which overflow). Using "unsigned" for signed integers has the very significant potential to cause confusion as in unsigned a
s
#define s(width, name)
Definition: cbs_vp9.c:257
OptionDef::argname
const char * argname
Definition: cmdutils.h:186
split_commandline
int split_commandline(OptionParseContext *octx, int argc, char *argv[], const OptionDef *options, const OptionGroupDef *groups, int nb_groups)
Split the commandline into an intermediate form convenient for further processing.
Definition: cmdutils.c:738
AV_OPT_FLAG_ENCODING_PARAM
#define AV_OPT_FLAG_ENCODING_PARAM
a generic parameter which can be set by the user for muxing or encoding
Definition: opt.h:278
resample_opts
AVDictionary * resample_opts
Definition: cmdutils.c:69
offsets
static const int offsets[]
Definition: hevc_pel.c:34
av_realloc_array
void * av_realloc_array(void *ptr, size_t nmemb, size_t size)
Definition: mem.c:204
format
From the filter design notes: for each input and each output, a filter lists the supported formats; for video that means the pixel format, for audio the channel layout, sample format and sample rate.
floor
static __device__ float floor(float a)
Definition: cuda_runtime.h:173
AVInputFormat::name
const char * name
A comma separated list of short names for the format.
Definition: avformat.h:621
SWS_FULL_CHR_H_INP
#define SWS_FULL_CHR_H_INP
Definition: swscale.h:81
g
const char * g
Definition: vf_curves.c:117
changes
From the filter writing tutorial: filters kept out of the FFmpeg tree are likely to have a very short lifetime due to more or less regular internal API changes, so submitting them to the FFmpeg development mailing list is highly recommended.
Definition: writing_filters.txt:8
AVDictionaryEntry::key
char * key
Definition: dict.h:82
Option::key
const char * key
Definition: cmdutils.h:289
AVMEDIA_TYPE_AUDIO
@ AVMEDIA_TYPE_AUDIO
Definition: avutil.h:202
avfilter_pad_count
int avfilter_pad_count(const AVFilterPad *pads)
Get the number of elements in a NULL-terminated array of AVFilterPads (e.g.
Definition: avfilter.c:548
sse
static int sse(MpegEncContext *s, uint8_t *src1, uint8_t *src2, int w, int h, int stride)
Definition: mpegvideo_enc.c:2558
AV_CODEC_CAP_OTHER_THREADS
#define AV_CODEC_CAP_OTHER_THREADS
Codec supports multithreading through a method other than slice- or frame-level multithreading.
Definition: codec.h:122
info
MIPS optimizations info
Definition: mips.txt:2
swr_alloc
av_cold struct SwrContext * swr_alloc(void)
Allocate SwrContext.
Definition: options.c:149
bits
uint8_t bits
Definition: vp3data.h:141
from
const char * from
Definition: jacosubdec.c:66
to
const char * to
Definition: webvttdec.c:35
form
This is the more generic form
Definition: tablegen.txt:34
AVOutputFormat::audio_codec
enum AVCodecID audio_codec
default audio codec
Definition: avformat.h:501
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
get
static void get(uint8_t *pixels, int stride, int16_t *block)
Definition: proresenc_anatoliy.c:306
reaction
From the filter design notes: except for filters that can have queued frames, request_frame() does not push frames itself; frames arrive as a reaction on the input side.
Definition: filter_design.txt:287
outputs
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:289
data
The buffer is automatically deallocated once all corresponding references have been destroyed; the characteristics of the data (resolution, sample rate, etc.) are stored in the reference.
av_get_channel_name
const char * av_get_channel_name(uint64_t channel)
Get the name of a given channel.
Definition: channel_layout.c:249
AVFilter::flags
int flags
A combination of AVFILTER_FLAG_*.
Definition: avfilter.h:188
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:202
ctx
AVFormatContext * ctx
Definition: movenc.c:48
pointers
From the undefined-behavior notes: in C, some operations are undefined, like signed integer overflow or dereferencing freed pointers.
Definition: undefined.txt:4
dump_argument
static void dump_argument(const char *a)
Definition: cmdutils.c:468
report_file_level
static int report_file_level
Definition: cmdutils.c:72
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demuxing_decoding.c:40
Slice
Definition: magicyuv.c:37
on
Fragment from the filter writing tutorial: details about what is going on once the new filter has been added to the build and tested with ffmpeg.
Definition: writing_filters.txt:34
field
From the filter writing tutorial: it is the only field you need to keep, assuming you have a context; there is some magic around this field that you do not need to care about.
Definition: writing_filters.txt:78
write_fileheader
write_fileheader() adds some minor things like a "this is a generated file" comment and some standard includes. tablegen.h defines write functions for one- and two-dimensional arrays of standard types; they print only the "core" parts so they are easier to reuse for multi-dimensional arrays.
Definition: tablegen.txt:39
av_hwdevice_get_type_name
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
Definition: hwcontext.c:92
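A minimal sketch of how this name lookup can be paired with av_hwdevice_iterate_types() to print all hardware device types known to libavutil; the helper name and output format are illustrative only.

#include <stdio.h>
#include <libavutil/hwcontext.h>

static void list_hwdevice_types(void)
{
    enum AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
    /* av_hwdevice_iterate_types() returns AV_HWDEVICE_TYPE_NONE when done. */
    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
        printf("%s\n", av_hwdevice_get_type_name(type));
}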
export
static int export(AVFilterContext *ctx, StreamContext *sc, int input)
Definition: vf_signature.c:570
codec_id
enum AVCodecID codec_id
Definition: vaapi_decode.c:369
clients
From the multithreading notes, restrictions on clients: for slice threading the client's draw_horiz_band() must be thread-safe; for frame threading the slice-threading restrictions also apply, custom get_buffer2() and get_format() callbacks must be thread-safe, and one frame of delay is added for every thread beyond the first, which clients must be able to handle.
Definition: multithreading.txt:25
parse_options
void parse_options(void *optctx, int argc, char **argv, const OptionDef *options, void(*parse_arg_function)(void *, const char *))
Definition: cmdutils.c:378
AV_OPT_FLAG_BSF_PARAM
#define AV_OPT_FLAG_BSF_PARAM
a generic parameter which can be set by the user for bit stream filtering
Definition: opt.h:292
key
const char * key
Definition: hwcontext_opencl.c:168
SwrContext
The libswresample context.
Definition: swresample_internal.h:95
AVMEDIA_TYPE_DATA
@ AVMEDIA_TYPE_DATA
Opaque data information usually continuous.
Definition: avutil.h:203
XMM_CLOBBERS
#define XMM_CLOBBERS(...)
Definition: asm.h:98
f
#define f(width, name)
Definition: cbs_vp9.c:255
pass
#define pass
Definition: fft_template.c:603
command
static int command(AVFilterContext *ctx, const char *cmd, const char *arg, char *res, int res_len, int flags)
Definition: vf_drawtext.c:873
link
From the filter design notes: the per-link format lists are references to shared objects; the negotiation mechanism computes the intersection of the formats supported at each end of a link.
Definition: filter_design.txt:23
AV_OPT_FLAG_AUDIO_PARAM
#define AV_OPT_FLAG_AUDIO_PARAM
Definition: opt.h:280
compare_codec_desc
static int compare_codec_desc(const void *a, const void *b)
Definition: cmdutils.c:1495
ff_inlink_make_frame_writable
int ff_inlink_make_frame_writable(AVFilterLink *link, AVFrame **rframe)
Make sure a frame is writable.
Definition: avfilter.c:1420
av_opt_find
const AVOption * av_opt_find(void *obj, const char *name, const char *unit, int opt_flags, int search_flags)
Look for an option in an object.
Definition: opt.c:1661
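A short sketch of the "fake object" lookup pattern: av_opt_find() is given a pointer to an AVClass pointer (here the AVCodecContext class from avcodec_get_class()) instead of an allocated context, which is enough to check whether an option name exists. The helper name codec_has_option is hypothetical.

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

static int codec_has_option(const char *name)
{
    const AVClass *cc = avcodec_get_class();
    /* AV_OPT_SEARCH_FAKE_OBJ: obj is only a double pointer to an AVClass,
     * so no context has to be allocated for the query. */
    return av_opt_find(&cc, name, NULL, 0, AV_OPT_SEARCH_FAKE_OBJ) != NULL;
}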
arg
const char * arg
Definition: jacosubdec.c:67
callback
static void callback(void *priv_data, int index, uint8_t *buf, int buf_size, int64_t time, enum dshowDeviceType devtype)
Definition: dshow.c:161
included
From the table generator documentation: the generated *_tables.h file should be included instead of the variable declarations, and since it is generated in the build tree, that path must be in the include path.
Definition: tablegen.txt:59
fields
Fragment from the filter design notes on how the activate callback examines the status fields of the filter's links.
Definition: filter_design.txt:155
if
if(ret)
Definition: filter_design.txt:179
OPT_SPEC
#define OPT_SPEC
Definition: cmdutils.h:175
finish_group
static void finish_group(OptionParseContext *octx, int group_idx, const char *arg)
Definition: cmdutils.c:647
output_streams
OutputStream ** output_streams
Definition: ffmpeg.c:153
H2
vertical halfpel samples are found by H2[y][x]
Definition: snow.txt:421
AV_IS_OUTPUT_DEVICE
#define AV_IS_OUTPUT_DEVICE(category)
Definition: log.h:55
context
From the filter writing tutorial, on AVOption definitions: name is the option name (keep it simple and lowercase), description says what the option does, and offset is the offset of the field in your filter context.
Definition: writing_filters.txt:91
AV_CODEC_CAP_FRAME_THREADS
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:108
AVFormatContext
Format I/O context.
Definition: avformat.h:1106
negotiation
From the filter design notes, on format negotiation; the word "frame" indicates either a video frame or a group of audio samples, as stored in an AVFrame structure.
Definition: filter_design.txt:12
av_log_get_level
int av_log_get_level(void)
Get the current log level.
Definition: log.c:435
need
Fragment from the table generator documentation on printing types that have no standard write function.
Definition: tablegen.txt:45
show_buildconf
int show_buildconf(void *optctx, const char *opt, const char *arg)
Print the build configuration of the program to stdout.
Definition: cmdutils.c:1191
AV_CODEC_PROP_INTRA_ONLY
#define AV_CODEC_PROP_INTRA_ONLY
Codec uses only intra compression.
Definition: codec_desc.h:72
avfilter_get_by_name
const AVFilter * avfilter_get_by_name(const char *name)
Get a filter definition matching the given name.
Definition: allfilters.c:547
quant_table
static const int16_t quant_table[64]
Definition: intrax8.c:522
init_dynload
void init_dynload(void)
Initialize dynamic library loading.
Definition: cmdutils.c:116
opts
AVDictionary * opts
Definition: movenc.c:50
OptionGroup::format_opts
AVDictionary * format_opts
Definition: cmdutils.h:316
AVStream::codecpar
AVCodecParameters * codecpar
Codec parameters associated with this stream.
Definition: avformat.h:979
main
int main(int argc, char *argv[])
Definition: avio_list_dir.c:112
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:67
avcodec_get_class
const AVClass * avcodec_get_class(void)
Get the AVClass for AVCodecContext.
Definition: options.c:174
result
From the filter design notes: forward the result (frame or status change) to the corresponding input.
fabs
static __device__ float fabs(float a)
Definition: cuda_runtime.h:182
NULL
#define NULL
Definition: coverity.c:32
flush
static void flush(AVCodecContext *avctx)
Definition: aacdec_template.c:592
frames
From the frame-multithreading notes: restrictions on codecs whose streams do not reset across frames.
Definition: multithreading.txt:36
OptionParseContext::global_opts
OptionGroup global_opts
Definition: cmdutils.h:334
avcodec_find_decoder_by_name
const AVCodec * avcodec_find_decoder_by_name(const char *name)
Find a registered decoder with the specified name.
Definition: allcodecs.c:938
Option::opt
const OptionDef * opt
Definition: cmdutils.h:288
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:536
run
uint8_t run
Definition: svq3.c:203
prepare_app_arguments
static void prepare_app_arguments(int *argc_ptr, char ***argv_ptr)
Definition: cmdutils.c:284
AVPixFmtDescriptor::nb_components
uint8_t nb_components
The number of components each pixel has, (1-4)
Definition: pixdesc.h:72
push
static void push(HysteresisContext *s, int x, int y, int w)
Definition: vf_hysteresis.c:145
pixel
uint8_t pixel
Definition: tiny_ssim.c:42
anything
From the table generator documentation: in the header file, the initialization functions should not do anything.
Definition: tablegen.txt:56
swr_get_class
const AVClass * swr_get_class(void)
Get the AVClass for SwrContext.
Definition: options.c:144
frame_wanted_out
From the filter design notes: the activate callback examines the status of the filter's links; the status of output links is stored in the frame_wanted_out field.
Definition: filter_design.txt:148
LIBAVFILTER_VERSION_MICRO
#define LIBAVFILTER_VERSION_MICRO
Definition: version.h:34
contain
Fragment from the table generator documentation on what the header file should contain.
Definition: tablegen.txt:55
transform
static const int8_t transform[32][32]
Definition: hevcdsp.c:27
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
period
From the filter writing tutorial, on AVOption definitions: option descriptions are written without a trailing period.
Definition: writing_filters.txt:89
AVOutputFormat::get_device_list
int(* get_device_list)(struct AVFormatContext *s, struct AVDeviceInfoList *device_list)
Returns a device list with its properties.
Definition: avformat.h:580
coefficients
static double coefficients[8 *8]
Definition: dctref.c:35
next_codec_for_id
static const AVCodec * next_codec_for_id(enum AVCodecID id, void **iter, int encoder)
Definition: cmdutils.c:1483
greater
static int greater(MetadataContext *s, const char *value1, const char *value2)
Definition: f_metadata.c:158
AVOutputFormat::long_name
const char * long_name
Descriptive name for the format, meant to be more human-readable than name.
Definition: avformat.h:497
show_formats_devices
static int show_formats_devices(void *optctx, const char *opt, const char *arg, int device_only, int muxdemuxers)
Definition: cmdutils.c:1280
activate
From the filter design notes: filter_frame() is the entry point for filters that do not use the activate() callback.
H
Fragment from the Snow codec specification covering halfpel interpolation, LL-band prediction and the wavelet transform (H samples).
Definition: snow.txt:555
system
FFmpeg currently uses a custom build system
Definition: build_system.txt:1
get_format
static int get_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_fmts)
Definition: qsvdec.c:51
GET_CODEC_NAME
#define GET_CODEC_NAME(id)
Definition: cmdutils.h:625
warned_cfg
static int warned_cfg
Definition: cmdutils.c:1083
av_log_set_flags
void av_log_set_flags(int arg)
Definition: log.c:445
work
Fragment from the table generator documentation on making automatic table creation work without Makefile changes.
Definition: tablegen.txt:66
src
#define src
Definition: vp8dsp.c:255
parseutils.h
INDENT
#define INDENT
Definition: cmdutils.c:1085
sws_alloc_context
struct SwsContext * sws_alloc_context(void)
Allocate an empty SwsContext.
Definition: utils.c:1086
show_muxers
int show_muxers(void *optctx, const char *opt, const char *arg)
Print a listing containing all the muxers supported by the program (including devices).
Definition: cmdutils.c:1348
L0
#define L0
Definition: hevcdec.h:60
init_opts
void init_opts(void)
Initialize the cmdutils option system, in particular allocate the *_opts contexts.
Definition: cmdutils.c:81
list
From the filter design notes: when the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection, and eventually a single format is chosen for the link from the remaining list.
Definition: filter_design.txt:25
not
Fragment from the filter design notes on request_frame() for filters with several inputs and queued frames.
Definition: filter_design.txt:259
AVBitStreamFilter::priv_class
const AVClass * priv_class
A class for the private data, used to declare bitstream filter private AVOptions.
Definition: bsf.h:117
OPT_INT64
#define OPT_INT64
Definition: cmdutils.h:170
Prediction
Prediction
Definition: magicyuv.c:42
particular
From the filter design notes: different references to the same buffer can show different characteristics.
Definition: filter_design.txt:55
AV_CODEC_CAP_VARIABLE_FRAME_SIZE
#define AV_CODEC_CAP_VARIABLE_FRAME_SIZE
Audio encoder supports receiving a different number of samples in each call.
Definition: codec.h:129
av_cpu_max_align
size_t av_cpu_max_align(void)
Get the maximum data alignment that may be required by FFmpeg.
Definition: cpu.c:230
av_parse_time
int av_parse_time(int64_t *timeval, const char *timestr, int duration)
Parse timestr and return in *time a corresponding number of microseconds.
Definition: parseutils.c:587
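A small usage sketch, assuming the duration form of the parser (duration=1): the result is returned in microseconds.

#include <stdio.h>
#include <inttypes.h>
#include <libavutil/parseutils.h>

int main(void)
{
    int64_t us;
    /* duration=1 parses a duration such as "[-]HH:MM:SS[.m...]";
     * on success *timeval holds the value in microseconds. */
    if (av_parse_time(&us, "00:01:30.5", 1) < 0)
        return 1;
    printf("%"PRId64" us\n", us); /* 90500000 */
    return 0;
}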
abs
#define abs(x)
Definition: cuda_runtime.h:35
filter_frame
static int filter_frame(DBEDecodeContext *s, AVFrame *frame)
Definition: dolby_e.c:1049
AVOutputFormat::priv_class
const AVClass * priv_class
AVClass for the private context.
Definition: avformat.h:519
nb_input_streams
int nb_input_streams
Definition: ffmpeg.c:149
write_option
static int write_option(void *optctx, const OptionDef *po, const char *opt, const char *arg)
Definition: cmdutils.c:290
av_get_standard_channel_layout
int av_get_standard_channel_layout(unsigned index, uint64_t *layout, const char **name)
Get the value and name of a standard channel layout.
Definition: channel_layout.c:285
OptionGroup::opts
Option * opts
Definition: cmdutils.h:312
AVPixFmtDescriptor::flags
uint64_t flags
Combination of AV_PIX_FMT_FLAG_...
Definition: pixdesc.h:95
OptionGroup
Definition: cmdutils.h:308
resample
static int resample(SwrContext *s, AudioData *out_param, int out_count, const AudioData *in_param, int in_count)
Definition: swresample.c:487
ff_inlink_acknowledge_status
int ff_inlink_acknowledge_status(AVFilterLink *link, int *rstatus, int64_t *rpts)
Test and acknowledge the change of status on the link.
Definition: avfilter.c:1331
diff_bytes
static void diff_bytes(HYuvContext *s, uint8_t *dst, const uint8_t *src0, const uint8_t *src1, int w)
Definition: huffyuvenc.c:41
Range
Definition: vf_colorbalance.c:38
swresample.h
c
Fragment from the undefined-behavior notes on signed integer overflow in speed-critical code.
Definition: undefined.txt:32
converted
From the swscale guide: when no more output can be generated because lines from a future slice would be needed, all remaining lines in the current slice are converted.
Definition: swscale.txt:46
H1
Fragment from the Snow codec specification covering halfpel interpolation, LL-band prediction and the wavelet transform (H1 samples).
Definition: snow.txt:554
input_files
InputFile ** input_files
Definition: ffmpeg.c:150
AV_OPT_SEARCH_FAKE_OBJ
#define AV_OPT_SEARCH_FAKE_OBJ
The obj passed to av_opt_find() is fake – only a double pointer to AVClass instead of a required poin...
Definition: opt.h:568
av_bprint_is_complete
static int av_bprint_is_complete(const AVBPrint *buf)
Test if the print buffer is complete (not truncated).
Definition: bprint.h:185
AVCodecID
AVCodecID
Identify the syntax and semantics of the bitstream.
Definition: codec_id.h:46
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
avcodec_find_decoder
const AVCodec * avcodec_find_decoder(enum AVCodecID id)
Find a registered decoder with a matching codec ID.
Definition: allcodecs.c:910
AVFILTER_FLAG_DYNAMIC_OUTPUTS
#define AVFILTER_FLAG_DYNAMIC_OUTPUTS
The number of the filter outputs is not determined just by AVFilter.outputs.
Definition: avfilter.h:112
methods
FFmpeg multithreading methods
Definition: multithreading.txt:2
source
From the filter design notes: for a source filter, request_frame() should directly call filter_frame() on the corresponding output.
Definition: filter_design.txt:255
AV_CODEC_CAP_CHANNEL_CONF
#define AV_CODEC_CAP_CHANNEL_CONF
Codec should fill in channel configuration and samplerate instead of container.
Definition: codec.h:104
http
Fragment from the filter writing tutorial (example ffmpeg command line with an http input).
Definition: writing_filters.txt:29
ff_thread_release_buffer
void ff_thread_release_buffer(AVCodecContext *avctx, ThreadFrame *f)
Wrapper around release_buffer() for frame-multithreaded codecs.
Definition: pthread_frame.c:1097
locate_option
int locate_option(int argc, char **argv, const OptionDef *options, const char *optname)
Return index of option opt in argv or 0 if not found.
Definition: cmdutils.c:442
av_codec_is_decoder
int av_codec_is_decoder(const AVCodec *codec)
Definition: utils.c:79
interleave
static void interleave(uint8_t *dst, uint8_t *src, int w, int h, int dst_linesize, int src_linesize, enum FilterMode mode, int swap)
Definition: vf_il.c:114
FF_FILTER_FORWARD_STATUS_ALL
FF_FILTER_FORWARD_STATUS_ALL(outlink, filter)
codec_opts
AVDictionary * codec_opts
Definition: cmdutils.c:69
options
const OptionDef options[]
eval.h
show_help_demuxer
static void show_help_demuxer(const char *name)
Definition: cmdutils.c:1840
minimum
static float minimum(float src0, float src1)
Definition: dnn_backend_native_layer_mathbinary.c:48
blur
static void blur(uint8_t *dst, int dst_step, const uint8_t *src, int src_step, int len, int radius, int pixsize)
Definition: vf_boxblur.c:160
H3
Vertical+horizontal halfpel samples are found by H3[y][x].
Definition: snow.txt:427
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
get_audio_buffer
static AVFrame * get_audio_buffer(AVFilterLink *inlink, int nb_samples)
Definition: avf_concat.c:208
AV_SAMPLE_FMT_NB
@ AV_SAMPLE_FMT_NB
Number of sample formats. DO NOT USE if linking dynamically.
Definition: samplefmt.h:74
show_help
int show_help(void *optctx, const char *opt, const char *arg)
Generic -h handler common to all fftools.
Definition: cmdutils.c:1984
AVMediaType
AVMediaType
Definition: avutil.h:199
av_log_set_callback
void av_log_set_callback(void(*callback)(void *, int, const char *, va_list))
Set the logging callback.
Definition: log.c:455
ff_inlink_set_status
void ff_inlink_set_status(AVFilterLink *link, int status)
Set the status on an input link.
Definition: avfilter.c:1510
avformat_match_stream_specifier
int avformat_match_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec)
Check if the stream st contained in s is matched by the stream specifier spec.
Definition: utils.c:5144
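A sketch of the usual calling pattern: the specifier is tested against every stream of an opened AVFormatContext, a positive return meaning the stream matches, zero that it does not, and a negative value an invalid specifier. The helper name count_matching_streams is hypothetical; count_matching_streams(ic, "a") would count all audio streams.

#include <libavformat/avformat.h>

static int count_matching_streams(AVFormatContext *ic, const char *spec)
{
    int n = 0;
    for (unsigned i = 0; i < ic->nb_streams; i++) {
        int ret = avformat_match_stream_specifier(ic, ic->streams[i], spec);
        if (ret < 0)
            return ret; /* invalid stream specifier */
        n += ret > 0;
    }
    return n;
}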
scroll
static void scroll(AVFilterContext *ctx, AVFrame *in, AVFrame *out)
Definition: vf_scroll.c:111
copy
static void copy(const float *p1, float *p2, const int length)
Definition: vf_vaguedenoiser.c:194
Header
@ Header
Definition: mxfdec.c:65
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:326
AVClass::category
AVClassCategory category
Category used for visualization (like color) This is only set if the category is equal for all object...
Definition: log.h:120
output_files
OutputFile ** output_files
Definition: ffmpeg.c:155
cpu.h
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:119
FFMAX
#define FFMAX(a, b)
Definition: common.h:103
get_preset_file
FILE * get_preset_file(char *filename, size_t filename_size, const char *preset_name, int is_path, const char *codec_name)
Get a file corresponding to a preset file.
Definition: cmdutils.c:2033
PRINT_CODEC_SUPPORTED
#define PRINT_CODEC_SUPPORTED(codec, field, type, list_name, term, get_name)
Definition: cmdutils.c:1363
AV_SAMPLE_FMT_NONE
@ AV_SAMPLE_FMT_NONE
Definition: samplefmt.h:59
sample
#define sample
Definition: flacdsp_template.c:44
av_output_video_device_next
const AVOutputFormat * av_output_video_device_next(const AVOutputFormat *d)
Video output devices iterator.
Definition: alldevices.c:138
uninit_opts
void uninit_opts(void)
Uninitialize the cmdutils option system, in particular free the *_opts contexts and their contents.
Definition: cmdutils.c:86
size
int size
Definition: twinvq_data.h:10344
print_codecs
static void print_codecs(int encoder)
Definition: cmdutils.c:1594
section
Definition: ffprobe.c:146
swr_free
av_cold void swr_free(SwrContext **ss)
Free the given SwrContext and set the pointer to NULL.
Definition: swresample.c:137
AV_PIX_FMT_FLAG_BITSTREAM
#define AV_PIX_FMT_FLAG_BITSTREAM
All values of a component are bit-wise packed end to end.
Definition: pixdesc.h:125
av_frame_is_writable
int av_frame_is_writable(AVFrame *frame)
Check if the frame data is writable.
Definition: frame.c:472
FFDIFFSIGN
#define FFDIFFSIGN(x, y)
Comparator.
Definition: common.h:101
does
Fragment from the filter writing tutorial on the build-system entries and the Doxygen file header for the new filter.
Definition: writing_filters.txt:66
setup_find_stream_info_opts
AVDictionary ** setup_find_stream_info_opts(AVFormatContext *s, AVDictionary *codec_opts)
Setup AVCodecContext options for avformat_find_stream_info().
Definition: cmdutils.c:2149
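A sketch of how the returned array of per-stream dictionaries is typically consumed and released; it assumes an fftools-style build (cmdutils.h available), an opened AVFormatContext ic and user codec options in codec_opts, with error handling trimmed for brevity.

#include <libavformat/avformat.h>
#include <libavutil/dict.h>
#include <libavutil/mem.h>
#include "cmdutils.h"

static int probe_streams(AVFormatContext *ic, AVDictionary *codec_opts)
{
    AVDictionary **opts = setup_find_stream_info_opts(ic, codec_opts);
    int ret = avformat_find_stream_info(ic, opts);
    /* one dictionary per stream was allocated; free them all */
    for (unsigned i = 0; i < ic->nb_streams; i++)
        av_dict_free(&opts[i]);
    av_freep(&opts);
    return ret;
}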
GET_SAMPLE_FMT_NAME
#define GET_SAMPLE_FMT_NAME(sample_fmt)
Definition: cmdutils.h:628
swscale
static int swscale(SwsContext *c, const uint8_t *src[], int srcStride[], int srcSliceY, int srcSliceH, uint8_t *dst[], int dstStride[])
Definition: swscale.c:238
av_demuxer_iterate
const AVInputFormat * av_demuxer_iterate(void **opaque)
Iterate over all registered demuxers.
Definition: allformats.c:554
printf
printf("static const uint8_t my_array[100] = {\n")
gray
Fragment from the swscale guide on the ring buffer handling and the conversion of non-planar or gray input to planar YUV.
Definition: swscale.txt:52
show_protocols
int show_protocols(void *optctx, const char *opt, const char *arg)
Print a listing containing all the protocols supported by the program.
Definition: cmdutils.c:1657
av_log_get_flags
int av_log_get_flags(void)
Definition: log.c:450
avdevice.h
AVFilter::description
const char * description
A description of the filter.
Definition: avfilter.h:156
avdevice_free_list_devices
void avdevice_free_list_devices(AVDeviceInfoList **device_list)
Convenient function to free result of avdevice_list_devices().
Definition: avdevice.c:145
header
static const uint8_t header[24]
Definition: sdr2.c:67
AV_OPT_SEARCH_CHILDREN
#define AV_OPT_SEARCH_CHILDREN
Search in possible children of the given object first.
Definition: opt.h:560
split
static char * split(char *message, char delim)
Definition: af_channelmap.c:81
CONFIG_FOOBAR_FILTER
#define CONFIG_FOOBAR_FILTER
encode
static void encode(AVCodecContext *ctx, AVFrame *frame, AVPacket *pkt, FILE *output)
Definition: encode_audio.c:95
height
#define height
FFMIN
#define FFMIN(a, b)
Definition: common.h:105
a
From the undefined-behavior notes: SUINT is the C unsigned type, but it is used to hold a signed int value; this avoids confusion about the intended signedness.
Definition: undefined.txt:41
H
#define H
Definition: pixlet.c:39
AV_CODEC_CAP_SLICE_THREADS
#define AV_CODEC_CAP_SLICE_THREADS
Codec supports slice-based (or partition-based) multithreading.
Definition: codec.h:112
offset
Fragment from the filter writing tutorial on the AVOption offset field.
Definition: writing_filters.txt:86
line
Definition: graph2dot.c:48
FF_FILTER_FORWARD_WANTED
FF_FILTER_FORWARD_WANTED(outlink, inlink)
attributes.h
av_pix_fmt_desc_get_id
enum AVPixelFormat av_pix_fmt_desc_get_id(const AVPixFmtDescriptor *desc)
Definition: pixdesc.c:2560
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:203
show_devices
int show_devices(void *optctx, const char *opt, const char *arg)
Print a listing containing all the devices supported by the program.
Definition: cmdutils.c:1358
av_strstart
int av_strstart(const char *str, const char *pfx, const char **ptr)
Return non-zero if pfx is a prefix of str.
Definition: avstring.c:34
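A tiny usage sketch: check for a URL scheme prefix and, when it matches, get a pointer to the remainder of the string. The helper name is hypothetical.

#include <libavutil/avstring.h>

static int is_rtsp_url(const char *url, const char **rest)
{
    /* Returns non-zero if url begins with "rtsp://"; *rest then points
     * just past the prefix (e.g. "host:554/stream"). */
    return av_strstart(url, "rtsp://", rest);
}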
that
From the filter design notes: if request_frame() could not produce a frame because there are no more, it should return AVERROR_EOF; the typical implementation for a filter with several inputs looks like this.
Definition: filter_design.txt:273
N
#define N
Definition: af_mcompand.c:54
fact
static double fact(double i)
Definition: af_aiir.c:952
va_copy
#define va_copy(dst, src)
Definition: va_copy.h:31
version
version
Definition: libkvazaar.c:306
predictor
static void predictor(uint8_t *src, ptrdiff_t size)
Definition: exrenc.c:163
AVDeviceInfo::device_description
char * device_description
human friendly name
Definition: avdevice.h:459
show_pix_fmts
int show_pix_fmts(void *optctx, const char *opt, const char *arg)
Print a listing containing all the pixel formats supported by the program.
Definition: cmdutils.c:1733
input
Fragment from the filter design notes on forwarding output status to the corresponding inputs and returning FFERROR_NOT_READY.
Definition: filter_design.txt:172
interpolation
static int interpolation(DeclickChannel *c, const double *src, int ar_order, double *acoefficients, int *index, int nb_errors, double *auxiliary, double *interpolated)
Definition: af_adeclick.c:365
Y
#define Y
Definition: boxblur.h:38
help
static void help(void)
Definition: dct.c:451
introduced
From the undefined-behavior notes: to avoid confusion about the intended signedness, a new type (SUINT) was introduced.
Definition: undefined.txt:38
AVOutputFormat::mime_type
const char * mime_type
Definition: avformat.h:498
XMM_CLOBBERS_ONLY
#define XMM_CLOBBERS_ONLY(...)
Definition: asm.h:99
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:192
implementations
From the frame-multithreading notes: restrictions on codec implementations.
Definition: multithreading.txt:29
distribution
From the filter writing tutorial: out-of-tree filters are likely to have a very short lifetime and a limited distribution due to regular internal API changes.
Definition: writing_filters.txt:8
show_sample_fmts
int show_sample_fmts(void *optctx, const char *opt, const char *arg)
Print a listing containing all the sample formats supported by the program.
Definition: cmdutils.c:1795
avcodec_descriptor_next
const AVCodecDescriptor * avcodec_descriptor_next(const AVCodecDescriptor *prev)
Iterate over all codec descriptors known to libavcodec.
Definition: codec_desc.c:3514
AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
#define AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
Some filters support a generic "enable" expression option that can be used to enable or disable a fil...
Definition: avfilter.h:126
b
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not b
Definition: undefined.txt:32
show_banner
void show_banner(int argc, char **argv, const OptionDef *options)
Print the program banner to stderr.
Definition: cmdutils.c:1171
av_codec_is_encoder
int av_codec_is_encoder(const AVCodec *codec)
Definition: utils.c:74
layout
From the filter design notes: for audio, the supported-format lists cover the channel layout (as well as sample format and sample rate).
Definition: filter_design.txt:18
program_exit
static void(* program_exit)(int ret)
Definition: cmdutils.c:125
flag
#define flag(name)
Definition: cbs_av1.c:553
register_exit
void register_exit(void(*cb)(int ret))
Register a program-specific cleanup routine.
Definition: cmdutils.c:127
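A minimal sketch of how an fftools-style program might hook its cleanup into this mechanism; the cleanup function name my_cleanup is hypothetical, and cmdutils.h is assumed to be on the include path.

#include "cmdutils.h"

static void my_cleanup(int ret)
{
    /* free global state, close output files, ... */
    (void)ret;
}

int main(int argc, char **argv)
{
    register_exit(my_cleanup);
    /* ... later, on a fatal error: */
    exit_program(1); /* calls my_cleanup(1), then exit(1) */
    return 0;
}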
GET_CH_LAYOUT_DESC
#define GET_CH_LAYOUT_DESC(ch_layout)
Definition: cmdutils.h:639
AV_CODEC_PROP_LOSSLESS
#define AV_CODEC_PROP_LOSSLESS
Codec supports lossless compression.
Definition: codec_desc.h:82
tests
const TestCase tests[]
Definition: fifo_muxer.c:243
less
static int less(MetadataContext *s, const char *value1, const char *value2)
Definition: f_metadata.c:148
av_log_set_level
void av_log_set_level(int level)
Set the log level.
Definition: log.c:440
Type
Type
Definition: vf_idet.h:29
bprint.h
i
int i
Definition: input.c:407
AV_CODEC_ID_NONE
@ AV_CODEC_ID_NONE
Definition: codec_id.h:47
AVOutputFormat
Definition: avformat.h:490
code
From the filter design notes: if the filter stores one or a few frames internally for some input, it can consider them part of the FIFO and delay acknowledging a status change accordingly; see the example code.
Definition: filter_design.txt:178
print_all_libs_info
static void print_all_libs_info(int flags, int level)
Definition: cmdutils.c:1119
round
static av_always_inline av_const double round(double x)
Definition: libm.h:444
OPT_TIME
#define OPT_TIME
Definition: cmdutils.h:176
swr_opts
AVDictionary * swr_opts
Definition: cmdutils.c:68
LIBAVFILTER_VERSION_MINOR
#define LIBAVFILTER_VERSION_MINOR
Definition: version.h:33
available
if no frame is available
Definition: filter_design.txt:166
Code
One code in hash table.
Definition: lzwenc.c:42
display.h
needed
From the filter design notes: macros are available to factor out the forwarding code when no extra processing is needed.
Definition: filter_design.txt:212
AVSampleFormat
AVSampleFormat
Audio sample formats.
Definition: samplefmt.h:58
delta
float delta
Definition: vorbis_enc_data.h:457
draw_edges
From the frame-multithreading notes: draw_edges() needs to be called before reporting progress.
filter_codec_opts
AVDictionary * filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id, AVFormatContext *s, AVStream *st, const AVCodec *codec)
Filter out options for given codec.
Definition: cmdutils.c:2091
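A sketch of typical use when opening a decoder for one stream: the per-stream subset of the global codec options is built, handed to avcodec_open2(), and whatever was not consumed stays in the returned dictionary. It assumes an fftools-style build (cmdutils.h), an opened AVFormatContext ic, global codec_opts and an allocated decoder context; error handling is omitted and the helper name is hypothetical.

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include "cmdutils.h"

static int open_stream_decoder(AVCodecContext *dec_ctx, AVFormatContext *ic,
                               int idx, AVDictionary *codec_opts)
{
    AVStream *st = ic->streams[idx];
    const AVCodec *dec = avcodec_find_decoder(st->codecpar->codec_id);
    AVDictionary *opts = filter_codec_opts(codec_opts, st->codecpar->codec_id,
                                           ic, st, dec);
    int ret = avcodec_open2(dec_ctx, dec, &opts);
    av_dict_free(&opts); /* leftover entries were not recognized */
    return ret;
}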
ilog2
From the Snow codec specification: ilog2(x) is the rounded-down base-2 logarithm of x, with ilog2(0) = 0.
Definition: snow.txt:23
value
Fragment from the filter writing tutorial on the AVOption default value field.
Definition: writing_filters.txt:86
av_toupper
static av_const int av_toupper(int c)
Locale-independent conversion of ASCII characters to uppercase.
Definition: avstring.h:236
AVMEDIA_TYPE_ATTACHMENT
@ AVMEDIA_TYPE_ATTACHMENT
Opaque data information usually sparse.
Definition: avutil.h:205
AV_OPT_FLAG_DECODING_PARAM
#define AV_OPT_FLAG_DECODING_PARAM
a generic parameter which can be set by the user for demuxing or decoding
Definition: opt.h:279
CONFIG_HARDCODED_TABLES
#define CONFIG_HARDCODED_TABLES
Definition: aacps_tablegen_template.c:25
description
Fragment from the filter writing tutorial on the AVOption description field.
Definition: writing_filters.txt:86
SUINT
#define SUINT
Definition: dct32_template.c:30
SHOW_MUXERS
@ SHOW_MUXERS
Definition: cmdutils.c:78
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:243
get_codecs_sorted
static unsigned get_codecs_sorted(const AVCodecDescriptor ***rcodecs)
Definition: cmdutils.c:1504
filter
Fragment from the filter writing tutorial on wiring the new vf_foobar filter into the build system (Makefile and allfilters.c entries).
Definition: writing_filters.txt:60
pix_sum
static int pix_sum(uint8_t *pix, int line_size, int w, int h)
Definition: snowenc.c:158
OPT_OUTPUT
#define OPT_OUTPUT
Definition: cmdutils.h:179
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:204
update_thread_context
From the frame-multithreading notes: have update_thread_context() run in the next thread and add AV_CODEC_CAP_FRAME_THREADS to the codec capabilities; there will be very little speed gain at this point, but it should work.
CONTEXT
#define CONTEXT
Definition: af_asetrate.c:31
opt_timelimit
int opt_timelimit(void *optctx, const char *opt, const char *arg)
Limit the execution time.
Definition: cmdutils.c:1060
OPT_OFFSET
#define OPT_OFFSET
Definition: cmdutils.h:174
mv_scale
static av_always_inline void mv_scale(Mv *dst, Mv *src, int td, int tb)
Definition: hevc_mvs.c:115
headroom
static int headroom(int *la)
Definition: nellymoser.c:104
plain
static const uint8_t plain[]
Definition: aes_ctr.c:24
opt_max_alloc
int opt_max_alloc(void *optctx, const char *opt, const char *arg)
Definition: cmdutils.c:1046
nb_output_files
int nb_output_files
Definition: ffmpeg.c:156
OptionParseContext::groups
OptionGroupList * groups
Definition: cmdutils.h:336
av_codec_iterate
const AVCodec * av_codec_iterate(void **opaque)
Iterate over all registered codecs.
Definition: allcodecs.c:862
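A short sketch of the iteration pattern, here filtered down to encoders with av_codec_is_encoder(); the opaque iteration state must start out as NULL, and the helper name is hypothetical.

#include <stdio.h>
#include <libavcodec/avcodec.h>

static void list_encoders(void)
{
    void *iter = NULL;
    const AVCodec *c;
    while ((c = av_codec_iterate(&iter)))
        if (av_codec_is_encoder(c))
            printf("%s\n", c->name);
}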
log2
#define log2(x)
Definition: libm.h:404
needed
From the swscale guide: for each output line, the vertical scaler pulls lines from a ring buffer; when the ring buffer does not contain a wanted line, it is pulled from the input slice through the input converter and horizontal scaler, until lines from a future slice would be needed.
Definition: swscale.txt:45
parse_optgroup
int parse_optgroup(void *optctx, OptionGroup *g)
Parse an options group and write results into optctx.
Definition: cmdutils.c:409
OptionDef::u
union OptionDef::@1 u
parse_loglevel
void parse_loglevel(int argc, char **argv, const OptionDef *options)
Find the '-loglevel' option in the command line args and apply it.
Definition: cmdutils.c:501
AVInputFormat::get_device_list
int(* get_device_list)(struct AVFormatContext *s, struct AVDeviceInfoList *device_list)
Returns a device list with its properties.
Definition: avformat.h:744
AVFilter
Filter definition.
Definition: avfilter.h:145
version.h
OptionGroup::sws_dict
AVDictionary * sws_dict
Definition: cmdutils.h:318
directory
From the build system notes: DBG preprocesses x86 external assembler files to a .dbg.asm file in the object directory.
Definition: build_system.txt:12
language
From the notes on undefined behavior in the C language.
Definition: undefined.txt:3
SpecifierOpt
Definition: cmdutils.h:146
OptionGroup::resample_opts
AVDictionary * resample_opts
Definition: cmdutils.h:317
array
static int array[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:106
G
#define G
Definition: huffyuvdsp.h:33
files
From the table generator documentation: this documentation is preliminary and parts of the API should be changed; basically, a table generator consists of two files.
Definition: tablegen.txt:8
ret
ret
Definition: filter_design.txt:187
AVStream
Stream structure.
Definition: avformat.h:832
AV_LOG_FATAL
#define AV_LOG_FATAL
Something went wrong and recovery is not possible.
Definition: log.h:175
pixfmt
enum AVPixelFormat pixfmt
Definition: kmsgrab.c:365
__asm__
__asm__(".macro parse_r var r\n\t" "\\var = -1\n\t" _IFC_REG(0) _IFC_REG(1) _IFC_REG(2) _IFC_REG(3) _IFC_REG(4) _IFC_REG(5) _IFC_REG(6) _IFC_REG(7) _IFC_REG(8) _IFC_REG(9) _IFC_REG(10) _IFC_REG(11) _IFC_REG(12) _IFC_REG(13) _IFC_REG(14) _IFC_REG(15) _IFC_REG(16) _IFC_REG(17) _IFC_REG(18) _IFC_REG(19) _IFC_REG(20) _IFC_REG(21) _IFC_REG(22) _IFC_REG(23) _IFC_REG(24) _IFC_REG(25) _IFC_REG(26) _IFC_REG(27) _IFC_REG(28) _IFC_REG(29) _IFC_REG(30) _IFC_REG(31) ".iflt \\var\n\t" ".error \"Unable to parse register name \\r\"\n\t" ".endif\n\t" ".endm")
pred
static const float pred[4]
Definition: siprdata.h:259
currently
Fragment from the swscale guide on the converters used to turn non-planar input into planar YUV.
Definition: swscale.txt:54
read_yesno
int read_yesno(void)
Return a positive value if a line read from standard input starts with [yY], otherwise return 0.
Definition: cmdutils.c:2022
links
From the filter design notes: supported-format lists exist for each input and each output link.
Definition: filter_design.txt:14
av_strtod
double av_strtod(const char *numstr, char **tail)
Parse the string in numstr and return its value as a double.
Definition: eval.c:106
frame
Fragment from the filter design notes on request_frame(): push a queued frame if one is available, or at least make progress towards producing one.
Definition: filter_design.txt:264
comment
static int FUNC() comment(CodedBitstreamContext *ctx, RWContext *rw, JPEGRawComment *current)
Definition: cbs_jpeg_syntax_template.c:174
av_strlcat
size_t av_strlcat(char *dst, const char *src, size_t size)
Append the string src to the string dst, but to a total length of no more than size - 1 bytes,...
Definition: avstring.c:93
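A small sketch of the truncation behaviour: the destination always stays 0-terminated (for size > 0), and a return value greater than or equal to size signals that the concatenated string did not fit. The string contents are illustrative.

#include <stdio.h>
#include <libavutil/avstring.h>

int main(void)
{
    char buf[16] = "preset";
    size_t n = av_strlcat(buf, "/h264-fast.ffpreset", sizeof(buf));
    if (n >= sizeof(buf))
        printf("truncated: \"%s\"\n", buf);
    return 0;
}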
OptionGroup::arg
const char * arg
Definition: cmdutils.h:310
AVDeviceInfoList
List of devices.
Definition: avdevice.h:465
uninit_parse_context
void uninit_parse_context(OptionParseContext *octx)
Free all allocated memory in an OptionParseContext.
Definition: cmdutils.c:712
log_callback_help
void log_callback_help(void *ptr, int level, const char *fmt, va_list vl)
Trivial log callback.
Definition: cmdutils.c:95
OPT_PERFILE
#define OPT_PERFILE
Definition: cmdutils.h:173
av_opt_get_key_value
int av_opt_get_key_value(const char **ropts, const char *key_val_sep, const char *pairs_sep, unsigned flags, char **rkey, char **rval)
Extract a key-value pair from the beginning of a string.
Definition: opt.c:1537
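A sketch of the usual parsing loop over a "key=value:key=value" string: each call returns one freshly allocated key/value pair, the pairs separator is skipped by the caller, and both strings must be released with av_free(). The helper name is hypothetical; parse_pairs("flags=bicubic:threads=4") would print two pairs.

#include <stdio.h>
#include <libavutil/opt.h>
#include <libavutil/mem.h>

static void parse_pairs(const char *str)
{
    const char *p = str;
    while (*p) {
        char *key, *val;
        if (av_opt_get_key_value(&p, "=", ":", 0, &key, &val) < 0)
            break;
        if (*p)
            p++; /* skip the ':' between pairs */
        printf("%s -> %s\n", key, val);
        av_free(key);
        av_free(val);
    }
}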
avformat.h
av_stream_get_side_data
uint8_t * av_stream_get_side_data(const AVStream *stream, enum AVPacketSideDataType type, size_t *size)
Get side information from stream.
Definition: utils.c:5323
av_bprintf
void av_bprintf(AVBPrint *buf, const char *fmt,...)
Definition: bprint.c:94
dict.h
ff_thread_finish_setup
Fragment from the frame-multithreading notes on when to call ff_thread_finish_setup().
av_get_channel_description
const char * av_get_channel_description(uint64_t channel)
Get the description of a given channel.
Definition: channel_layout.c:260
AV_LOG_SKIP_REPEATED
#define AV_LOG_SKIP_REPEATED
Skip repeated messages, this requires the user app to use av_log() instead of (f)printf as the 2 woul...
Definition: log.h:371
left
Excerpt from the Snow codec specification: it covers the half-pel interpolation filter coefficients (hcoeff), reference-frame and wavelet parameters, the high-level bitstream structure, the binary range coder and its state transition table, and the neighboring-block rules; the 'left' and 'top' neighbors are set to the respective blocks, or to the null block when they lie outside the image, and motion vectors of neighboring blocks are scaled to compensate for differing reference frames before taking their median as the prediction.
Definition: snow.txt:386
add_opt
static void add_opt(OptionParseContext *octx, const OptionDef *opt, const char *key, const char *val)
Definition: cmdutils.c:678
show_codecs
int show_codecs(void *optctx, const char *opt, const char *arg)
Print a listing containing all the codecs supported by the program.
Definition: cmdutils.c:1538
init_report
static int init_report(const char *env)
Definition: cmdutils.c:963
avfilter_pad_get_type
enum AVMediaType avfilter_pad_get_type(const AVFilterPad *pads, int pad_idx)
Get the type of an AVFilterPad.
Definition: avfilter.c:937
L
#define L(x)
Definition: vp56_arith.h:36
AVCodecContext
main external API structure.
Definition: avcodec.h:501
compare
static float compare(const AVFrame *haystack, const AVFrame *obj, int offx, int offy)
Definition: vf_find_rect.c:106
av_muxer_iterate
const AVOutputFormat * av_muxer_iterate(void **opaque)
Iterate over all registered muxers.
Definition: allformats.c:537
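A sketch: the opaque pointer keeps the iteration state and must start out as NULL:
    void *it = NULL;
    const AVOutputFormat *ofmt;

    while ((ofmt = av_muxer_iterate(&it)))
        av_log(NULL, AV_LOG_INFO, "muxer: %s\n", ofmt->name);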
parse_option
int parse_option(void *optctx, const char *opt, const char *arg, const OptionDef *options)
Parse one given option.
Definition: cmdutils.c:345
get_rotation
double get_rotation(AVStream *st)
Definition: cmdutils.c:2188
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Non-inlined equivalent of av_mallocz_array().
Definition: mem.c:251
AVDeviceInfoList::default_device
int default_device
index of default device or -1 if no default
Definition: avdevice.h:468
av_opt_child_class_iterate
const AVClass * av_opt_child_class_iterate(const AVClass *parent, void **iter)
Iterate over potential AVOptions-enabled children of parent.
Definition: opt.c:1720
opt_cpuflags
int opt_cpuflags(void *optctx, const char *opt, const char *arg)
Override the cpuflags.
Definition: cmdutils.c:844
again
Excerpt from the libavfilter design notes on format negotiation: the lists of supported formats for each input and output (pixel formats for video, channel layouts and sample formats for audio) are references to shared objects; when the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection, and the same happens again when a single format is eventually chosen for a link amongst the remaining ones.
Definition: filter_design.txt:25
if
if(!keyframe)
Definition: snow.txt:61
sws_freeContext
void sws_freeContext(struct SwsContext *swsContext)
Free the swscaler context swsContext.
Definition: utils.c:2241
AVBitStreamFilter
Definition: bsf.h:98
buffer
Excerpt from the libavfilter design notes: the frame and frame-reference mechanism is intended to avoid, as much as possible, expensive copies of data while still allowing the filters to produce correct results; the data is stored in buffers represented by AVFrame structures, and several references can point to the same frame buffer.
Definition: filter_design.txt:49
SHOW_DEMUXERS
@ SHOW_DEMUXERS
Definition: cmdutils.c:77
get_buffer
Excerpt from the frame-threading notes: buffer contents must not be read prematurely, and code calling get_buffer() must be moved up to before the decode process starts.
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
Compensation
Motion Compensation
Definition: snow.txt:418
Transform
Definition: deshake.h:47
headers
Excerpt from the build-system documentation: FFmpeg uses a custom Makefile-based build, and the text documents some of its obscure features and options, such as DBG (preprocess x86 external assembler files to a .dbg.asm file in the object directory, which then gets compiled; helps in developing those assembler files), DESTDIR (destination directory for the install, useful to prepare packages or install FFmpeg in cross environments), GEN (set to '1' to generate missing or mismatched references), the default target that builds all the libraries and the executables, 'fate' (run the FATE regression tests, which must be installed first), 'fate-list' (list all FATE regression-test targets), and 'install' (install headers, ...).
Definition: build_system.txt:34
avfilter.h
av_match_name
int av_match_name(const char *name, const char *names)
Match instances of a name in a comma-separated list of names.
Definition: avstring.c:353
video
A Quick Description Of Rate Distortion Theory We want to encode a video
Definition: rate_distortion.txt:3
test
static void test(const char *pattern, const char *host)
Definition: noproxy.c:23
av_dict_parse_string
int av_dict_parse_string(AVDictionary **pm, const char *str, const char *key_val_sep, const char *pairs_sep, int flags)
Parse the key/value pairs list and add the parsed entries to a dictionary.
Definition: dict.c:180
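A minimal sketch; the option names are illustrative, and the resulting dictionary would typically be handed to an avformat/avcodec open call:
    AVDictionary *opts = NULL;

    if (av_dict_parse_string(&opts, "threads=4:refcounted_frames=1", "=", ":", 0) < 0)
        av_log(NULL, AV_LOG_ERROR, "invalid option string\n");
    /* ... use opts, e.g. pass it to avcodec_open2() ... */
    av_dict_free(&opts);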
values
Excerpt from the libavfilter design notes on request_frame(): this method is called when a frame is wanted on an output; a source should directly call filter_frame() on the corresponding output, a filter with queued frames should push one of them, and otherwise the filter should request a frame on one of its inputs, repeatedly until at least one frame has been pushed. Return values ...
Definition: filter_design.txt:263
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:107
AVOutputFormat::video_codec
enum AVCodecID video_codec
default video codec
Definition: avformat.h:502
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: codec.h:77
samples
In the libavfilter design notes, the word “frame” indicates either a video frame or a group of audio samples.
Definition: filter_design.txt:8
mean
static float mean(const float *input, int size)
Definition: vf_nnedi.c:864
av_find_input_format
const AVInputFormat * av_find_input_format(const char *short_name)
Find AVInputFormat based on the short name of the input format.
Definition: format.c:118
Option::val
const char * val
Definition: cmdutils.h:290
note
Excerpt from the Snow codec specification: half-pel and smaller-pel interpolation rules, LL-band prediction (each sample in the LL0 subband is predicted by the median of the left, top and left+top-topleft samples), dequantization, and the 5/3 and 9/7 integer wavelet transforms used by the 2D IDWT, which recursively combines the four lowest-frequency subbands until a single subband remains.
Definition: snow.txt:555
GROW_ARRAY
#define GROW_ARRAY(array, nb_elems)
Definition: cmdutils.h:619
IDCT
#define IDCT(H)
Definition: hevcdsp_template.c:240
avcodec_get_hw_config
const AVCodecHWConfig * avcodec_get_hw_config(const AVCodec *codec, int index)
Retrieve supported hardware configurations for a codec.
Definition: utils.c:848
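A sketch of probing which hardware device types a decoder advertises; H.264 is just an example codec:
    #include "libavcodec/avcodec.h"
    #include "libavutil/hwcontext.h"

    const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_H264);

    if (codec) {
        for (int i = 0;; i++) {
            const AVCodecHWConfig *cfg = avcodec_get_hw_config(codec, i);
            if (!cfg)
                break;                           /* no more configurations */
            if (cfg->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)
                av_log(NULL, AV_LOG_INFO, "supports device type %s\n",
                       av_hwdevice_get_type_name(cfg->device_type));
        }
    }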
ff_outlink_get_status
int ff_outlink_get_status(AVFilterLink *link)
Get the status on an output link.
Definition: avfilter.c:1525
AVFilterContext
An instance of a filter.
Definition: avfilter.h:333
need
Excerpt from the libavfilter filter-writing tutorial: after copying an existing filter (e.g. libavfilter/vf_edgedetect.c to libavfilter/vf_foobar.c), adding entries for foobar to libavfilter/Makefile and libavfilter/allfilters.c following the pattern of the other filters, and rebuilding, the FoobarContext structure is the filter's state context; it is already initialized when you get it, so you need not worry about uninitialized reads, and it is where you put all the global information that you need.
Definition: writing_filters.txt:75
factor
static const int factor[16]
Definition: vf_pp7.c:77
AV_CODEC_CAP_PARAM_CHANGE
#define AV_CODEC_CAP_PARAM_CHANGE
Codec supports changed parameters at any point.
Definition: codec.h:116
timeline
Definition: dashdec.c:46
https
Excerpt from the libavfilter filter-writing tutorial: after the boilerplate and credits, the next chunk is the Doxygen comment about the file; see https ...
Definition: writing_filters.txt:66
bad
static int bad(InterplayACMContext *s, unsigned ind, unsigned col)
Definition: interplayacm.c:122
print_program_info
static void print_program_info(int flags, int level)
Definition: cmdutils.c:1131
shift
static int shift(int a, int b)
Definition: sonic.c:82
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:117
opt_default
int opt_default(void *optctx, const char *opt, const char *arg)
Fallback for options that are not explicitly handled, these will be parsed through AVOptions.
Definition: cmdutils.c:540
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:259
desc
const char * desc
Definition: libsvtav1.c:79
review
Excerpt from the libavfilter filter-writing tutorial: libavfilter is monolithic, which means it is highly recommended to submit new filters to the FFmpeg development mailing list and make sure they are applied; otherwise out-of-tree filters are likely to have a very short lifetime due to more or less regular internal API changes and limited review.
Definition: writing_filters.txt:8
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
av_log_default_callback
void av_log_default_callback(void *ptr, int level, const char *fmt, va_list vl)
Default logging callback.
Definition: log.c:346
av_guess_format
const AVOutputFormat * av_guess_format(const char *short_name, const char *filename, const char *mime_type)
Return the output format in the list of registered output formats which best matches the provided par...
Definition: format.c:51
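A short sketch of choosing a muxer from a file name only; the name "out.mkv" is illustrative:
    const AVOutputFormat *ofmt = av_guess_format(NULL, "out.mkv", NULL);

    if (ofmt)
        av_log(NULL, AV_LOG_INFO, "guessed muxer %s, default video codec id %d\n",
               ofmt->name, ofmt->video_codec);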
add
static float add(float src0, float src1)
Definition: dnn_backend_native_layer_mathbinary.c:36
av_output_audio_device_next
const AVOutputFormat * av_output_audio_device_next(const AVOutputFormat *d)
Audio output devices iterator.
Definition: alldevices.c:133
AV_CODEC_CAP_SUBFRAMES
#define AV_CODEC_CAP_SUBFRAMES
Codec can output multiple frames per AVPacket. Normally demuxers return one frame at a time,...
Definition: codec.h:95
AV_OPT_FLAG_SUBTITLE_PARAM
#define AV_OPT_FLAG_SUBTITLE_PARAM
Definition: opt.h:282
overflow
Excerpt from the undefined-behavior notes: in the C language, some operations are undefined, like signed integer overflow.
Definition: undefined.txt:3
show_colors
int show_colors(void *optctx, const char *opt, const char *arg)
Print a listing containing all the color names and values recognized by the program.
Definition: cmdutils.c:1719
codec_ids
static enum AVCodecID codec_ids[]
Definition: aac_adtstoasc_bsf.c:148
FF_CODEC_CAP_ALLOCATE_PROGRESS
#define FF_CODEC_CAP_ALLOCATE_PROGRESS
Definition: internal.h:76
L3
Excerpt from the Snow codec specification: half-pel interpolation rules, LL-band prediction, dequantization, and the 5/3 and 9/7 integer wavelet transforms used by the 2D IDWT and its subband layout.
Definition: snow.txt:554
it
Excerpt from the libavfilter filter-writing tutorial: copy libavfilter/vf_edgedetect.c to libavfilter/vf_foobar.c, add entries for foobar to libavfilter/Makefile and libavfilter/allfilters.c following the pattern of the other filters, run configure and make, and ffmpeg should produce a foobar.png with Lena edge-detected; that's it.
Definition: writing_filters.txt:31
AV_CODEC_CAP_HYBRID
#define AV_CODEC_CAP_HYBRID
Codec is potentially backed by a hardware implementation, but not necessarily.
Definition: codec.h:164
av_get_known_color_name
const char * av_get_known_color_name(int color_idx, const uint8_t **rgbp)
Get the name of a color from the internal table of hard-coded named colors.
Definition: parseutils.c:434
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:70
OptionDef::name
const char * name
Definition: cmdutils.h:159
show_filters
int show_filters(void *optctx, const char *opt, const char *arg)
Print a listing containing all the filters supported by the program.
Definition: cmdutils.c:1672
show_encoders
int show_encoders(void *optctx, const char *opt, const char *arg)
Print a listing containing all the encoders supported by the program.
Definition: cmdutils.c:1639
functions
static const struct drawtext_function functions[]
av_free
#define av_free(p)
Definition: tableprint_vlc.h:34
AVDictionaryEntry
Definition: dict.h:81
opt_loglevel
int opt_loglevel(void *optctx, const char *opt, const char *arg)
Set the libav* libraries log level.
Definition: cmdutils.c:856
show_help_protocol
static void show_help_protocol(const char *name)
Definition: cmdutils.c:1858
FF_FILTER_FORWARD_STATUS
FF_FILTER_FORWARD_STATUS(inlink, outlink)
equal
static int equal(MetadataContext *s, const char *value1, const char *value2)
Definition: f_metadata.c:138
cr
static double cr(void *priv, double x, double y)
Definition: vf_geq.c:216
OptionGroupDef::sep
const char * sep
Option to be used as group separator.
Definition: cmdutils.h:300
channel_layouts
static const uint16_t channel_layouts[7]
Definition: dca_lbr.c:114
status_in
Excerpt from the libavfilter design notes on the activate callback: what the callback must do depends on the semantics of the filter; it must examine the status of the filter's links and proceed accordingly. The status of output links is stored in the status_in and status_out fields and tested by ff_outlink_frame_wanted(); when a frame is wanted on a link, the filter is expected to make efforts in that direction. The status of input links is stored in status_in.
Definition: filter_design.txt:154
AVFILTER_FLAG_SUPPORT_TIMELINE
#define AVFILTER_FLAG_SUPPORT_TIMELINE
Handy mask to test whether or not the filter supports the timeline feature (internally or generically)...
Definition: avfilter.h:139
avoid
Excerpt from the libavfilter design notes: the frame and frame reference mechanism is intended to avoid expensive copies of frame data.
Definition: filter_design.txt:45
filters
ist filters[ist->nb_filters - 1]
Definition: ffmpeg_filter.c:191
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
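A small sketch showing the overwrite behaviour controlled by the flags argument:
    AVDictionary *meta = NULL;

    av_dict_set(&meta, "title", "first",  0);                      /* creates the entry */
    av_dict_set(&meta, "title", "second", 0);                      /* overwrites it */
    av_dict_set(&meta, "title", "third",  AV_DICT_DONT_OVERWRITE); /* keeps "second" */
    av_dict_free(&meta);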
cmdutils.h
ready
Excerpt from the libavfilter design notes: a filter with several inputs must be ready for frames arriving randomly on any input and will most likely need some queuing mechanism; request_frame() is called when a frame is wanted on an output, and a filter that has queued frames already ready should push one of them.
Definition: filter_design.txt:258
format
fg outputs[0] format
Definition: ffmpeg_filter.c:175
htaps
static const double htaps[HTAPS]
The 2nd half (48 coeffs) of a 96-tap symmetric lowpass filter.
Definition: dsd_tablegen.h:55
OPT_BOOL
#define OPT_BOOL
Definition: cmdutils.h:162
d
d
Definition: ffmpeg_filter.c:156
once
static pthread_once_t once
Definition: ffjni.c:36
inverse
static uint32_t inverse(uint32_t v)
find multiplicative inverse modulo 2 ^ 32
Definition: asfcrypt.c:35
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: aeval.c:244
int32_t
int32_t
Definition: audioconvert.c:56
convert_header.str
string str
Definition: convert_header.py:20
parse_time_or_die
int64_t parse_time_or_die(const char *context, const char *timestr, int is_duration)
Parse a string specifying a time and return its corresponding value as a number of microseconds.
Definition: cmdutils.c:161
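A sketch, assuming the usual -ss style duration syntax; on a malformed string the helper logs an error and exits the program instead of returning:
    /* "00:01:30" as a duration: 90 seconds, returned in microseconds */
    int64_t start_us = parse_time_or_die("ss", "00:01:30", 1);   /* 90000000 */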
grow_array
void * grow_array(void *array, int elem_size, int *size, int new_size)
Realloc array to hold new_size elements of elem_size.
Definition: cmdutils.c:2169
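grow_array() is normally used through the GROW_ARRAY() helper; a minimal sketch of appending one element (allocation failure makes the helpers exit the program):
    int *values    = NULL;
    int  nb_values = 0;

    GROW_ARRAY(values, nb_values);        /* reallocates to hold one more element and bumps nb_values */
    values[nb_values - 1] = 42;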
imgutils.h
OutputStream
Definition: muxing.c:53
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:561
rgb
static const SheerTable rgb[2]
Definition: sheervideodata.h:32
AV_CODEC_CAP_DRAW_HORIZ_BAND
#define AV_CODEC_CAP_DRAW_HORIZ_BAND
Decoder can use draw_horiz_band callback.
Definition: codec.h:44
av_strlcpy
size_t av_strlcpy(char *dst, const char *src, size_t size)
Copy the string src to dst, but no more than size - 1 bytes, and null-terminate dst.
Definition: avstring.c:83
coeff
static const double coeff[2][5]
Definition: vf_owdenoise.c:73
block
The exact code depends on how similar the blocks are and how related they are to the block
Definition: filter_design.txt:207
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
OptionParseContext::nb_groups
int nb_groups
Definition: cmdutils.h:337
AV_CODEC_CAP_AVOID_PROBING
#define AV_CODEC_CAP_AVOID_PROBING
Decoder is not a preferred choice for probing.
Definition: codec.h:139
pix_norm1
static int pix_norm1(uint8_t *pix, int line_size, int w)
Definition: snowenc.c:174
find_option
static const OptionDef * find_option(const OptionDef *po, const char *name)
Definition: cmdutils.c:215
AVCodecHWConfig
Definition: codec.h:445
uninit
static av_cold int uninit(AVCodecContext *avctx)
Definition: crystalhd.c:279
h
h
Definition: vp9dsp_template.c:2038
ff_outlink_frame_wanted
Excerpt from the libavfilter design notes: the activate callback must examine the status of the filter's links and proceed accordingly; the status of output links is stored in the status_in and status_out fields and tested by the ff_outlink_frame_wanted() function. If this function returns true, processing requires a frame on this link.
avcodec_descriptor_get
const AVCodecDescriptor * avcodec_descriptor_get(enum AVCodecID id)
Definition: codec_desc.c:3508
Sequence
@ Sequence
Definition: mxf.h:37
AVDictionaryEntry::value
char * value
Definition: dict.h:83
av_input_video_device_next
const AVInputFormat * av_input_video_device_next(const AVInputFormat *d)
Video input devices iterator.
Definition: alldevices.c:128
avstring.h
dimension
Excerpt from the swscale documentation: slices are consecutive, non-overlapping rectangles of dimension (0, slice_top) - (picture_width, ...
project
static float project(float origin_x, float origin_y, float dest_x, float dest_y, int point_x, int point_y)
Definition: vsrc_gradients.c:156
show_help_options
void show_help_options(const OptionDef *options, const char *msg, int req_flags, int rej_flags, int alt_flags)
Print help for all options matching specified flags.
Definition: cmdutils.c:173
show_bsfs
int show_bsfs(void *optctx, const char *opt, const char *arg)
Print a listing containing all the bit stream filters supported by the program.
Definition: cmdutils.c:1645
Otherwise
Excerpt from the libavfilter filter-writing tutorial: libavfilter is monolithic, which means it is highly recommended to submit new filters to the FFmpeg development mailing list and make sure they are applied. Otherwise ...
Definition: writing_filters.txt:6
show_license
int show_license(void *optctx, const char *opt, const char *arg)
Print the license of the program to stdout.
Definition: cmdutils.c:1199
PRINT_LIB_INFO
#define PRINT_LIB_INFO(libname, LIBNAME, flags, level)
Definition: cmdutils.c:1090
monolithic
Excerpt from the libavfilter filter-writing tutorial: this document is a tutorial introduction to writing simple filters in libavfilter; libavfilter is monolithic.
Definition: writing_filters.txt:4
avcodec_descriptor_get_by_name
const AVCodecDescriptor * avcodec_descriptor_get_by_name(const char *name)
Definition: codec_desc.c:3523
planar
Excerpt from libswresample/audioconvert.c: the CONV_FUNC() and FMT_PAIR_FUNC() macros build the table of per-sample-format conversion functions, and swri_audio_convert_alloc()/swri_audio_convert() select between the generic converters, plain memcpy paths and architecture-specific SIMD implementations depending on the sample formats, the channel map, buffer alignment, and whether the input and output are packed or planar.
Definition: audioconvert.c:56
AV_CODEC_CAP_SMALL_LAST_FRAME
#define AV_CODEC_CAP_SMALL_LAST_FRAME
Codec can be fed a final frame with a smaller size.
Definition: codec.h:82
SwsContext
Definition: swscale_internal.h:283
av_opt_show2
int av_opt_show2(void *obj, void *av_log_obj, int req_flags, int rej_flags)
Show the obj options.
Definition: opt.c:1346
show_help_muxer
static void show_help_muxer(const char *name)
Definition: cmdutils.c:1876
AV_PIX_FMT_FLAG_PAL
#define AV_PIX_FMT_FLAG_PAL
Pixel format has a palette in data[1], values are indexes in this palette.
Definition: pixdesc.h:121
MpegEncContext
MpegEncContext.
Definition: mpegvideo.h:81
snprintf
#define snprintf
Definition: snprintf.h:34
filter
Excerpt from the Snow codec specification: half-pel interpolation rules, LL-band prediction, the wavelet transforms used by the 2D IDWT, and the lifting steps of the integer filter.
Definition: snow.txt:562
sanitize
static void sanitize(uint8_t *line)
Definition: log.c:245
do
Excerpt from the libavfilter filter-writing tutorial on the AVOption table: each entry has a name (keep it simple and lowercase), a description of what the option does, and default, minimum, maximum and flags fields.
Definition: writing_filters.txt:90
OptionParseContext::cur_group
OptionGroup cur_group
Definition: cmdutils.h:340
passed
static int passed(HysteresisContext *s, int x, int y, int w)
Definition: vf_hysteresis.c:140
AV_LOG_PRINT_LEVEL
#define AV_LOG_PRINT_LEVEL
Include the log severity in messages originating from codecs.
Definition: log.h:379
get_video_buffer
static AVFrame * get_video_buffer(AVFilterLink *inlink, int w, int h)
Definition: avf_concat.c:199
Therefore
Excerpt from the libavfilter design notes: buffered frames must be flushed immediately if a new input produces new ones; the filter must not call request_frame() to get more input, it must just process or queue the frame, leaving requests for more frames to its request_frame() method or to the application. A filter with several inputs must therefore be ready for frames arriving randomly on any input.
Definition: filter_design.txt:244
undefined
Excerpt from the undefined-behavior notes: in the C language, some operations are undefined.
Definition: undefined.txt:3
is_device
static int is_device(const AVClass *avclass)
Definition: cmdutils.c:1273
av_bprint_chars
void av_bprint_chars(AVBPrint *buf, char c, unsigned n)
Append char c n times to a print buffer.
Definition: bprint.c:140
show_demuxers
int show_demuxers(void *optctx, const char *opt, const char *arg)
Print a listing containing all the demuxers supported by the program (including devices).
Definition: cmdutils.c:1353
channel
channel
Definition: ebur128.h:39
swscale.h
request_frame
static int request_frame(AVFilterLink *outlink)
Definition: aeval.c:274
match_group_separator
static int match_group_separator(const OptionGroupDef *groups, int nb_groups, const char *opt)
Definition: cmdutils.c:627
AVInputFormat::priv_class
const AVClass * priv_class
AVClass for the private context.
Definition: avformat.h:646
foobar
Excerpt from the libavfilter filter-writing tutorial: boilerplate around the private context of the example filter vf_foobar; it is the only field you need to keep, and there is some magic around it you do not need to care about.
Definition: writing_filters.txt:84
OptionDef::func_arg
int(* func_arg)(void *, const char *, const char *)
Definition: cmdutils.h:182
opt_find
static const AVOption * opt_find(void *obj, const char *name, const char *unit, int opt_flags, int search_flags)
Definition: cmdutils.c:530
av_x_if_null
static void * av_x_if_null(const void *p, const void *x)
Return x default pointer in case p is NULL.
Definition: avutil.h:308
nb_output_streams
int nb_output_streams
Definition: ffmpeg.c:154
av_display_rotation_get
double av_display_rotation_get(const int32_t matrix[9])
Extract the rotation component of the transformation matrix.
Definition: display.c:34
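A sketch of the typical pattern for turning a stream's display matrix into a rotation angle; st is assumed to be a valid AVStream obtained from a demuxer:
    uint8_t *sd    = av_stream_get_side_data(st, AV_PKT_DATA_DISPLAYMATRIX, NULL);
    double   theta = 0.0;

    if (sd)
        theta = av_display_rotation_get((int32_t *)sd);   /* counter-clockwise degrees */
    av_log(NULL, AV_LOG_INFO, "rotation: %.2f degrees\n", theta);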
avcodec_find_encoder_by_name
const AVCodec * avcodec_find_encoder_by_name(const char *name)
Find a registered encoder with the specified name.
Definition: allcodecs.c:933
OutputFile
Definition: ffmpeg.h:575
re
float re
Definition: fft.c:82
GEN
#define GEN(table)
program_birth_year
const int program_birth_year
program birth year, defined by the program for show_banner()
Definition: ffmpeg.c:110
min
float min
Definition: vorbis_enc_data.h:456
OptionDef::flags
int flags
Definition: cmdutils.h:160
OPT_DOUBLE
#define OPT_DOUBLE
Definition: cmdutils.h:177
dither
static const uint8_t dither[8][8]
Definition: vf_fspp.c:59